lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
crates/mun_codegen/src/ir/function.rs
tdejager/mun
769bfe5a3a921bcc5289217e692f7f8cf19648ab
use super::try_convert_any_to_basic; use crate::ir::dispatch_table::DispatchTable; use crate::values::{ BasicValueEnum, CallSiteValue, FloatValue, FunctionValue, InstructionOpcode, IntValue, }; use crate::{IrDatabase, Module, OptimizationLevel}; use inkwell::builder::Builder; use inkwell::passes::{PassManager, PassManagerBuilder}; use inkwell::types::{AnyTypeEnum, BasicTypeEnum}; use inkwell::{FloatPredicate, IntPredicate}; use mun_hir::{ self as hir, ArithOp, BinaryOp, Body, CmpOp, Expr, ExprId, HirDisplay, InferenceResult, Literal, Ordering, Pat, PatId, Path, Resolution, Resolver, Statement, TypeCtor, }; use std::collections::HashMap; use std::mem; use std::sync::Arc; pub(crate) fn create_pass_manager( module: &Module, optimization_lvl: OptimizationLevel, ) -> PassManager<FunctionValue> { let pass_builder = PassManagerBuilder::create(); pass_builder.set_optimization_level(optimization_lvl); let function_pass_manager = PassManager::create(module); pass_builder.populate_function_pass_manager(&function_pass_manager); function_pass_manager.initialize(); function_pass_manager } pub(crate) fn gen_signature( db: &impl IrDatabase, f: hir::Function, module: &Module, ) -> FunctionValue { let name = f.name(db).to_string(); if let AnyTypeEnum::FunctionType(ty) = db.type_ir(f.ty(db)) { module.add_function(&name, ty, None) } else { panic!("not a function type") } } pub(crate) fn gen_body<'a, 'b, D: IrDatabase>( db: &'a D, hir_function: hir::Function, llvm_function: FunctionValue, module: &'a Module, llvm_functions: &'a HashMap<mun_hir::Function, FunctionValue>, dispatch_table: &'b DispatchTable, ) -> FunctionValue { let context = db.context(); let builder = context.create_builder(); let body_ir = context.append_basic_block(&llvm_function, "body"); builder.position_at_end(&body_ir); let mut code_gen = BodyIrGenerator::new( db, module, hir_function, llvm_function, llvm_functions, builder, dispatch_table, ); code_gen.gen_fn_body(); llvm_function } struct BodyIrGenerator<'a, 'b, 
D: IrDatabase> { db: &'a D, module: &'a Module, body: Arc<Body>, infer: Arc<InferenceResult>, builder: Builder, fn_value: FunctionValue, pat_to_param: HashMap<PatId, inkwell::values::BasicValueEnum>, pat_to_local: HashMap<PatId, inkwell::values::PointerValue>, pat_to_name: HashMap<PatId, String>, function_map: &'a HashMap<mun_hir::Function, FunctionValue>, dispatch_table: &'b DispatchTable, } impl<'a, 'b, D: IrDatabase> BodyIrGenerator<'a, 'b, D> { fn new( db: &'a D, module: &'a Module, f: hir::Function, fn_value: FunctionValue, function_map: &'a HashMap<mun_hir::Function, FunctionValue>, builder: Builder, dispatch_table: &'b DispatchTable, ) -> Self { let body = f.body(db); let infer = f.infer(db); BodyIrGenerator { db, module, body, infer, builder, fn_value, pat_to_param: HashMap::default(), pat_to_local: HashMap::default(), pat_to_name: HashMap::default(), function_map, dispatch_table, } } fn gen_fn_body(&mut self) { for (i, (pat, _ty)) in self.body.params().iter().enumerate() { let body = self.body.clone(); match &body[*pat] { Pat::Bind { name } => { let name = name.to_string(); let param = self.fn_value.get_nth_param(i as u32).unwrap(); param.set_name(&name); self.pat_to_param.insert(*pat, param); self.pat_to_name.insert(*pat, name); } Pat::Wild => {} Pat::Missing | Pat::Path(_) => unreachable!(), } } let ret_value = self.gen_expr(self.body.body_expr()); if let Some(value) = ret_value { self.builder.build_return(Some(&value)); } else { self.builder.build_return(None); } } fn gen_expr(&mut self, expr: ExprId) -> Option<inkwell::values::BasicValueEnum> { let body = self.body.clone(); let mut value = match &body[expr] { &Expr::Block { ref statements, tail, } => { for statement in statements.iter() { match statement { Statement::Let { pat, initializer, .. 
} => { self.gen_let_statement(*pat, *initializer); } Statement::Expr(expr) => { self.gen_expr(*expr); } }; } tail.and_then(|expr| self.gen_expr(expr)) } Expr::Path(ref p) => { let resolver = mun_hir::resolver_for_expr(self.body.clone(), self.db, expr); Some(self.gen_path_expr(p, expr, &resolver)) } Expr::Literal(lit) => match lit { Literal::Int(v) => Some( self.module .get_context() .i64_type() .const_int(unsafe { mem::transmute::<i64, u64>(*v) }, true) .into(), ), Literal::Float(v) => Some( self.module .get_context() .f64_type() .const_float(*v as f64) .into(), ), Literal::String(_) | Literal::Bool(_) => unreachable!(), }, &Expr::BinaryOp { lhs, rhs, op } => { Some(self.gen_binary_op(lhs, rhs, op.expect("missing op"))) } Expr::Call { ref callee, ref args, } => self.gen_call(*callee, &args).try_as_basic_value().left(), _ => unreachable!("unimplemented expr type"), }; value = value.map(|value| { match ( value.get_type(), try_convert_any_to_basic(self.db.type_ir(self.infer[expr].clone())), ) { (BasicTypeEnum::IntType(_), Some(target @ BasicTypeEnum::FloatType(_))) => self .builder .build_cast(InstructionOpcode::SIToFP, value, target, "implicit_cast"), (a, Some(b)) if a == b => value, _ => unreachable!("could not perform implicit cast"), } }); value } fn new_alloca_builder(&self) -> Builder { let temp_builder = Builder::create(); let block = self .builder .get_insert_block() .expect("at this stage there must be a block"); if let Some(first_instruction) = block.get_first_instruction() { temp_builder.position_before(&first_instruction); } else { temp_builder.position_at_end(&block); } temp_builder } fn gen_let_statement(&mut self, pat: PatId, initializer: Option<ExprId>) { let initializer = initializer.and_then(|expr| self.gen_expr(expr)); match &self.body[pat] { Pat::Bind { name } => { let builder = self.new_alloca_builder(); let ty = try_convert_any_to_basic(self.db.type_ir(self.infer[pat].clone())) .expect("expected basic type"); let ptr = builder.build_alloca(ty, 
&name.to_string()); self.pat_to_local.insert(pat, ptr); self.pat_to_name.insert(pat, name.to_string()); if let Some(value) = initializer { self.builder.build_store(ptr, value); }; } Pat::Wild => {} Pat::Missing | Pat::Path(_) => unreachable!(), } } fn gen_path_expr( &self, path: &Path, _expr: ExprId, resolver: &Resolver, ) -> inkwell::values::BasicValueEnum { let resolution = resolver .resolve_path_without_assoc_items(self.db, path) .take_values() .expect("unknown path"); match resolution { Resolution::LocalBinding(pat) => { if let Some(param) = self.pat_to_param.get(&pat) { *param } else if let Some(ptr) = self.pat_to_local.get(&pat) { let name = self.pat_to_name.get(&pat).expect("could not find pat name"); self.builder.build_load(*ptr, &name) } else { unreachable!("could not find the pattern.."); } } Resolution::Def(_) => panic!("no support for module definitions"), } } fn gen_binary_op(&mut self, lhs: ExprId, rhs: ExprId, op: BinaryOp) -> BasicValueEnum { let lhs_value = self.gen_expr(lhs).expect("no lhs value"); let rhs_value = self.gen_expr(rhs).expect("no rhs value"); let lhs_type = self.infer[lhs].clone(); let rhs_type = self.infer[rhs].clone(); match lhs_type.as_simple() { Some(TypeCtor::Float) => self.gen_binary_op_float( *lhs_value.as_float_value(), *rhs_value.as_float_value(), op, ), Some(TypeCtor::Int) => { self.gen_binary_op_int(*lhs_value.as_int_value(), *rhs_value.as_int_value(), op) } _ => unreachable!( "Unsupported operation {0}op{1}", lhs_type.display(self.db), rhs_type.display(self.db) ), } } fn gen_binary_op_float( &mut self, lhs: FloatValue, rhs: FloatValue, op: BinaryOp, ) -> BasicValueEnum { match op { BinaryOp::ArithOp(ArithOp::Add) => self.builder.build_float_add(lhs, rhs, "add").into(), BinaryOp::ArithOp(ArithOp::Subtract) => { self.builder.build_float_sub(lhs, rhs, "sub").into() } BinaryOp::ArithOp(ArithOp::Divide) => { self.builder.build_float_div(lhs, rhs, "div").into() } BinaryOp::ArithOp(ArithOp::Multiply) => { 
self.builder.build_float_mul(lhs, rhs, "mul").into() } BinaryOp::CmpOp(op) => { let (name, predicate) = match op { CmpOp::Eq { negated: false } => ("eq", FloatPredicate::OEQ), CmpOp::Eq { negated: true } => ("neq", FloatPredicate::ONE), CmpOp::Ord { ordering: Ordering::Less, strict: false, } => ("lesseq", FloatPredicate::OLE), CmpOp::Ord { ordering: Ordering::Less, strict: true, } => ("less", FloatPredicate::OLT), CmpOp::Ord { ordering: Ordering::Greater, strict: false, } => ("greatereq", FloatPredicate::OGE), CmpOp::Ord { ordering: Ordering::Greater, strict: true, } => ("greater", FloatPredicate::OGT), }; self.builder .build_float_compare(predicate, lhs, rhs, name) .into() } _ => unreachable!(), } } fn gen_binary_op_int(&mut self, lhs: IntValue, rhs: IntValue, op: BinaryOp) -> BasicValueEnum { match op { BinaryOp::ArithOp(ArithOp::Add) => self.builder.build_int_add(lhs, rhs, "add").into(), BinaryOp::ArithOp(ArithOp::Subtract) => { self.builder.build_int_sub(lhs, rhs, "sub").into() } BinaryOp::ArithOp(ArithOp::Divide) => { self.builder.build_int_signed_div(lhs, rhs, "div").into() } BinaryOp::ArithOp(ArithOp::Multiply) => { self.builder.build_int_mul(lhs, rhs, "mul").into() } BinaryOp::CmpOp(op) => { let (name, predicate) = match op { CmpOp::Eq { negated: false } => ("eq", IntPredicate::EQ), CmpOp::Eq { negated: true } => ("neq", IntPredicate::NE), CmpOp::Ord { ordering: Ordering::Less, strict: false, } => ("lesseq", IntPredicate::SLE), CmpOp::Ord { ordering: Ordering::Less, strict: true, } => ("less", IntPredicate::SLT), CmpOp::Ord { ordering: Ordering::Greater, strict: false, } => ("greatereq", IntPredicate::SGE), CmpOp::Ord { ordering: Ordering::Greater, strict: true, } => ("greater", IntPredicate::SGT), }; self.builder .build_int_compare(predicate, lhs, rhs, name) .into() } _ => unreachable!(), } } fn should_use_dispatch_table(&self) -> bool { true } fn gen_call(&mut self, callee: ExprId, args: &[ExprId]) -> CallSiteValue { let function = self.infer[callee] 
.as_function_def() .expect("expected a function expression"); let args: Vec<BasicValueEnum> = args .iter() .map(|expr| self.gen_expr(*expr).expect("expected a value")) .collect(); if self.should_use_dispatch_table() { let ptr_value = self.dispatch_table .gen_function_lookup(self.db, &self.builder, function); self.builder .build_call(ptr_value, &args, &function.name(self.db).to_string()) } else { let llvm_function = self .function_map .get(&function) .expect("missing function value for hir function"); self.builder .build_call(*llvm_function, &args, &function.name(self.db).to_string()) } } } trait OptName { fn get_name(&self) -> Option<&str>; fn set_name<T: AsRef<str>>(&self, name: T); } impl OptName for BasicValueEnum { fn get_name(&self) -> Option<&str> { match self { BasicValueEnum::ArrayValue(v) => v.get_name().to_str().ok(), BasicValueEnum::IntValue(v) => v.get_name().to_str().ok(), BasicValueEnum::FloatValue(v) => v.get_name().to_str().ok(), BasicValueEnum::PointerValue(v) => v.get_name().to_str().ok(), BasicValueEnum::StructValue(v) => v.get_name().to_str().ok(), BasicValueEnum::VectorValue(v) => v.get_name().to_str().ok(), } } fn set_name<T: AsRef<str>>(&self, name: T) { match self { BasicValueEnum::ArrayValue(v) => v.set_name(name.as_ref()), BasicValueEnum::IntValue(v) => v.set_name(name.as_ref()), BasicValueEnum::FloatValue(v) => v.set_name(name.as_ref()), BasicValueEnum::PointerValue(v) => v.set_name(name.as_ref()), BasicValueEnum::StructValue(v) => v.set_name(name.as_ref()), BasicValueEnum::VectorValue(v) => v.set_name(name.as_ref()), }; } }
use super::try_convert_any_to_basic; use crate::ir::dispatch_table::DispatchTable; use crate::values::{ BasicValueEnum, CallSiteValue, FloatValue, FunctionValue, InstructionOpcode, IntValue, }; use crate::{IrDatabase, Module, OptimizationLevel}; use inkwell::builder::Builder; use inkwell::passes::{PassManager, PassManagerBuilder}; use inkwell::types::{AnyTypeEnum, BasicTypeEnum}; use inkwell::{FloatPredicate, IntPredicate}; use mun_hir::{ self as hir, ArithOp, BinaryOp, Body, CmpOp, Expr, ExprId, HirDisplay, InferenceResult, Literal, Ordering, Pat, PatId, Path, Resolution, Resolver, Statement, TypeCtor, }; use std::collections::HashMap; use std::mem; use std::sync::Arc; pub(crate) fn create_pass_manager( module: &Module, optimization_lvl: OptimizationLevel, ) -> PassManager<FunctionValue> { let pass_builder = PassManagerBuilder::create(); pass_builder.set_optimization_level(optimization_lvl); let function_pass_manager = PassManager::create(module); pass_builder.populate_function_pass_manager(&function_pass_manager); function_pass_manager.initialize(); function_pass_manager } pub(crate) fn gen_signature( db: &impl IrDatabase, f: hir::Function, module: &Module, ) -> FunctionValue { let name = f.name(db).to_string(); if let AnyTypeEnum::FunctionType(ty) = db.type_ir(f.ty(db)) { module.add_function(&name, ty, None) } else { panic!("not a function type") } } pub(crate) fn gen_body<'a, 'b, D: IrDatabase>( db: &'a D, hir_function: hir::Function, llvm_function: FunctionValue, module: &'a Module, llvm_functions: &'a HashMap<mun_hir::Function, FunctionValue>, dispatch_table: &'b DispatchTable, ) -> FunctionValue { let context = db.context(); let builder = context.create_builder(); let body_ir = context.append_basic_block(&llvm_function, "body"); builder.position_at_end(&body_ir); let mut code_gen = BodyIrGenerator::new( db, module, hir_function, llvm_function, llvm_functions, builder, dispatch_table, ); code_gen.gen_fn_body(); llvm_function } struct BodyIrGenerator<'a, 'b, 
D: IrDatabase> { db: &'a D, module: &'a Module, body: Arc<Body>, infer: Arc<InferenceResult>, builder: Builder, fn_value: FunctionValue, pat_to_param: HashMap<PatId, inkwell::values::BasicValueEnum>, pat_to_local: HashMap<PatId, inkwell::values::PointerValue>, pat_to_name: HashMap<PatId, String>, function_map: &'a HashMap<mun_hir::Function, FunctionValue>, dispatch_table: &'b DispatchTable, } impl<'a, 'b, D: IrDatabase> BodyIrGenerator<'a, 'b, D> { fn new( db: &'a D, module: &'a Module, f: hir::Function, fn_value: FunctionValue, function_map: &'a HashMap<mun_hir::Function, FunctionValue>, builder: Builder, dispatch_table: &'b DispatchTable, ) -> Self { let body = f.body(db); let infer = f.infer(db); BodyIrGenerator { db, module, body, infer, builder, fn_value, pat_to_param: HashMap::default(), pat_to_local: HashMap::default(), pat_to_name: HashMap::default(), function_map, dispatch_table, } } fn gen_fn_body(&mut self) { for (i, (pat, _ty)) in self.body.params().iter().enumerate() { let body = self.body.clone(); match &body[*pat] { Pat::Bind { name } => { let name = name.to_string(); let param = self.fn_value.get_nth_param(i as u32).unwrap(); param.set_name(&name); self.pat_to_param.insert(*pat, param); self.pat_to_name.insert(*pat, name); } Pat::Wild => {} Pat::Missing | Pat::Path(_) => unreachable!(), } } let ret_value = self.gen_expr(self.body.body_expr()); if let Some(value) = ret_value { self.builder.build_return(Some(&value)); } else { self.builder.build_return(None); } } fn gen_expr(&mut self, expr: ExprId) -> Option<inkwell::values::BasicValueEnum> { let body = self.body.clone(); let mut value = match &body[expr] { &Expr::Block { ref statements, tail, } => { for statement in statements.iter() { match statement { Statement::Let { pat, initializer, .. 
} => { self.gen_let_statement(*pat, *initializer); } Statement::Expr(expr) => { self.gen_expr(*expr); } }; } tail.and_then(|expr| self.gen_expr(expr)) } Expr::Path(ref p) => { let resolver = mun_hir::resolver_for_expr(self.body.clone(), self.db, expr); Some(self.gen_path_expr(p, expr, &resolver)) } Expr::Literal(lit) => match lit { Literal::Int(v) => Some( self.module .get_context() .i64_type() .const_int(unsafe { mem::transmute::<i64, u64>(*v) }, true) .into(), ), Literal::Float(v) => Some( self.module .get_context() .f64_type() .const_float(*v as f64) .into(), ), Literal::String(_) | Literal::Bool(_) => unreachable!(), }, &Expr::BinaryOp { lhs, rhs, op } => { Some(self.gen_binary_op(lhs, rhs, op.expect("missing op"))) } Expr::Call { ref callee, ref args, } => self.gen_call(*callee, &args).try_as_basic_value().left(), _ => unreachable!("unimplemented expr type"), }; value = value.map(|value| { match ( value.get_type(), try_convert_any_to_basic(self.db.type_ir(self.infer[expr].clone())), ) { (BasicTypeEnum::IntType(_), Some(target @ BasicTypeEnum::FloatType(_))) => self .builder .build_cast(InstructionOpcode::SIToFP, value, target, "implicit_cast"), (a, Some(b)) if a == b => value, _ => unreachable!("could not perform implicit cast"), } }); value } fn new_alloca_builder(&self) -> Builder { let temp_builder = Builder::create(); let block = self .builder .get_insert_blo
Resolution::Def(_) => panic!("no support for module definitions"), } } fn gen_binary_op(&mut self, lhs: ExprId, rhs: ExprId, op: BinaryOp) -> BasicValueEnum { let lhs_value = self.gen_expr(lhs).expect("no lhs value"); let rhs_value = self.gen_expr(rhs).expect("no rhs value"); let lhs_type = self.infer[lhs].clone(); let rhs_type = self.infer[rhs].clone(); match lhs_type.as_simple() { Some(TypeCtor::Float) => self.gen_binary_op_float( *lhs_value.as_float_value(), *rhs_value.as_float_value(), op, ), Some(TypeCtor::Int) => { self.gen_binary_op_int(*lhs_value.as_int_value(), *rhs_value.as_int_value(), op) } _ => unreachable!( "Unsupported operation {0}op{1}", lhs_type.display(self.db), rhs_type.display(self.db) ), } } fn gen_binary_op_float( &mut self, lhs: FloatValue, rhs: FloatValue, op: BinaryOp, ) -> BasicValueEnum { match op { BinaryOp::ArithOp(ArithOp::Add) => self.builder.build_float_add(lhs, rhs, "add").into(), BinaryOp::ArithOp(ArithOp::Subtract) => { self.builder.build_float_sub(lhs, rhs, "sub").into() } BinaryOp::ArithOp(ArithOp::Divide) => { self.builder.build_float_div(lhs, rhs, "div").into() } BinaryOp::ArithOp(ArithOp::Multiply) => { self.builder.build_float_mul(lhs, rhs, "mul").into() } BinaryOp::CmpOp(op) => { let (name, predicate) = match op { CmpOp::Eq { negated: false } => ("eq", FloatPredicate::OEQ), CmpOp::Eq { negated: true } => ("neq", FloatPredicate::ONE), CmpOp::Ord { ordering: Ordering::Less, strict: false, } => ("lesseq", FloatPredicate::OLE), CmpOp::Ord { ordering: Ordering::Less, strict: true, } => ("less", FloatPredicate::OLT), CmpOp::Ord { ordering: Ordering::Greater, strict: false, } => ("greatereq", FloatPredicate::OGE), CmpOp::Ord { ordering: Ordering::Greater, strict: true, } => ("greater", FloatPredicate::OGT), }; self.builder .build_float_compare(predicate, lhs, rhs, name) .into() } _ => unreachable!(), } } fn gen_binary_op_int(&mut self, lhs: IntValue, rhs: IntValue, op: BinaryOp) -> BasicValueEnum { match op { 
BinaryOp::ArithOp(ArithOp::Add) => self.builder.build_int_add(lhs, rhs, "add").into(), BinaryOp::ArithOp(ArithOp::Subtract) => { self.builder.build_int_sub(lhs, rhs, "sub").into() } BinaryOp::ArithOp(ArithOp::Divide) => { self.builder.build_int_signed_div(lhs, rhs, "div").into() } BinaryOp::ArithOp(ArithOp::Multiply) => { self.builder.build_int_mul(lhs, rhs, "mul").into() } BinaryOp::CmpOp(op) => { let (name, predicate) = match op { CmpOp::Eq { negated: false } => ("eq", IntPredicate::EQ), CmpOp::Eq { negated: true } => ("neq", IntPredicate::NE), CmpOp::Ord { ordering: Ordering::Less, strict: false, } => ("lesseq", IntPredicate::SLE), CmpOp::Ord { ordering: Ordering::Less, strict: true, } => ("less", IntPredicate::SLT), CmpOp::Ord { ordering: Ordering::Greater, strict: false, } => ("greatereq", IntPredicate::SGE), CmpOp::Ord { ordering: Ordering::Greater, strict: true, } => ("greater", IntPredicate::SGT), }; self.builder .build_int_compare(predicate, lhs, rhs, name) .into() } _ => unreachable!(), } } fn should_use_dispatch_table(&self) -> bool { true } fn gen_call(&mut self, callee: ExprId, args: &[ExprId]) -> CallSiteValue { let function = self.infer[callee] .as_function_def() .expect("expected a function expression"); let args: Vec<BasicValueEnum> = args .iter() .map(|expr| self.gen_expr(*expr).expect("expected a value")) .collect(); if self.should_use_dispatch_table() { let ptr_value = self.dispatch_table .gen_function_lookup(self.db, &self.builder, function); self.builder .build_call(ptr_value, &args, &function.name(self.db).to_string()) } else { let llvm_function = self .function_map .get(&function) .expect("missing function value for hir function"); self.builder .build_call(*llvm_function, &args, &function.name(self.db).to_string()) } } } trait OptName { fn get_name(&self) -> Option<&str>; fn set_name<T: AsRef<str>>(&self, name: T); } impl OptName for BasicValueEnum { fn get_name(&self) -> Option<&str> { match self { BasicValueEnum::ArrayValue(v) => 
v.get_name().to_str().ok(), BasicValueEnum::IntValue(v) => v.get_name().to_str().ok(), BasicValueEnum::FloatValue(v) => v.get_name().to_str().ok(), BasicValueEnum::PointerValue(v) => v.get_name().to_str().ok(), BasicValueEnum::StructValue(v) => v.get_name().to_str().ok(), BasicValueEnum::VectorValue(v) => v.get_name().to_str().ok(), } } fn set_name<T: AsRef<str>>(&self, name: T) { match self { BasicValueEnum::ArrayValue(v) => v.set_name(name.as_ref()), BasicValueEnum::IntValue(v) => v.set_name(name.as_ref()), BasicValueEnum::FloatValue(v) => v.set_name(name.as_ref()), BasicValueEnum::PointerValue(v) => v.set_name(name.as_ref()), BasicValueEnum::StructValue(v) => v.set_name(name.as_ref()), BasicValueEnum::VectorValue(v) => v.set_name(name.as_ref()), }; } }
ck() .expect("at this stage there must be a block"); if let Some(first_instruction) = block.get_first_instruction() { temp_builder.position_before(&first_instruction); } else { temp_builder.position_at_end(&block); } temp_builder } fn gen_let_statement(&mut self, pat: PatId, initializer: Option<ExprId>) { let initializer = initializer.and_then(|expr| self.gen_expr(expr)); match &self.body[pat] { Pat::Bind { name } => { let builder = self.new_alloca_builder(); let ty = try_convert_any_to_basic(self.db.type_ir(self.infer[pat].clone())) .expect("expected basic type"); let ptr = builder.build_alloca(ty, &name.to_string()); self.pat_to_local.insert(pat, ptr); self.pat_to_name.insert(pat, name.to_string()); if let Some(value) = initializer { self.builder.build_store(ptr, value); }; } Pat::Wild => {} Pat::Missing | Pat::Path(_) => unreachable!(), } } fn gen_path_expr( &self, path: &Path, _expr: ExprId, resolver: &Resolver, ) -> inkwell::values::BasicValueEnum { let resolution = resolver .resolve_path_without_assoc_items(self.db, path) .take_values() .expect("unknown path"); match resolution { Resolution::LocalBinding(pat) => { if let Some(param) = self.pat_to_param.get(&pat) { *param } else if let Some(ptr) = self.pat_to_local.get(&pat) { let name = self.pat_to_name.get(&pat).expect("could not find pat name"); self.builder.build_load(*ptr, &name) } else { unreachable!("could not find the pattern.."); } }
random
[ { "content": "/// Build the declared type of a function. This should not need to look at the\n\n/// function body.\n\nfn type_for_fn(_db: &impl HirDatabase, def: Function) -> Ty {\n\n Ty::simple(TypeCtor::FnDef(def))\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/lower.rs", "rank": 0, "score": 376498.15212437155 }, { "content": "// needs arbitrary_self_types to be a method... or maybe move to the def?\n\npub fn resolver_for_expr(body: Arc<Body>, db: &impl HirDatabase, expr_id: ExprId) -> Resolver {\n\n let scopes = db.expr_scopes(body.owner);\n\n resolver_for_scope(body, db, scopes.scope_for(expr_id))\n\n}\n\n\n\npub(crate) fn resolver_for_scope(\n\n body: Arc<Body>,\n\n db: &impl HirDatabase,\n\n scope_id: Option<scope::ScopeId>,\n\n) -> Resolver {\n\n let mut r = body.owner.resolver(db);\n\n let scopes = db.expr_scopes(body.owner);\n\n let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();\n\n for scope in scope_chain.into_iter().rev() {\n\n r = r.push_expr_scope(Arc::clone(&scopes), scope);\n\n }\n\n r\n\n}\n", "file_path": "crates/mun_hir/src/expr.rs", "rank": 1, "score": 363439.14726419514 }, { "content": "/// The entry point of type inference. This method takes a body and infers the types of all the\n\n/// expressions and patterns. 
Diagnostics are also reported and stored in the `InferenceResult`.\n\npub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> {\n\n let body = def.body(db);\n\n let resolver = def.resolver(db);\n\n let mut ctx = InferenceResultBuilder::new(db, body, resolver);\n\n\n\n match def {\n\n DefWithBody::Function(_) => ctx.infer_signature(),\n\n }\n\n\n\n ctx.infer_body();\n\n\n\n Arc::new(ctx.resolve_all())\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 2, "score": 348783.2355419431 }, { "content": "pub fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig {\n\n let data = def.data(db);\n\n let resolver = def.resolver(db);\n\n let params = data\n\n .params()\n\n .iter()\n\n .map(|tr| Ty::from_hir(db, &resolver, data.type_ref_map(), tr).ty)\n\n .collect::<Vec<_>>();\n\n let ret = Ty::from_hir(db, &resolver, data.type_ref_map(), data.ret_type()).ty;\n\n FnSig::from_params_and_return(params, ret)\n\n}\n\n\n\npub mod diagnostics {\n\n use crate::type_ref::TypeRefId;\n\n\n\n #[derive(Debug, PartialEq, Eq, Clone)]\n\n pub(crate) enum LowerDiagnostic {\n\n UnresolvedType { id: TypeRefId },\n\n }\n\n}\n", "file_path": "crates/mun_hir/src/ty/lower.rs", "rank": 3, "score": 323363.115285824 }, { "content": "/// Construct an IR `MunTypeInfo` struct value for the specified `TypeInfo`\n\nfn type_info_ir(ty: &TypeInfo, module: &Module) -> StructValue {\n\n let context = module.get_context();\n\n let guid_values: [IntValue; 16] =\n\n array_init::array_init(|i| context.i8_type().const_int(u64::from(ty.guid[i]), false));\n\n context.const_struct(\n\n &[\n\n context.i8_type().const_array(&guid_values).into(),\n\n intern_string(module, &ty.name).into(),\n\n ],\n\n false,\n\n )\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/code_gen/symbols.rs", "rank": 4, "score": 280105.6333174797 }, { "content": "fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) {\n\n scopes.set_scope(expr, scope);\n\n 
match &body[expr] {\n\n Expr::Block { statements, tail } => {\n\n compute_block_scopes(&statements, *tail, body, scopes, scope);\n\n }\n\n e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),\n\n };\n\n}\n", "file_path": "crates/mun_hir/src/expr/scope.rs", "rank": 5, "score": 276479.8159017526 }, { "content": "pub fn type_info_query(_db: &impl IrDatabase, ty: Ty) -> TypeInfo {\n\n match ty {\n\n Ty::Apply(ctor) => match ctor.ctor {\n\n TypeCtor::Float => TypeInfo::from_name(\"@core::float\"),\n\n TypeCtor::Int => TypeInfo::from_name(\"@core::int\"),\n\n TypeCtor::Bool => TypeInfo::from_name(\"@core::bool\"),\n\n _ => unreachable!(),\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/code_gen/symbols.rs", "rank": 6, "score": 266210.5326480015 }, { "content": "/// Walks the subtree in bfs order, calling `f` for each node.\n\nfn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {\n\n let mut curr_layer = vec![node.clone()];\n\n let mut next_layer = vec![];\n\n while !curr_layer.is_empty() {\n\n curr_layer.drain(..).for_each(|node| {\n\n next_layer.extend(node.children());\n\n f(node);\n\n });\n\n std::mem::swap(&mut curr_layer, &mut next_layer);\n\n }\n\n}\n", "file_path": "crates/mun_hir/src/source_id.rs", "rank": 7, "score": 248965.5451851138 }, { "content": "/// The inference context contains all information needed during type inference.\n\nstruct InferenceResultBuilder<'a, D: HirDatabase> {\n\n db: &'a D,\n\n body: Arc<Body>,\n\n resolver: Resolver,\n\n\n\n type_of_expr: ArenaMap<ExprId, Ty>,\n\n type_of_pat: ArenaMap<PatId, Ty>,\n\n diagnostics: Vec<InferenceDiagnostic>,\n\n\n\n type_variables: TypeVariableTable,\n\n\n\n /// The return type of the function being inferred.\n\n return_ty: Ty,\n\n}\n\n\n\nimpl<'a, D: HirDatabase> InferenceResultBuilder<'a, D> {\n\n /// Construct a new `InferenceContext` from a `Body` and a `Resolver` for that body.\n\n fn new(db: &'a D, body: Arc<Body>, resolver: 
Resolver) -> Self {\n\n InferenceResultBuilder {\n\n type_of_expr: ArenaMap::default(),\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 8, "score": 239985.10886390612 }, { "content": "struct TypeVariableData {\n\n // origin: TypeVariableOrigin,\n\n// diverging: bool,\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 9, "score": 239599.86695437314 }, { "content": "fn name_ref(p: &mut Parser) {\n\n if p.at(IDENT) {\n\n let m = p.start();\n\n p.bump(IDENT);\n\n m.complete(p, NAME_REF);\n\n } else {\n\n p.error_and_bump(\"expected identifier\");\n\n }\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar.rs", "rank": 10, "score": 237208.7003765151 }, { "content": "struct Instantiate {\n\n tv: TypeVarId,\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 11, "score": 236603.92915359593 }, { "content": "struct Delegate;\n\n\n\nimpl TypeVariableTable {\n\n /// Creates a new generic infer type variable\n\n pub fn new_type_var(&mut self) -> TypeVarId {\n\n let eq_key = self.eq_relations.new_key(TypeVarValue::Unknown);\n\n let index = self.values.push(TypeVariableData {});\n\n assert_eq!(eq_key.0, index as u32);\n\n eq_key\n\n }\n\n\n\n /// Records that `a == b`\n\n pub fn equate(&mut self, a: TypeVarId, b: TypeVarId) {\n\n debug_assert!(self.eq_relations.probe_value(a).is_unknown());\n\n debug_assert!(self.eq_relations.probe_value(b).is_unknown());\n\n self.eq_relations.union(a, b);\n\n }\n\n\n\n /// Instantiates `tv` with the type `ty`.\n\n pub fn instantiate(&mut self, tv: TypeVarId, ty: Ty) {\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 12, "score": 236603.92915359593 }, { "content": "fn param(p: &mut Parser) {\n\n let m = p.start();\n\n patterns::pattern(p);\n\n types::ascription(p);\n\n m.complete(p, PARAM);\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar/params.rs", "rank": 13, "score": 235322.50625240366 }, { "content": "fn name(p: &mut 
Parser) {\n\n name_recovery(p, TokenSet::empty())\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar.rs", "rank": 14, "score": 227496.45888674998 }, { "content": "fn path_expr(p: &mut Parser) -> CompletedMarker {\n\n let m = p.start();\n\n paths::expr_path(p);\n\n m.complete(p, PATH_EXPR)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 15, "score": 224508.04462180397 }, { "content": "/// Build the declared type of a static.\n\nfn type_for_builtin(def: BuiltinType) -> Ty {\n\n Ty::simple(match def {\n\n BuiltinType::Float => TypeCtor::Float,\n\n BuiltinType::Int => TypeCtor::Int,\n\n BuiltinType::Boolean => TypeCtor::Bool,\n\n })\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/lower.rs", "rank": 16, "score": 222592.00940739643 }, { "content": "fn path(p: &mut Parser, mode: Mode) {\n\n let path = p.start();\n\n path_segment(p, mode, true);\n\n let mut qualifier = path.complete(p, PATH);\n\n loop {\n\n let import_tree = match p.nth(1) {\n\n T![*] | T!['{'] => true,\n\n _ => false,\n\n };\n\n if p.at(T![::]) && !import_tree {\n\n let path = qualifier.precede(p);\n\n p.bump(T![::]);\n\n path_segment(p, mode, false);\n\n let path = path.complete(p, PATH);\n\n qualifier = path;\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/paths.rs", "rank": 17, "score": 219937.28062066413 }, { "content": "fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse<SourceFile> {\n\n let text = db.file_text(file_id);\n\n SourceFile::parse(&*text)\n\n}\n\n\n", "file_path": "crates/mun_hir/src/db.rs", "rank": 18, "score": 212551.49062520027 }, { "content": "fn line_index_query(db: &impl SourceDatabase, file_id: FileId) -> Arc<LineIndex> {\n\n let text = db.file_text(file_id);\n\n Arc::new(LineIndex::new(text.as_ref()))\n\n}\n", "file_path": "crates/mun_hir/src/db.rs", "rank": 19, "score": 210555.93831021746 }, { "content": "fn call_expr(p: &mut Parser, lhs: CompletedMarker) 
-> CompletedMarker {\n\n assert!(p.at(T!['(']));\n\n let m = lhs.precede(p);\n\n arg_list(p);\n\n m.complete(p, CALL_EXPR)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 20, "score": 209861.7607575452 }, { "content": "/// Construct a global from the specified value\n\nfn gen_global(module: &Module, value: &dyn BasicValue, name: &str) -> GlobalValue {\n\n let global = module.add_global(value.as_basic_value_enum().get_type(), None, name);\n\n global.set_linkage(Linkage::Private);\n\n global.set_constant(true);\n\n global.set_unnamed_address(UnnamedAddress::Global);\n\n global.set_initializer(value);\n\n global\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/code_gen/symbols.rs", "rank": 21, "score": 203913.38239131664 }, { "content": "#[derive(Clone, PartialEq, Eq, Debug)]\n\nstruct Expectation {\n\n ty: Ty,\n\n // FIXME: In some cases, we need to be aware whether the expectation is that\n\n // the type match exactly what we passed, or whether it just needs to be\n\n // coercible to the expected type. 
See Expectation::rvalue_hint in rustc.\n\n}\n\n\n\nimpl Expectation {\n\n /// The expectation that the type of the expression needs to equal the given\n\n /// type.\n\n fn has_type(ty: Ty) -> Self {\n\n Expectation { ty }\n\n }\n\n\n\n /// This expresses no expectation on the type.\n\n fn none() -> Self {\n\n Expectation { ty: Ty::Unknown }\n\n }\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 22, "score": 201740.62324162055 }, { "content": "/// Get the output from running `llvm-config` with the given argument.\n\n///\n\n/// Lazily searches for or compiles LLVM as configured by the environment\n\n/// variables.\n\nfn llvm_config(arg: &str) -> String {\n\n llvm_config_ex(&*LLVM_CONFIG_PATH, arg).expect(\"Surprising failure from llvm-config\")\n\n}\n\n\n", "file_path": "crates/mun_codegen/build.rs", "rank": 23, "score": 200849.0171953663 }, { "content": "fn parse_from_tokens<F>(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)\n\nwhere\n\n F: FnOnce(&mut parser::Parser),\n\n{\n\n let mut p = parser::Parser::new(token_source);\n\n f(&mut p);\n\n let events = p.finish();\n\n event::process(tree_sink, events);\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing.rs", "rank": 24, "score": 197713.77721663675 }, { "content": "fn list(p: &mut Parser) {\n\n assert!(p.at(T!['(']));\n\n let m = p.start();\n\n p.bump(T!['(']);\n\n while !p.at(EOF) && !p.at(T![')']) {\n\n if !p.at_ts(VALUE_PARAMETER_FIRST) {\n\n p.error(\"expected value parameter\");\n\n break;\n\n }\n\n param(p);\n\n if !p.at(T![')']) {\n\n p.expect(T![,]);\n\n }\n\n }\n\n p.expect(T![')']);\n\n m.complete(p, PARAM_LIST);\n\n}\n\n\n\nconst VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST;\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/params.rs", "rank": 25, "score": 197038.5049152783 }, { "content": "/// Invoke the specified binary as llvm-config.\n\n///\n\n/// Explicit version of the `llvm_config` function that bubbles errors\n\n/// up.\n\nfn 
llvm_config_ex<S: AsRef<OsStr>>(binary: S, arg: &str) -> io::Result<String> {\n\n Command::new(binary)\n\n .arg(arg)\n\n .arg(\"--link-static\") // Don't use dylib for >= 3.9\n\n .output()\n\n .map(|output| {\n\n String::from_utf8(output.stdout).expect(\"Output from llvm-config was not valid UTF-8\")\n\n })\n\n}\n\n\n", "file_path": "crates/mun_codegen/build.rs", "rank": 26, "score": 195791.63146723967 }, { "content": "fn arg_list(p: &mut Parser) {\n\n assert!(p.at(T!['(']));\n\n let m = p.start();\n\n p.bump(T!['(']);\n\n while !p.at(T![')']) && !p.at(EOF) {\n\n if !p.at_ts(EXPR_FIRST) {\n\n p.error(\"expected expression\");\n\n break;\n\n }\n\n\n\n expr(p);\n\n if !p.at(T![')']) && !p.expect(T![,]) {\n\n break;\n\n }\n\n }\n\n p.eat(T![')']);\n\n m.complete(p, ARG_LIST);\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 27, "score": 194974.92175105488 }, { "content": "fn opt_fn_ret_type(p: &mut Parser) -> bool {\n\n if p.at(T![:]) {\n\n let m = p.start();\n\n p.bump(T![:]);\n\n types::type_(p);\n\n m.complete(p, RET_TYPE);\n\n true\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar/declarations.rs", "rank": 28, "score": 194552.8811916252 }, { "content": "/// Try to down cast an `AnyTypeEnum` into a `BasicTypeEnum`.\n\nfn try_convert_any_to_basic(ty: AnyTypeEnum) -> Option<BasicTypeEnum> {\n\n match ty {\n\n AnyTypeEnum::ArrayType(t) => Some(t.into()),\n\n AnyTypeEnum::FloatType(t) => Some(t.into()),\n\n AnyTypeEnum::IntType(t) => Some(t.into()),\n\n AnyTypeEnum::PointerType(t) => Some(t.into()),\n\n AnyTypeEnum::StructType(t) => Some(t.into()),\n\n AnyTypeEnum::VectorType(t) => Some(t.into()),\n\n _ => None,\n\n }\n\n}\n", "file_path": "crates/mun_codegen/src/ir.rs", "rank": 29, "score": 194351.2811039369 }, { "content": "fn postfix_expr(p: &mut Parser, mut lhs: CompletedMarker) -> CompletedMarker {\n\n loop {\n\n lhs = match p.current() {\n\n T!['('] => call_expr(p, lhs),\n\n _ => 
break,\n\n }\n\n }\n\n lhs\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 30, "score": 187869.56224649103 }, { "content": "fn compute_block_scopes(\n\n statements: &[Statement],\n\n tail: Option<ExprId>,\n\n body: &Body,\n\n scopes: &mut ExprScopes,\n\n mut scope: ScopeId,\n\n) {\n\n for stmt in statements {\n\n match stmt {\n\n Statement::Let {\n\n pat, initializer, ..\n\n } => {\n\n if let Some(expr) = initializer {\n\n scopes.set_scope(*expr, scope);\n\n compute_expr_scopes(*expr, body, scopes, scope);\n\n }\n\n scope = scopes.new_scope(scope);\n\n scopes.add_bindings(body, scope, *pat);\n\n }\n\n Statement::Expr(expr) => {\n\n scopes.set_scope(*expr, scope);\n\n compute_expr_scopes(*expr, body, scopes, scope);\n\n }\n\n }\n\n }\n\n if let Some(expr) = tail {\n\n compute_expr_scopes(expr, body, scopes, scope);\n\n }\n\n}\n\n\n", "file_path": "crates/mun_hir/src/expr/scope.rs", "rank": 31, "score": 187648.36839063597 }, { "content": "fn placeholder_pat(p: &mut Parser) -> CompletedMarker {\n\n assert!(p.at(T![_]));\n\n let m = p.start();\n\n p.bump(T![_]);\n\n m.complete(p, PLACEHOLDER_PAT)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/patterns.rs", "rank": 32, "score": 186660.57443244875 }, { "content": "fn bind_pat(p: &mut Parser) -> CompletedMarker {\n\n let m = p.start();\n\n name(p);\n\n m.complete(p, BIND_PAT)\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar/patterns.rs", "rank": 33, "score": 186660.57443244875 }, { "content": "fn paren_expr(p: &mut Parser) -> CompletedMarker {\n\n assert!(p.at(T!['(']));\n\n let m = p.start();\n\n p.bump(T!['(']);\n\n expr(p);\n\n p.expect(T![')']);\n\n m.complete(p, PAREN_EXPR)\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 34, "score": 186233.10207423294 }, { "content": "fn let_stmt(p: &mut Parser, m: Marker) {\n\n assert!(p.at(T![let]));\n\n p.bump(T![let]);\n\n patterns::pattern(p);\n\n if p.at(T![:]) {\n\n 
types::ascription(p);\n\n }\n\n if p.eat(T![=]) {\n\n expressions::expr(p);\n\n }\n\n\n\n p.eat(T![;]); // Semicolon at the end of statement belongs to the statement\n\n m.complete(p, LET_STMT);\n\n}\n\n\n\npub(super) fn expr(p: &mut Parser) {\n\n expr_bp(p, 1);\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 35, "score": 184565.7937360722 }, { "content": "fn error_block(p: &mut Parser, message: &str) {\n\n assert!(p.at(T!['{']));\n\n let m = p.start();\n\n p.error(message);\n\n p.bump(T!['{']);\n\n expressions::expr_block_contents(p);\n\n p.eat(T!['{']);\n\n m.complete(p, ERROR);\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar.rs", "rank": 36, "score": 184549.32253492245 }, { "content": "fn literal(p: &mut Parser) -> Option<CompletedMarker> {\n\n if !p.at_ts(LITERAL_FIRST) {\n\n return None;\n\n }\n\n let m = p.start();\n\n p.bump_any();\n\n Some(m.complete(p, LITERAL))\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 37, "score": 182667.06851540453 }, { "content": "fn path_segment(p: &mut Parser, _mode: Mode, first: bool) {\n\n let m = p.start();\n\n if first {\n\n p.eat(T![::]);\n\n }\n\n match p.current() {\n\n IDENT => {\n\n name_ref(p);\n\n }\n\n T![self] | T![super] => p.bump_any(),\n\n _ => p.error_recover(\n\n \"expected identifier\",\n\n declarations::DECLARATION_RECOVERY_SET,\n\n ),\n\n }\n\n m.complete(p, PATH_SEGMENT);\n\n}\n", "file_path": "crates/mun_syntax/src/parsing/grammar/paths.rs", "rank": 38, "score": 182584.14312334568 }, { "content": "fn name_recovery(p: &mut Parser, recovery: TokenSet) {\n\n if p.at(IDENT) {\n\n let m = p.start();\n\n p.bump(IDENT);\n\n m.complete(p, NAME);\n\n } else {\n\n p.error_recover(\"expected a name\", recovery)\n\n }\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar.rs", "rank": 39, "score": 182540.44129370316 }, { "content": "fn expr_stmt(p: &mut Parser) -> Option<CompletedMarker> {\n\n expr_bp(p, 
1)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 40, "score": 180401.17511937092 }, { "content": "fn atom_expr(p: &mut Parser) -> Option<CompletedMarker> {\n\n if let Some(m) = literal(p) {\n\n return Some(m);\n\n }\n\n\n\n if paths::is_path_start(p) {\n\n return Some(path_expr(p));\n\n }\n\n\n\n let marker = match p.current() {\n\n T!['('] => paren_expr(p),\n\n _ => {\n\n p.error_recover(\"expected expression\", EXPR_RECOVERY_SET);\n\n return None;\n\n }\n\n };\n\n Some(marker)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 41, "score": 180401.17511937092 }, { "content": "/// Intern a string by constructing a global value. Looks something like this:\n\n/// ```c\n\n/// const char[] GLOBAL_ = \"str\";\n\n/// ```\n\nfn intern_string(module: &Module, str: &str) -> PointerValue {\n\n let value = module.get_context().const_string(str, true);\n\n gen_global(module, &value, \".str\").as_pointer_value()\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/code_gen/symbols.rs", "rank": 42, "score": 177408.52540364824 }, { "content": "fn expr_bp(p: &mut Parser, bp: u8) -> Option<CompletedMarker> {\n\n // Parse left hand side of the expression\n\n let mut lhs = match lhs(p) {\n\n Some(lhs) => lhs,\n\n None => return None,\n\n };\n\n\n\n loop {\n\n let (op_bp, op) = current_op(p);\n\n if op_bp < bp {\n\n break;\n\n }\n\n\n\n let m = lhs.precede(p);\n\n p.bump(op);\n\n\n\n expr_bp(p, op_bp + 1);\n\n lhs = m.complete(p, BIN_EXPR);\n\n }\n\n\n\n Some(lhs)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 43, "score": 170093.75483382464 }, { "content": "pub fn macos_llvm_target(arch: &str) -> String {\n\n let (major, minor) = macos_deployment_target();\n\n format!(\"{}-apple-macosx{}.{}.0\", arch, major, minor)\n\n}\n", "file_path": "crates/mun_target/src/spec/apple_base.rs", "rank": 45, "score": 169763.7604471236 }, { "content": "fn atom_pat(p: &mut Parser, 
recovery_set: TokenSet) -> Option<CompletedMarker> {\n\n let t1 = p.nth(0);\n\n if t1 == IDENT {\n\n return Some(bind_pat(p));\n\n }\n\n\n\n let m = match t1 {\n\n T![_] => placeholder_pat(p),\n\n _ => {\n\n p.error_recover(\"expected pattern\", recovery_set);\n\n return None;\n\n }\n\n };\n\n Some(m)\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/patterns.rs", "rank": 46, "score": 167212.4675373569 }, { "content": "/// Optimizes the specified LLVM `Module` using the default passes for the given\n\n/// `OptimizationLevel`.\n\nfn optimize_module(module: &Module, optimization_lvl: OptimizationLevel) {\n\n let pass_builder = PassManagerBuilder::create();\n\n pass_builder.set_optimization_level(optimization_lvl);\n\n\n\n let module_pass_manager = PassManager::create(());\n\n pass_builder.populate_module_pass_manager(&module_pass_manager);\n\n module_pass_manager.run_on(module);\n\n}\n\n\n\npub mod symbols;\n", "file_path": "crates/mun_codegen/src/code_gen.rs", "rank": 47, "score": 163960.55002100134 }, { "content": "\n\nimpl TypeVarValue {\n\n fn known(&self) -> Option<&Ty> {\n\n match self {\n\n TypeVarValue::Known(ty) => Some(ty),\n\n TypeVarValue::Unknown => None,\n\n }\n\n }\n\n\n\n fn is_unknown(&self) -> bool {\n\n match self {\n\n TypeVarValue::Known(_) => false,\n\n TypeVarValue::Unknown => true,\n\n }\n\n }\n\n}\n\n\n\nimpl UnifyValue for TypeVarValue {\n\n type Error = NoError;\n\n\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 48, "score": 163942.37025352404 }, { "content": " debug_assert!(\n\n self.eq_relations.probe_value(tv).is_unknown(),\n\n \"instantiating type variable `{:?}` twice: new-value = {:?}, old-value={:?}\",\n\n tv,\n\n ty,\n\n self.eq_relations.probe_value(tv).known().unwrap()\n\n );\n\n self.eq_relations.union_value(tv, TypeVarValue::Known(ty));\n\n }\n\n\n\n /// If `ty` is a type-inference variable, and it has been instantiated, then return the\n\n /// instantiated type; otherwise returns 
`ty`.\n\n pub fn replace_if_possible<'t>(&mut self, ty: &'t Ty) -> Cow<'t, Ty> {\n\n let ty = Cow::Borrowed(ty);\n\n match &*ty {\n\n Ty::Infer(tv) => match self.eq_relations.probe_value(*tv).known() {\n\n Some(known_ty) => Cow::Owned(known_ty.clone()),\n\n _ => ty,\n\n },\n\n _ => ty,\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 49, "score": 163937.65210219307 }, { "content": " }\n\n }\n\n\n\n /// Returns indices of all variables that are not yet instantiated.\n\n pub fn unsolved_variables(&mut self) -> Vec<TypeVarId> {\n\n (0..self.values.len())\n\n .filter_map(|i| {\n\n let tv = TypeVarId::from_index(i as u32);\n\n match self.eq_relations.probe_value(tv) {\n\n TypeVarValue::Unknown { .. } => Some(tv),\n\n TypeVarValue::Known { .. } => None,\n\n }\n\n })\n\n .collect()\n\n }\n\n\n\n /// Returns true if the table still contains unresolved type variables\n\n pub fn has_unsolved_variables(&mut self) -> bool {\n\n (0..self.values.len()).any(|i| {\n\n let tv = TypeVarId::from_index(i as u32);\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 50, "score": 163933.89779280507 }, { "content": "use crate::Ty;\n\nuse drop_bomb::DropBomb;\n\nuse ena::snapshot_vec::{SnapshotVec, SnapshotVecDelegate};\n\nuse ena::unify::{InPlace, InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};\n\nuse std::borrow::Cow;\n\nuse std::fmt;\n\n\n\n/// The ID of a type variable.\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\n\npub struct TypeVarId(pub(crate) u32);\n\n\n\nimpl fmt::Display for TypeVarId {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"'{}\", self.0)\n\n }\n\n}\n\n\n\nimpl UnifyKey for TypeVarId {\n\n type Value = TypeVarValue;\n\n\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 51, "score": 163933.87540037863 }, { "content": " match self.eq_relations.probe_value(tv) {\n\n TypeVarValue::Unknown { .. } => true,\n\n TypeVarValue::Known { .. 
} => false,\n\n }\n\n })\n\n }\n\n}\n\n\n\npub struct Snapshot {\n\n snapshot: ena::snapshot_vec::Snapshot,\n\n eq_snapshot: ena::unify::Snapshot<InPlace<TypeVarId>>,\n\n bomb: DropBomb,\n\n}\n\n\n\nimpl TypeVariableTable {\n\n /// Creates a snapshot of the type variable state. This snapshot must later be committed\n\n /// (`commit`) or rolled back (`rollback_to()`). Nested snapshots are permitted but must be\n\n /// processed in a stack-like fashion.\n\n pub fn snapshot(&mut self) -> Snapshot {\n\n Snapshot {\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 52, "score": 163931.52714582527 }, { "content": " /// snapshot was created within another snapshot). Any snapshot created since that point\n\n /// must already have been committed or rolled back.\n\n pub fn commit(&mut self, s: Snapshot) {\n\n let Snapshot {\n\n snapshot,\n\n eq_snapshot,\n\n mut bomb,\n\n } = s;\n\n self.values.commit(snapshot);\n\n self.eq_relations.commit(eq_snapshot);\n\n bomb.defuse();\n\n }\n\n}\n\n\n\nimpl SnapshotVecDelegate for Delegate {\n\n type Value = TypeVariableData;\n\n type Undo = Instantiate;\n\n\n\n fn reverse(_values: &mut Vec<TypeVariableData>, _action: Instantiate) {\n\n // We don't actually have to *do* anything to reverse an\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 53, "score": 163929.9441868945 }, { "content": " fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {\n\n match (value1, value2) {\n\n // We should never equate two type variables, both of which have\n\n // known types. 
Instead, we recursively equate those types.\n\n (TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!(\n\n \"equating two type variables, both of which have known types: {:?} and {:?}\",\n\n t1, t2\n\n ),\n\n\n\n // If one side is known, prefer that one.\n\n (TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()),\n\n (TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()),\n\n\n\n (TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct TypeVariableTable {\n\n values: SnapshotVec<Delegate>,\n\n eq_relations: InPlaceUnificationTable<TypeVarId>,\n\n}\n\n\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 54, "score": 163928.36420318755 }, { "content": " fn index(&self) -> u32 {\n\n self.0\n\n }\n\n\n\n fn from_index(i: u32) -> Self {\n\n TypeVarId(i)\n\n }\n\n\n\n fn tag() -> &'static str {\n\n \"TypeVarId\"\n\n }\n\n}\n\n\n\n/// The value of a type variable: either we already know the type, or we don't\n\n/// know it yet.\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub enum TypeVarValue {\n\n Known(Ty),\n\n Unknown,\n\n}\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 55, "score": 163927.62328999495 }, { "content": " snapshot: self.values.start_snapshot(),\n\n eq_snapshot: self.eq_relations.snapshot(),\n\n bomb: DropBomb::new(\"Snapshot must be committed or rolled back\"),\n\n }\n\n }\n\n\n\n /// Undoes all changes since the snapshot was created. 
Any snapshot created since that point\n\n /// must already have been committed or rolled back.\n\n pub fn rollback_to(&mut self, s: Snapshot) {\n\n let Snapshot {\n\n snapshot,\n\n eq_snapshot,\n\n mut bomb,\n\n } = s;\n\n self.values.rollback_to(snapshot);\n\n self.eq_relations.rollback_to(eq_snapshot);\n\n bomb.defuse();\n\n }\n\n\n\n /// Commits all changes since the snapshot was created, making them permanent (unless this\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 56, "score": 163924.77646521205 }, { "content": " // instantiation; the value for a variable is stored in the\n\n // `eq_relations` and hence its rollback code will handle\n\n // it. In fact, we could *almost* just remove the\n\n // `SnapshotVec` entirely, except that we would have to\n\n // reproduce *some* of its logic, since we want to know which\n\n // type variables have been instantiated since the snapshot\n\n // was started, so we can implement `types_escaping_snapshot`.\n\n //\n\n // (If we extended the `UnificationTable` to let us see which\n\n // values have been unified and so forth, that might also\n\n // suffice.)\n\n }\n\n}\n", "file_path": "crates/mun_hir/src/ty/infer/type_variable.rs", "rank": 57, "score": 163915.3740648267 }, { "content": "#[test]\n\nfn let_statement() {\n\n test_snapshot(\n\n r#\"\n\n fn main(a:int):int {\n\n let b = a+1\n\n b\n\n }\n\n \"#,\n\n );\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/test.rs", "rank": 58, "score": 153530.7439069893 }, { "content": "#[test]\n\nfn function_calls() {\n\n test_snapshot(\n\n r#\"\n\n fn add_impl(a:int, b:int):int {\n\n a+b\n\n }\n\n\n\n fn add(a:int, b:int):int {\n\n add_impl(a,b)\n\n }\n\n\n\n fn test():int {\n\n add(4,5)\n\n add_impl(4,5)\n\n add(4,5)\n\n }\n\n \"#,\n\n );\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/test.rs", "rank": 59, "score": 153229.98387806618 }, { "content": "fn scan_block_comment(cursor: &mut Cursor) -> Option<SyntaxKind> {\n\n if cursor.matches('*') {\n\n 
cursor.bump();\n\n let mut depth: u32 = 1;\n\n while depth > 0 {\n\n if cursor.matches_str(\"*/\") {\n\n depth -= 1;\n\n cursor.bump();\n\n cursor.bump();\n\n } else if cursor.matches_str(\"/*\") {\n\n depth += 1;\n\n cursor.bump();\n\n cursor.bump();\n\n } else if cursor.bump().is_none() {\n\n break;\n\n }\n\n }\n\n Some(COMMENT)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/lexer/comments.rs", "rank": 60, "score": 152563.93082451253 }, { "content": "#[test]\n\nfn function_calls() {\n\n ok_snapshot_test(\n\n r#\"\n\n fn bar(i:number) { }\n\n fn foo(i:number) {\n\n bar(i+1)\n\n }\n\n \"#,\n\n )\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/tests/parser.rs", "rank": 61, "score": 150972.68040751404 }, { "content": "fn opt_visibility(p: &mut Parser) -> bool {\n\n if p.at(EXPORT_KW) {\n\n let m = p.start();\n\n p.bump(EXPORT_KW);\n\n m.complete(p, VISIBILITY);\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar.rs", "rank": 62, "score": 150880.20821881108 }, { "content": "/// Starts the runtime with the specified library and invokes function `entry`.\n\nfn start(matches: &ArgMatches) -> Result<(), failure::Error> {\n\n let mut runtime = runtime(matches)?;\n\n\n\n let entry_point = matches.value_of(\"entry\").unwrap_or(\"main\");\n\n\n\n #[allow(clippy::unit_arg)]\n\n invoke_fn!(runtime, entry_point).map_err(|e| failure::err_msg(format!(\"{}\", e)))\n\n}\n\n\n", "file_path": "crates/mun/src/main.rs", "rank": 63, "score": 147352.1733545011 }, { "content": "/// Build the source file specified\n\nfn build(matches: &ArgMatches) -> Result<(), failure::Error> {\n\n let options = compiler_options(matches)?;\n\n if matches.is_present(\"watch\") {\n\n mun_compiler_daemon::main(&options)\n\n } else {\n\n mun_compiler::main(&options).map(|_| {})\n\n }\n\n}\n\n\n", "file_path": "crates/mun/src/main.rs", "rank": 64, "score": 147347.6712303363 }, { "content": "#[test]\n\nfn function() {\n\n 
test_snapshot(\n\n r#\"\n\n fn main() {\n\n }\n\n \"#,\n\n );\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/test.rs", "rank": 65, "score": 147088.4283860523 }, { "content": "#[test]\n\nfn block() {\n\n ok_snapshot_test(\n\n r#\"\n\n fn foo() {\n\n let a;\n\n let b:i32;\n\n let c:string;\n\n }\"#,\n\n );\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/tests/parser.rs", "rank": 66, "score": 145912.39599045884 }, { "content": "#[test]\n\nfn function() {\n\n ok_snapshot_test(\n\n r#\"\n\n // Source file comment\n\n\n\n // Comment that belongs to the function\n\n fn a() {}\n\n fn b(value:number) {}\n\n export fn c() {}\n\n fn b(value:number):number {}\"#,\n\n );\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/tests/parser.rs", "rank": 67, "score": 145537.8384702248 }, { "content": "pub fn target() -> TargetResult {\n\n let mut base = super::apple_base::opts();\n\n base.cpu = \"core2\".to_string();\n\n\n\n // Clang automatically chooses a more specific target based on\n\n // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work\n\n // correctly, we do too.\n\n let arch = \"x86_64\";\n\n let llvm_target = super::apple_base::macos_llvm_target(&arch);\n\n\n\n Ok(Target {\n\n llvm_target,\n\n target_os: \"macos\".to_string(),\n\n target_env: String::new(),\n\n target_vendor: \"apple\".to_string(),\n\n arch: arch.to_string(),\n\n linker_flavor: LinkerFlavor::Ld64,\n\n options: base,\n\n })\n\n}\n", "file_path": "crates/mun_target/src/spec/x86_64_apple_darwin.rs", "rank": 68, "score": 145170.49232961194 }, { "content": "fn lhs(p: &mut Parser) -> Option<CompletedMarker> {\n\n let m;\n\n let kind = match p.current() {\n\n T![-] | T![!] 
=> {\n\n m = p.start();\n\n p.bump_any();\n\n PREFIX_EXPR\n\n }\n\n _ => {\n\n let lhs = atom_expr(p)?;\n\n return Some(postfix_expr(p, lhs));\n\n }\n\n };\n\n expr_bp(p, 255);\n\n Some(m.complete(p, kind))\n\n}\n\n\n", "file_path": "crates/mun_syntax/src/parsing/grammar/expressions.rs", "rank": 69, "score": 144320.774615378 }, { "content": "pub fn target() -> TargetResult {\n\n let mut base = super::linux_base::opts();\n\n base.cpu = \"x86-64\".to_string();\n\n\n\n Ok(Target {\n\n llvm_target: \"x86_64-unknown-linux-gnu\".to_string(),\n\n target_os: \"linux\".to_string(),\n\n target_env: \"gnu\".to_string(),\n\n target_vendor: \"unknown\".to_string(),\n\n arch: \"x86_64\".to_string(),\n\n linker_flavor: LinkerFlavor::Ld,\n\n options: base,\n\n })\n\n}\n", "file_path": "crates/mun_target/src/spec/x86_64_unknown_linux_gnu.rs", "rank": 70, "score": 144142.70057801588 }, { "content": "pub fn target() -> TargetResult {\n\n let mut base = super::windows_msvc_base::opts();\n\n base.cpu = \"x86-64\".to_string();\n\n\n\n Ok(Target {\n\n llvm_target: \"x86_64-pc-windows-msvc\".to_string(),\n\n target_os: \"windows\".to_string(),\n\n target_env: \"msvc\".to_string(),\n\n target_vendor: \"pc\".to_string(),\n\n arch: \"x86_64\".to_string(),\n\n linker_flavor: LinkerFlavor::Msvc,\n\n options: base,\n\n })\n\n}\n", "file_path": "crates/mun_target/src/spec/x86_64_pc_windows_msvc.rs", "rank": 71, "score": 144142.70057801588 }, { "content": "fn runtime(matches: &ArgMatches) -> Result<MunRuntime, failure::Error> {\n\n let mut builder = RuntimeBuilder::new(\n\n matches.value_of(\"LIBRARY\").unwrap(), // Safe because its a required arg\n\n );\n\n\n\n if let Some(delay) = matches.value_of(\"delay\") {\n\n let delay: u64 = delay.parse()?;\n\n builder.set_delay(Duration::from_millis(delay));\n\n }\n\n\n\n builder.spawn()\n\n}\n", "file_path": "crates/mun/src/main.rs", "rank": 72, "score": 142722.6986416026 }, { "content": "pub fn create_with_target(target: &spec::Target) -> Box<dyn 
Linker> {\n\n match target.linker_flavor {\n\n LinkerFlavor::Ld => Box::new(LdLinker::new(target)),\n\n LinkerFlavor::Ld64 => Box::new(Ld64Linker::new(target)),\n\n LinkerFlavor::Msvc => Box::new(MsvcLinker::new(target)),\n\n }\n\n}\n\n\n", "file_path": "crates/mun_codegen/src/code_gen/linker.rs", "rank": 73, "score": 137042.4954119059 }, { "content": "fn compiler_options(matches: &ArgMatches) -> Result<mun_compiler::CompilerOptions, failure::Error> {\n\n let optimization_lvl = match matches.value_of(\"opt-level\") {\n\n Some(\"0\") => mun_compiler::OptimizationLevel::None,\n\n Some(\"1\") => mun_compiler::OptimizationLevel::Less,\n\n None | Some(\"2\") => mun_compiler::OptimizationLevel::Default,\n\n Some(\"3\") => mun_compiler::OptimizationLevel::Aggressive,\n\n _ => return Err(format_err!(\"Only optimization levels 0-3 are supported\")),\n\n };\n\n\n\n Ok(mun_compiler::CompilerOptions {\n\n input: PathOrInline::Path(matches.value_of(\"INPUT\").unwrap().into()), // Safe because its a required arg\n\n target: matches.value_of(\"target\").map(|t| t.to_string()),\n\n optimization_lvl,\n\n out_dir: None,\n\n })\n\n}\n\n\n", "file_path": "crates/mun/src/main.rs", "rank": 74, "score": 136780.2508005245 }, { "content": " pub fn get(&self, name: &Name) -> Option<&Resolution> {\n\n self.items.get(name).or_else(|| BUILTIN_SCOPE.get(name))\n\n }\n\n}\n\n\n\npub(crate) fn module_scope_query(db: &impl HirDatabase, file_id: FileId) -> Arc<ModuleScope> {\n\n let mut scope = ModuleScope::default();\n\n let defs = db.module_data(file_id);\n\n for def in defs.definitions() {\n\n match def {\n\n ModuleDef::Function(f) => {\n\n scope.items.insert(\n\n f.name(db),\n\n Resolution {\n\n def: PerNs::values(*def),\n\n },\n\n );\n\n }\n\n _ => {}\n\n }\n\n }\n\n Arc::new(scope)\n\n}\n", "file_path": "crates/mun_hir/src/name_resolution.rs", "rank": 75, "score": 127738.17118065675 }, { "content": "\n\nstatic BUILTIN_SCOPE: Lazy<FxHashMap<Name, Resolution>> = Lazy::new(|| {\n\n 
BuiltinType::ALL\n\n .iter()\n\n .map(|(name, ty)| {\n\n (\n\n name.clone(),\n\n Resolution {\n\n def: PerNs::types(ty.clone().into()),\n\n },\n\n )\n\n })\n\n .collect()\n\n});\n\n\n\nimpl ModuleScope {\n\n pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, &'a Resolution)> + 'a {\n\n //FIXME: shadowing\n\n self.items.iter().chain(BUILTIN_SCOPE.iter())\n\n }\n", "file_path": "crates/mun_hir/src/name_resolution.rs", "rank": 76, "score": 127726.92054530475 }, { "content": "mod per_ns;\n\n\n\npub use self::per_ns::{Namespace, PerNs};\n\nuse crate::{code_model::BuiltinType, FileId, HirDatabase, ModuleDef, Name};\n\nuse once_cell::sync::Lazy;\n\nuse rustc_hash::FxHashMap;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Default)]\n\npub struct Resolution {\n\n /// None for unresolved\n\n pub def: PerNs<ModuleDef>,\n\n // /// ident by which this is imported into local scope.\n\n // pub import: Option<ImportId>,\n\n}\n\n\n\n#[derive(Debug, Default, PartialEq, Eq, Clone)]\n\npub struct ModuleScope {\n\n items: FxHashMap<Name, Resolution>,\n\n}\n", "file_path": "crates/mun_hir/src/name_resolution.rs", "rank": 77, "score": 127722.9582275894 }, { "content": "use crate::arena::map::ArenaMap;\n\nuse crate::arena::{Arena, RawId};\n\n///! HIR for references to types. These paths are not yet resolved. 
They can be directly created\n\n/// from an `ast::TypeRef`, without further queries.\n\nuse crate::Path;\n\nuse mun_syntax::ast;\n\nuse mun_syntax::AstPtr;\n\nuse rustc_hash::FxHashMap;\n\nuse std::ops::Index;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct TypeRefId(RawId);\n\nimpl_arena_id!(TypeRefId);\n\n\n\n/// Compare ty::Ty\n\n#[derive(Clone, PartialEq, Eq, Hash, Debug)]\n\npub enum TypeRef {\n\n Path(Path),\n\n Empty,\n\n Error,\n", "file_path": "crates/mun_hir/src/type_ref.rs", "rank": 78, "score": 127558.42288608474 }, { "content": " type_refs: Arena<TypeRefId, TypeRef>,\n\n}\n\n\n\nimpl Index<TypeRefId> for TypeRefMap {\n\n type Output = TypeRef;\n\n\n\n fn index(&self, pat: TypeRefId) -> &Self::Output {\n\n &self.type_refs[pat]\n\n }\n\n}\n\n\n\n#[derive(Default, Debug, Eq, PartialEq)]\n\npub(crate) struct TypeRefBuilder {\n\n map: TypeRefMap,\n\n source_map: TypeRefSourceMap,\n\n}\n\n\n\nimpl TypeRefBuilder {\n\n fn alloc_type_ref(&mut self, type_ref: TypeRef, ptr: AstPtr<ast::TypeRef>) -> TypeRefId {\n\n let id = self.map.type_refs.alloc(type_ref);\n", "file_path": "crates/mun_hir/src/type_ref.rs", "rank": 79, "score": 127554.36111457921 }, { "content": " self.source_map.type_ref_map.insert(ptr, id);\n\n self.source_map.type_ref_map_back.insert(id, ptr);\n\n id\n\n }\n\n\n\n pub fn from_node_opt(&mut self, node: Option<&ast::TypeRef>) -> TypeRefId {\n\n if let Some(node) = node {\n\n self.from_node(node)\n\n } else {\n\n self.error()\n\n }\n\n }\n\n\n\n pub fn from_node(&mut self, node: &ast::TypeRef) -> TypeRefId {\n\n use mun_syntax::ast::TypeRefKind::*;\n\n let ptr = AstPtr::new(node);\n\n let type_ref = match node.kind() {\n\n PathType(path) => path\n\n .path()\n\n .and_then(Path::from_ast)\n", "file_path": "crates/mun_hir/src/type_ref.rs", "rank": 80, "score": 127553.98104772712 }, { "content": " .map(TypeRef::Path)\n\n .unwrap_or(TypeRef::Error),\n\n };\n\n self.alloc_type_ref(type_ref, ptr)\n\n }\n\n\n\n 
pub fn unit(&mut self) -> TypeRefId {\n\n self.map.type_refs.alloc(TypeRef::Empty)\n\n }\n\n\n\n pub fn error(&mut self) -> TypeRefId {\n\n self.map.type_refs.alloc(TypeRef::Error)\n\n }\n\n\n\n pub fn finish(self) -> (TypeRefMap, TypeRefSourceMap) {\n\n (self.map, self.source_map)\n\n }\n\n}\n", "file_path": "crates/mun_hir/src/type_ref.rs", "rank": 81, "score": 127544.8217969635 }, { "content": "}\n\n\n\n#[derive(Default, Debug, Eq, PartialEq)]\n\npub struct TypeRefSourceMap {\n\n type_ref_map: FxHashMap<AstPtr<ast::TypeRef>, TypeRefId>,\n\n type_ref_map_back: ArenaMap<TypeRefId, AstPtr<ast::TypeRef>>,\n\n}\n\n\n\nimpl TypeRefSourceMap {\n\n pub(crate) fn type_ref_syntax(&self, expr: TypeRefId) -> Option<AstPtr<ast::TypeRef>> {\n\n self.type_ref_map_back.get(expr).cloned()\n\n }\n\n\n\n pub(crate) fn syntax_type_ref(&self, ptr: AstPtr<ast::TypeRef>) -> Option<TypeRefId> {\n\n self.type_ref_map.get(&ptr).cloned()\n\n }\n\n}\n\n\n\n#[derive(Default, Debug, Eq, PartialEq)]\n\npub struct TypeRefMap {\n", "file_path": "crates/mun_hir/src/type_ref.rs", "rank": 82, "score": 127543.19889216461 }, { "content": "use crate::{BinaryOp, Ty, TypeCtor};\n\n\n\npub(super) fn binary_op_rhs_expectation(_op: BinaryOp, lhs_ty: Ty) -> Ty {\n\n lhs_ty\n\n}\n\n\n\npub(super) fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty {\n\n match op {\n\n BinaryOp::ArithOp(_) => rhs_ty,\n\n BinaryOp::CmpOp(_) | BinaryOp::LogicOp(_) => Ty::simple(TypeCtor::Bool),\n\n BinaryOp::Assignment => Ty::Empty,\n\n }\n\n}\n", "file_path": "crates/mun_hir/src/ty/op.rs", "rank": 83, "score": 127339.4221533384 }, { "content": "/// Parse given tokens into the given sink as a rust file.\n\nfn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {\n\n parse_from_tokens(token_source, tree_sink, grammar::root);\n\n}\n", "file_path": "crates/mun_syntax/src/parsing.rs", "rank": 84, "score": 127129.39731557883 }, { "content": "use crate::{\n\n arena::map::ArenaMap,\n\n 
code_model::DefWithBody,\n\n diagnostics::DiagnosticSink,\n\n expr,\n\n expr::{Body, Expr, ExprId, Literal, Pat, PatId, Statement},\n\n name_resolution::Namespace,\n\n resolve::{Resolution, Resolver},\n\n ty::infer::diagnostics::InferenceDiagnostic,\n\n ty::infer::type_variable::TypeVariableTable,\n\n ty::lower::LowerDiagnostic,\n\n ty::op,\n\n ty::{Ty, TypableDef},\n\n type_ref::TypeRefId,\n\n Function, HirDatabase, Path, TypeCtor,\n\n};\n\nuse std::mem;\n\nuse std::ops::Index;\n\nuse std::sync::Arc;\n\n\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 85, "score": 127047.07863480822 }, { "content": " })\n\n }\n\n for (&arg, param_ty) in args.iter().zip(param_tys.iter()) {\n\n self.infer_expr(arg, &Expectation::has_type(param_ty.clone()));\n\n }\n\n }\n\n\n\n fn infer_path_expr(&mut self, resolver: &Resolver, path: &Path, id: ExprOrPatId) -> Option<Ty> {\n\n let resolution = match resolver\n\n .resolve_path_without_assoc_items(self.db, path)\n\n .take_values()\n\n {\n\n Some(resolution) => resolution,\n\n None => {\n\n self.diagnostics\n\n .push(InferenceDiagnostic::UnresolvedValue { id });\n\n return None;\n\n }\n\n };\n\n\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 86, "score": 127046.1450368421 }, { "content": " _ => Ty::Unknown,\n\n },\n\n Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected),\n\n Expr::Call { callee: call, args } => self.infer_call(&tgt_expr, call, args, expected),\n\n Expr::Literal(lit) => match lit {\n\n Literal::String(_) => Ty::Unknown,\n\n Literal::Bool(_) => Ty::Unknown,\n\n Literal::Int(_) => Ty::simple(TypeCtor::Int),\n\n Literal::Float(_) => Ty::simple(TypeCtor::Float),\n\n },\n\n _ => Ty::Unknown,\n\n // Expr::UnaryOp { expr: _, op: _ } => {}\n\n // Expr::Block { statements: _, tail: _ } => {}\n\n };\n\n\n\n if expected.ty != Ty::Unknown && ty != Ty::Unknown && ty != expected.ty {\n\n self.diagnostics.push(InferenceDiagnostic::MismatchedTypes {\n\n expected: 
expected.ty.clone(),\n\n found: ty.clone(),\n\n id: tgt_expr,\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 87, "score": 127041.80044656599 }, { "content": " type_of_pat: pat_types,\n\n diagnostics: self.diagnostics,\n\n }\n\n }\n\n\n\n fn infer_block(\n\n &mut self,\n\n statements: &[Statement],\n\n tail: Option<ExprId>,\n\n expected: &Expectation,\n\n ) -> Ty {\n\n for stmt in statements {\n\n match stmt {\n\n Statement::Let {\n\n pat,\n\n type_ref,\n\n initializer,\n\n } => {\n\n let decl_ty = type_ref\n\n .as_ref()\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 88, "score": 127037.62935324149 }, { "content": " }\n\n\n\n result.ty\n\n }\n\n}\n\n\n\nimpl<'a, D: HirDatabase> InferenceResultBuilder<'a, D> {\n\n /// Collect all the parameter patterns from the body. After calling this method the `return_ty`\n\n /// will have a valid value, also all parameters are added inferred.\n\n fn infer_signature(&mut self) {\n\n let body = Arc::clone(&self.body); // avoid borrow checker problem\n\n\n\n // Iterate over all the parameters and associated types of the body and infer the types of\n\n // the parameters.\n\n for (pat, type_ref) in body.params().iter() {\n\n let ty = self.resolve_type(type_ref);\n\n self.infer_pat(*pat, ty);\n\n }\n\n\n\n // Resolve the return type\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 89, "score": 127036.93891167185 }, { "content": " match resolution {\n\n Resolution::LocalBinding(pat) => {\n\n let ty = self.type_of_pat.get(pat)?.clone();\n\n //let ty = self.resolve_ty_as_possible(&mut vec![], ty);\n\n Some(ty)\n\n }\n\n Resolution::Def(def) => {\n\n let typable: Option<TypableDef> = def.into();\n\n let typable = typable?;\n\n let ty = self.db.type_for_def(typable, Namespace::Values);\n\n Some(ty)\n\n }\n\n }\n\n }\n\n\n\n fn resolve_all(mut self) -> InferenceResult {\n\n // FIXME resolve obligations as well (use Guidance if necessary)\n\n //let mut tv_stack = Vec::new();\n\n let mut expr_types = 
mem::replace(&mut self.type_of_expr, ArenaMap::default());\n\n for (expr, ty) in expr_types.iter_mut() {\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 90, "score": 127033.98022233717 }, { "content": " }\n\n\n\n /// Infer the type of the given expression. Returns the type of the expression.\n\n fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {\n\n let body = Arc::clone(&self.body); // avoid borrow checker problem\n\n let mut ty = match &body[tgt_expr] {\n\n Expr::Missing => Ty::Unknown,\n\n Expr::Path(p) => {\n\n // FIXME this could be more efficient...\n\n let resolver = expr::resolver_for_expr(self.body.clone(), self.db, tgt_expr);\n\n self.infer_path_expr(&resolver, p, tgt_expr.into())\n\n .unwrap_or(Ty::Unknown)\n\n }\n\n Expr::BinaryOp { lhs, rhs, op } => match op {\n\n Some(op) => {\n\n let lhs_ty = self.infer_expr(*lhs, &Expectation::none());\n\n let rhs_expected = op::binary_op_rhs_expectation(*op, lhs_ty.clone());\n\n let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expected));\n\n op::binary_op_return_ty(*op, rhs_ty)\n\n }\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 91, "score": 127031.03511738029 }, { "content": " /// Given a `TypeRefId`, resolve the reference to an actual `Ty`. 
If the the type could not\n\n /// be resolved an error is emitted and `Ty::Error` is returned.\n\n fn resolve_type(&mut self, type_ref: &TypeRefId) -> Ty {\n\n // Try to resolve the type from the Hir\n\n let result = Ty::from_hir(\n\n self.db,\n\n // FIXME use right resolver for block\n\n &self.resolver,\n\n &self.body.type_refs(),\n\n type_ref,\n\n );\n\n\n\n // Convert the diagnostics from resolving the type reference\n\n for diag in result.diagnostics {\n\n let diag = match diag {\n\n LowerDiagnostic::UnresolvedType { id } => {\n\n InferenceDiagnostic::UnresolvedType { id }\n\n }\n\n };\n\n self.diagnostics.push(diag);\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 92, "score": 127029.21272550187 }, { "content": " CannotApplyBinaryOp, ExpectedFunction, MismatchedType, ParameterCountMismatch,\n\n };\n\n use crate::{\n\n code_model::src::HasSource,\n\n diagnostics::{DiagnosticSink, UnresolvedType, UnresolvedValue},\n\n ty::infer::ExprOrPatId,\n\n type_ref::TypeRefId,\n\n ExprId, Function, HirDatabase, Ty,\n\n };\n\n\n\n #[derive(Debug, PartialEq, Eq, Clone)]\n\n pub(super) enum InferenceDiagnostic {\n\n UnresolvedValue {\n\n id: ExprOrPatId,\n\n },\n\n UnresolvedType {\n\n id: TypeRefId,\n\n },\n\n ExpectedFunction {\n\n id: ExprId,\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 93, "score": 127027.34802485694 }, { "content": " });\n\n ty = expected.ty.clone();\n\n }\n\n\n\n self.set_expr_type(tgt_expr, ty.clone());\n\n ty\n\n }\n\n\n\n /// Inferences the type of a call expression.\n\n fn infer_call(\n\n &mut self,\n\n tgt_expr: &ExprId,\n\n callee: &ExprId,\n\n args: &Vec<ExprId>,\n\n _expected: &Expectation,\n\n ) -> Ty {\n\n let callee_ty = self.infer_expr(*callee, &Expectation::none());\n\n let (param_tys, ret_ty) = match callee_ty.callable_sig(self.db) {\n\n Some(sig) => (sig.params().to_vec(), sig.ret().clone()),\n\n None => {\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 94, "score": 127027.07717263879 }, { "content": " 
.map(|tr| self.resolve_type(tr))\n\n .unwrap_or(Ty::Unknown);\n\n //let decl_ty = self.insert_type_vars(decl_ty);\n\n let ty = if let Some(expr) = initializer {\n\n self.infer_expr(*expr, &Expectation::has_type(decl_ty))\n\n } else {\n\n decl_ty\n\n };\n\n\n\n self.infer_pat(*pat, ty);\n\n }\n\n Statement::Expr(expr) => {\n\n self.infer_expr(*expr, &Expectation::none());\n\n }\n\n }\n\n }\n\n if let Some(expr) = tail {\n\n self.infer_expr(expr, expected)\n\n } else {\n\n Ty::Empty\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 95, "score": 127026.08055311168 }, { "content": " pub(super) fn add_to(\n\n &self,\n\n db: &impl HirDatabase,\n\n owner: Function,\n\n sink: &mut DiagnosticSink,\n\n ) {\n\n match self {\n\n InferenceDiagnostic::UnresolvedValue { id } => {\n\n let file = owner.source(db).file_id;\n\n let body = owner.body_source_map(db);\n\n let expr = match id {\n\n ExprOrPatId::ExprId(id) => body.expr_syntax(*id),\n\n ExprOrPatId::PatId(id) => {\n\n body.pat_syntax(*id).map(|ptr| ptr.syntax_node_ptr())\n\n }\n\n }\n\n .unwrap();\n\n\n\n sink.push(UnresolvedValue { file, expr });\n\n }\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 96, "score": 127025.24367655341 }, { "content": " self.return_ty = self.resolve_type(&body.ret_type())\n\n }\n\n\n\n /// Record the type of the specified pattern and all sub-patterns.\n\n fn infer_pat(&mut self, pat: PatId, ty: Ty) {\n\n let body = Arc::clone(&self.body); // avoid borrow checker problem\n\n match &body[pat] {\n\n Pat::Bind { .. 
} => {\n\n self.set_pat_type(pat, ty);\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n /// Infer the types of all the expressions and sub-expressions in the body.\n\n fn infer_body(&mut self) {\n\n self.infer_expr(\n\n self.body.body_expr(),\n\n &Expectation::has_type(self.return_ty.clone()),\n\n );\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 97, "score": 127024.78114165833 }, { "content": " //let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown));\n\n if *ty == Ty::Unknown {\n\n self.report_expr_inference_failure(expr);\n\n }\n\n //*ty = resolved;\n\n }\n\n let mut pat_types = mem::replace(&mut self.type_of_pat, ArenaMap::default());\n\n for (pat, ty) in pat_types.iter_mut() {\n\n //let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown));\n\n if *ty == Ty::Unknown {\n\n self.report_pat_inference_failure(pat);\n\n }\n\n //*ty = resolved;\n\n }\n\n InferenceResult {\n\n // method_resolutions: self.method_resolutions,\n\n // field_resolutions: self.field_resolutions,\n\n // variant_resolutions: self.variant_resolutions,\n\n // assoc_resolutions: self.assoc_resolutions,\n\n type_of_expr: expr_types,\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 98, "score": 127023.55544471041 }, { "content": " self.diagnostics\n\n .push(InferenceDiagnostic::ExpectedFunction {\n\n id: *callee,\n\n found: callee_ty,\n\n });\n\n (Vec::new(), Ty::Unknown)\n\n }\n\n };\n\n self.check_call_arguments(tgt_expr, args, &param_tys);\n\n ret_ty\n\n }\n\n\n\n /// Checks whether the specified passed arguments match the parameters of a callable definition.\n\n fn check_call_arguments(&mut self, tgt_expr: &ExprId, args: &[ExprId], param_tys: &[Ty]) {\n\n if args.len() != param_tys.len() {\n\n self.diagnostics\n\n .push(InferenceDiagnostic::ParameterCountMismatch {\n\n id: *tgt_expr,\n\n found: args.len(),\n\n expected: param_tys.len(),\n", "file_path": "crates/mun_hir/src/ty/infer.rs", "rank": 99, "score": 
127022.39332213819 } ]
Rust
src/algebra.rs
doxxx/raytracer
21c92437ffd07d0434f8e095ce787fe23f42658c
/* Transcribed from http://cosinekitty.com/raytrace/rtsource.zip. Original written by Don Cross. Adapted to Rust by Gordon Tyler. */ #![allow(non_snake_case)] use num_complex::Complex; use std::f64::consts::PI; const TOLERANCE: f64 = 1.0e-8; const TWO_PI: f64 = 2.0 * PI; fn complex(re: f64) -> Complex<f64> { Complex { re, im: 0.0 } } fn complex2(re: f64, im: f64) -> Complex<f64> { Complex { re, im } } fn is_zero(c: Complex<f64>) -> bool { c.re.abs() < TOLERANCE && c.im.abs() < TOLERANCE } fn filter_real(c: Vec<Complex<f64>>) -> Vec<f64> { c.into_iter().filter(|c| c.im.abs() < TOLERANCE).map(|c| c.re).collect() } fn cbrt(c: Complex<f64>, n: isize) -> Complex<f64> { let rho = c.norm().powf(1.0 / 3.0); let theta = ((TWO_PI * n as f64) + c.arg()) / 3.0; complex2( rho * theta.cos(), rho * theta.sin(), ) } pub fn solve_quadratic(a: Complex<f64>, b: Complex<f64>, c: Complex<f64>) -> Vec<Complex<f64>> { if is_zero(a) { if is_zero(b) { Vec::with_capacity(0) } else { vec![-c / b] } } else { let radicand = b * b - 4.0 * a * c; if is_zero(radicand) { vec![-b / (2.0 * a)] } else { let r = radicand.sqrt(); let d = 2.0 * a; vec![(-b + r) / d, (-b - r) / d] } } } pub fn solve_cubic(a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>) -> Vec<Complex<f64>> { if is_zero(a) { solve_quadratic(b, c, d) } else { let b = b / a; let c = c / a; let d = d / a; let S = b / 3.0; let D = c / 3.0 - S * S; let E = S * S * S + (d - S * c) / 2.0; let F_root = (E * E + D * D * D).sqrt(); let mut F = -F_root - E; if is_zero(F) { F = F_root - E; } (0..3).into_iter().map(|i| { let G = cbrt(F, i); G - D / G - S }).collect() } } pub fn solve_quartic( a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>, e: Complex<f64>, ) -> Vec<Complex<f64>> { if is_zero(a) { solve_cubic(b, c, d, e) } else { let b = b / a; let c = c / a; let d = d / a; let e = e / a; let b2 = b * b; let b3 = b * b2; let b4 = b * b3; let alpha = (-3.0 / 8.0) * b2 + c; let beta = b3 / 8.0 - b * c / 2.0 + d; 
let gamma = (-3.0 / 256.0) * b4 + b2 * c / 16.0 - b * d / 4.0 + e; let alpha2 = alpha * alpha; let t = -b / 4.0; if is_zero(beta) { let rad = (alpha2 - 4.0 * gamma).sqrt(); let r1 = ((-alpha + rad) / 2.0).sqrt(); let r2 = ((-alpha - rad) / 2.0).sqrt(); vec![t + r1, t - r1, t + r2, t - r2] } else { let alpha3 = alpha * alpha2; let P = -(alpha2 / 12.0 + gamma); let Q = -alpha3 / 108.0 + alpha * gamma / 3.0 - beta * beta / 8.0; let R = -Q / 2.0 + (Q * Q / 4.0 + P * P * P / 27.0).sqrt(); let U = cbrt(R, 0); let mut y = (-5.0 / 6.0) * alpha + U; if is_zero(U) { y -= cbrt(Q, 0); } else { y -= P / (3.0 * U); } let W = (alpha + 2.0 * y).sqrt(); let r1 = (-(3.0 * alpha + 2.0 * y + 2.0 * beta / W)).sqrt(); let r2 = (-(3.0 * alpha + 2.0 * y - 2.0 * beta / W)).sqrt(); vec![ t + (W - r1) / 2.0, t + (W + r1) / 2.0, t + (-W - r2) / 2.0, t + (-W + r2) / 2.0, ] } } } pub fn solve_quartic_f64(a: f64, b: f64, c: f64, d: f64, e: f64) -> Vec<f64> { filter_real(solve_quartic( complex(a), complex(b), complex(c), complex(d), complex(e), )) } #[cfg(test)] mod tests { use super::*; fn check_roots(known: &[Complex<f64>], found: &[Complex<f64>]) { const MAX_ROOTS: usize = 4; assert!(found.len() <= MAX_ROOTS, "num roots out of bounds: {}", found.len()); let mut used = [false, false, false, false]; for k in 0..found.len() { let mut ok = false; for f in 0..found.len() { if !used[f] && is_zero(known[k] - found[f]) { ok = true; used[f] = true; break; } } if !ok { panic!( "Solver produced incorrect root value(s)\n\ Known correct roots: {:?}\n\ Found roots: {:?}", known, found ); } } } fn validate_polynomial(order: usize, poly: &[Complex<f64>], root: Complex<f64>) { let mut power = complex2(1.0, 0.0); let mut sum = complex2(0.0, 0.0); for i in 0..order { sum += poly[i] * power; power *= root; } assert!(is_zero(sum), "invalid polynomial"); } fn test_known_quadratic_roots(M: Complex<f64>, K: Complex<f64>, L: Complex<f64>) { let a = M; let b = -M * (K + L); let c = M * K * L; let poly = [c, b, a]; 
validate_polynomial(3, &poly, K); validate_polynomial(3, &poly, L); let found = solve_quadratic(a, b, c); let expected_roots = if is_zero(K - L) { 1 } else { 2 }; assert_eq!(expected_roots, found.len()); let known = [K, L]; check_roots(&known, &found); } fn test_known_cubic_roots(M: Complex<f64>, K: Complex<f64>, L: Complex<f64>, N: Complex<f64>) { let a = M; let b = -M*(K+L+N); let c = M*(K*L + N*K + N*L); let d = -M*K*L*N; let poly = [d, c, b, a]; validate_polynomial(4, &poly, K); validate_polynomial(4, &poly, L); validate_polynomial(4, &poly, N); let found = solve_cubic(a, b, c, d); let expected_roots = 3; assert_eq!(expected_roots, found.len()); let known = [K, L, N]; check_roots(&known, &found); } fn test_known_quartic_roots(m: Complex<f64>, a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>) { let A = m; let B = -m*(a + b + c + d); let C = m*(a*b + c*d + (a + b)*(c + d)); let D = -m*(c*d*(a + b) + a*b*(c + d)); let E = m*a*b*c*d; let poly = [E, D, C, B, A]; validate_polynomial(5, &poly, a); validate_polynomial(5, &poly, b); validate_polynomial(5, &poly, c); validate_polynomial(5, &poly, d); let found = solve_quartic(A, B, C, D, E); let expected_roots = 4; assert_eq!(expected_roots, found.len()); let known = [a, b, c, d]; check_roots(&known, &found); } #[test] pub fn quadratic() { test_known_quadratic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), complex2(-2.5,7.7)); test_known_quadratic_roots(complex2(5.5,4.4), complex2(8.2,-2.1), complex2(8.2,-2.1)); } #[test] pub fn cubic() { test_known_cubic_roots(complex(1.0), complex(2.0), complex(3.0), complex(4.0)); test_known_cubic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), complex2(-2.5,7.7), complex2(53.0,-23.9)); } #[test] pub fn quartic() { test_known_quartic_roots(complex(1.0), complex(2.0), complex(3.0), complex(4.0), complex(5.0)); test_known_quartic_roots(complex(1.0), complex(3.2), complex(2.5), complex(53.0), complex(-8.7)); test_known_quartic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), 
complex2(-2.5,7.7), complex2(53.0,-23.9), complex2(-9.2,-8.7)); } }
/* Transcribed from http://cosinekitty.com/raytrace/rtsource.zip. Original written by Don Cross. Adapted to Rust by Gordon Tyler. */ #![allow(non_snake_case)] use num_complex::Complex; use std::f64::consts::PI; const TOLERANCE: f64 = 1.0e-8; const TWO_PI: f64 = 2.0 * PI; fn complex(re: f64) -> Complex<f64> { Complex { re, im: 0.0 } } fn complex2(re: f64, im: f64) -> Complex<f64> { Complex { re, im } } fn is_zero(c: Complex<f64>) -> bool { c.re.abs() < TOLERANCE && c.im.abs() < TOLERANCE } fn filter_real(c: Vec<Complex<f64>>) -> Vec<f64> { c.into_iter().filter(|c| c.im.abs() < TOLERANCE).map(|c| c.re).collect() } fn cbrt(c: Complex<f64>, n: isize) -> Complex<f64> { let rho = c.norm().powf(1.0 / 3.0); let theta = ((TWO_PI * n as f64) + c.arg()) / 3.0; complex2( rho * theta.cos(), rho * theta.sin(), ) } pub fn solve_quadratic(a: Complex<f64>, b: Complex<f64>, c: Complex<f64>) -> Vec<Complex<f64>> { if is_zero(a) { if is_zero(b) { Vec::with_capacity(0) } else { vec![-c / b] } } else { let radicand = b * b - 4.0 * a * c; if is_zero(radicand) { vec![-b / (2.0 * a)] } else { let r = radicand.sqrt(); let d = 2.0 * a; vec![(-b + r) / d, (-b - r) / d] } } } pub fn solve_cubic(a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>) -> Vec<Complex<f64>> { if is_zero(a) { solve_quadratic(b, c, d) } else { let b = b / a; let c = c / a; let d = d / a; let S = b / 3.0; let D = c / 3.0 - S * S; let E = S * S * S + (d - S * c) / 2.0; let F_root = (E * E + D * D * D).sqrt(); let mut F = -F_root - E; if is_zero(F) { F = F_root - E; } (0..3).into_iter().map(|i| { let G = cbrt(F, i); G - D / G - S }).collect() } } pub fn solve_quartic( a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>, e: Complex<f64>, ) -> Vec<Complex<f64>> { if is_zero(a) { solve_cubic(b, c, d, e) } else { let b = b / a; let c = c / a; let d = d / a; let e = e / a; let b2 = b * b; let b3 = b * b2; let b4 = b * b3; let alpha = (-3.0 / 8.0) * b2 + c; let beta = b3 / 8.0 - b * c / 2.0 + d; 
let gamma = (-3.0 / 256.0) * b4 + b2 * c / 16.0 - b * d / 4.0 + e; let alpha2 = alpha * alpha; let t = -b / 4.0; if is_zero(beta) { let rad = (alpha2 - 4.0 * gamma).sqrt(); let r1 = ((-alpha + rad) / 2.0).sqrt(); let r2 = ((-alpha - rad) / 2.0).sqrt(); vec![t + r1, t - r1, t + r2, t - r2] } else { let alpha3 = alpha * alpha2; let P = -(alpha2 / 12.0 + gamma); let Q = -alpha3 / 108.0 + alpha * gamma / 3.0 - beta * beta / 8.0; let R = -Q / 2.0 + (Q * Q / 4.0 + P * P * P / 27.0).sqrt(); let U = cbrt(R, 0); let mut y = (-5.0 / 6.0) * alpha + U; if is_zero(U) { y -= cbrt(Q, 0); } else { y -= P / (3.0 * U); } let W = (alpha + 2.0 * y).sqrt(); let r1 = (-(3.0 * alpha + 2.0 * y + 2.0 * beta / W)).sqrt(); let r2 = (-(3.0 * alpha + 2.0 * y - 2.0 * beta / W)).sqrt(); vec![ t + (W - r1) / 2.0, t + (W + r1) / 2.0, t + (-W - r2) / 2.0, t + (-W + r2) / 2.0, ] } } } pub fn solve_quartic_f64(a: f64, b: f64, c: f64, d: f64, e: f64) -> Vec<f64> { filter_real(solve_quartic( complex(a), complex(b), complex(c), complex(d), complex(e), )) } #[cfg(test)] mod tests { use super::*; fn check_roots(known: &[Complex<f64>], found: &[Complex<f64>]) { const MAX_ROOTS: usize = 4; assert!(found.len() <= MAX_ROOTS, "num roots out of bounds: {}", found.len()); let mut used = [false, false, false, false]; for k in 0..found.len() { let mut ok = false; for f in 0..found.len() { if !used[f] && is_zero(known[k] - found[f]) { ok = true; used[f] = true; break; } } if !ok { panic!( "Solver produced incorrect root value(s)\n\ Known correct roots: {:?}\n\ Found roots: {:?}", known, found ); } } } fn validate_polynomial(order: usize, poly: &[Complex<f64>], root: Complex<f64>) { let mut power = complex2(1.0, 0.0); let mut sum = complex2(0.0, 0.0); for i in 0..order { sum += poly[i] * power; power *= root; } assert!(is_zero(sum), "invalid polynomial"); } fn test_known_quadratic_roots(M: Complex<f64>, K: Complex<f64>, L: Complex<f64>) { let a = M; let b = -M * (K + L); let c = M * K * L; let poly = [c, b, a]; 
validate_polynomial(3, &poly, K); validate_polynomial(3, &poly, L); let found = solve_quadratic(a, b, c); let expected_roots = if is_zero(K - L) { 1 } else { 2 }; assert_eq!(expected_roots, found.len()); let known = [K, L]; check_roots(&known, &found); } fn test_known_cubic_roots(M: Complex<f64>, K: Complex<f64>, L: Complex<f64>, N: Complex<f64>) { let a = M; let b = -M*(K+L+N); let c = M*(K*L + N*K + N*L); let d = -M*K*L*N; let poly = [d, c, b, a]; validate_polynomial(4, &poly, K); validate_polynomial(4, &poly, L); validate_polynomial(4, &poly, N); let found = solve_cubic(a, b, c, d); let expected_roots = 3; assert_eq!(expected_roots, found.len()); let known = [K, L, N]; check_roots(&known, &found); } fn test_known_quartic_roots(m: Complex<f64>, a: Complex<f64>, b: Complex<f64>, c: Complex<f64>, d: Complex<f64>) { let A = m; let B = -m*(a + b + c + d); let C = m*(a*b + c*d + (a + b)*(c + d)); let D = -m*(c*d*(a + b) + a*b*(c + d)); let E = m*a*b*c*d; let poly = [E, D, C, B, A]; validate_polynomial(5, &poly, a); validate_polynomial(5, &poly, b); validate_polynomial(5, &poly, c); validate_polynomial(5, &poly, d); let found = solve_quartic(A, B, C, D, E); let expected_roots = 4; assert_eq!(expected_roots, found.len()); let known = [a, b, c, d]; check_roots(&known, &found); } #[test] pub fn quadratic() { test_known_quadratic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), complex2(-2.5,7.7)); test_known_quadratic_roots(complex2(5.5,4.4), complex2(8.2,-2.1), complex2(8.2,-2.1)); } #[test] pub fn cubic() { test_known_cubic_roots(complex(1.0), complex(2.0), complex(3.
complex(5.0)); test_known_quartic_roots(complex(1.0), complex(3.2), complex(2.5), complex(53.0), complex(-8.7)); test_known_quartic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), complex2(-2.5,7.7), complex2(53.0,-23.9), complex2(-9.2,-8.7)); } }
0), complex(4.0)); test_known_cubic_roots(complex2(-2.3,4.8), complex2(3.2,-4.1), complex2(-2.5,7.7), complex2(53.0,-23.9)); } #[test] pub fn quartic() { test_known_quartic_roots(complex(1.0), complex(2.0), complex(3.0), complex(4.0),
random
[ { "content": "fn solve_quadratic(a: f64, b: f64, c: f64) -> Option<(f64, f64)> {\n\n let discr = b * b - 4.0 * a * c;\n\n if discr < 0.0 {\n\n return None;\n\n } else if discr == 0.0 {\n\n let x = -0.5 * b / a;\n\n return Some((x, x));\n\n } else {\n\n let q = if b > 0.0 {\n\n -0.5 * (b + discr.sqrt())\n\n } else {\n\n -0.5 * (b - discr.sqrt())\n\n };\n\n Some((q / a, c / q))\n\n }\n\n}\n\n\n\nimpl Intersectable for Sphere {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection> {\n\n super::first_positive_intersection(self.intersection_intervals(ray))\n", "file_path": "src/shapes/sphere.rs", "rank": 8, "score": 160831.77459687885 }, { "content": "fn convert_render_result_to_image(renderbuf: &Vec<Vec<Color>>, num_samples: f64, imgbuf: &mut image::ImageBuffer<image::Rgb<u8>, Vec<u8>>) {\n\n for (x, y, pixel) in imgbuf.enumerate_pixels_mut() {\n\n let row = &renderbuf[y as usize];\n\n let c = (row[x as usize] / num_samples).gamma_2();\n\n *pixel = color_to_rgb(c);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 155406.64110857388 }, { "content": "pub fn first_intersection(intervals: Vec<Interval>) -> Option<Intersection> {\n\n intervals.into_iter().nth(0).map(|i| i.0)\n\n}\n\n\n", "file_path": "src/shapes/mod.rs", "rank": 10, "score": 140522.90298097557 }, { "content": "pub fn first_positive_intersection(intervals: Vec<Interval>) -> Option<Intersection> {\n\n intervals\n\n .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n\n .skip_while(|i| i.t < 0.0)\n\n .nth(0)\n\n}\n\n\n", "file_path": "src/shapes/mod.rs", "rank": 11, "score": 137480.9602047186 }, { "content": "pub fn skip_negative_intervals(intervals: Vec<Interval>) -> impl Iterator<Item = Interval> {\n\n intervals\n\n .into_iter()\n\n .skip_while(|Interval(a, b)| a.t < 0.0 && b.t < 0.0)\n\n}\n\n\n", "file_path": "src/shapes/mod.rs", "rank": 12, "score": 128085.77958920346 }, { "content": "fn xyrect(x0: f64, y0: f64, x1: f64, y1: f64, z: f64, reverse_normal: bool) -> XYRectangle 
{\n\n XYRectangle::new(\n\n Point::new((x1 - x0) / 2.0 + x0, (y1 - y0) / 2.0 + y0, z),\n\n x1 - x0,\n\n y1 - y0,\n\n reverse_normal,\n\n )\n\n}\n\n\n", "file_path": "src/shapes/cube.rs", "rank": 13, "score": 127097.58993152731 }, { "content": "fn zyrect(z0: f64, y0: f64, z1: f64, y1: f64, x: f64, reverse_normal: bool) -> ZYRectangle {\n\n ZYRectangle::new(\n\n Point::new(x, (y1 - y0) / 2.0 + y0, (z1 - z0) / 2.0 + z0),\n\n z1 - z0,\n\n y1 - y0,\n\n reverse_normal,\n\n )\n\n}\n\n\n\nimpl Intersectable for Cube {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection> {\n\n super::first_intersection(self.intersection_intervals(ray))\n\n }\n\n}\n\n\n\nimpl Shape for Cube {\n\n fn transform(&mut self, m: Matrix44f) {\n\n self.tx.transform(m);\n\n }\n\n\n", "file_path": "src/shapes/cube.rs", "rank": 14, "score": 127097.58993152731 }, { "content": "fn xzrect(x0: f64, z0: f64, x1: f64, z1: f64, y: f64, reverse_normal: bool) -> XZRectangle {\n\n XZRectangle::new(\n\n Point::new((x1 - x0) / 2.0 + x0, y, (z1 - z0) / 2.0 + z0),\n\n x1 - x0,\n\n z1 - z0,\n\n reverse_normal,\n\n )\n\n}\n\n\n", "file_path": "src/shapes/cube.rs", "rank": 15, "score": 127097.58993152731 }, { "content": "pub fn combine_transforms(transforms: Vec<Matrix44f>) -> Matrix44f {\n\n transforms.iter().fold(Matrix44f::identity(), |acc, &m| acc * m)\n\n}\n", "file_path": "src/sdl.rs", "rank": 16, "score": 123937.78830015319 }, { "content": "pub fn render<T>(options: Options, scene: Scene, progress: &mut T)\n\nwhere T: RenderProgress,\n\n{\n\n progress.render_started(&options);\n\n\n\n let mut renderbuf = alloc_render_buf(options.width, options.height);\n\n\n\n let context = Arc::new(RenderContext {\n\n options,\n\n scene,\n\n });\n\n\n\n for current_sample in 0..options.samples {\n\n progress.sample_started(&options);\n\n\n\n renderbuf.par_iter_mut().enumerate().for_each(|(y, row)| {\n\n row.iter_mut().enumerate().for_each(|(x, pixel)| {\n\n pixel.add(&color_at_pixel(&context, x as u32, y as u32));\n\n 
});\n\n });\n\n\n\n progress.sample_finished(&options, &renderbuf, current_sample + 1);\n\n }\n\n\n\n progress.render_finished(&options, &renderbuf, options.samples)\n\n}\n", "file_path": "src/system.rs", "rank": 17, "score": 109680.97680664869 }, { "content": "fn clamp(lo: f64, hi: f64, val: f64) -> f64 {\n\n lo.max(hi.min(val))\n\n}\n\n\n", "file_path": "src/materials/dielectric.rs", "rank": 18, "score": 102562.83413749296 }, { "content": "fn mix(a: Color, b: Color, v: f64) -> Color {\n\n a * (1.0 - v) + b * v\n\n}\n", "file_path": "src/texture.rs", "rank": 19, "score": 99442.72080535594 }, { "content": "pub fn transform_shape(mut shape: Box<dyn Shape>, transform: Option<Matrix44f>) -> Box<dyn Shape> {\n\n shape.transform(transform.unwrap_or(Matrix44f::identity()));\n\n shape\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 20, "score": 97428.53289439982 }, { "content": "fn plane_intersect(o: Point, n: Direction, ray: &Ray) -> Option<f64> {\n\n let denom = ray.direction.dot(n);\n\n if denom.abs() > 1e-6 {\n\n let w = o - ray.origin;\n\n let t = w.dot(n) / denom;\n\n Some(t)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\npub struct Plane {\n\n origin: Point,\n\n normal: Direction,\n\n reverse_normal: Direction,\n\n uv: (Direction, Direction),\n\n reverse_uv: (Direction, Direction),\n\n tx: Transformation,\n\n}\n\n\n", "file_path": "src/shapes/plane.rs", "rank": 22, "score": 90537.29879409926 }, { "content": "/// incident, normal, index of reflection -> reflection factor\n\nfn fresnel(incident: Direction, normal: Direction, ior: f64) -> f64 {\n\n let mut cos_i = clamp(-1.0, 1.0, incident.dot(normal));\n\n let mut eta_i = 1.0;\n\n let mut eta_t = ior;\n\n if cos_i > 0.0 {\n\n mem::swap(&mut eta_i, &mut eta_t);\n\n }\n\n let sin_t = eta_i / eta_t * (1.0 - cos_i * cos_i).max(0.0).sqrt();\n\n\n\n if sin_t >= 1.0 {\n\n // total internal reflection\n\n 1.0\n\n } else {\n\n let cos_t = (1.0 - sin_t * sin_t).max(0.0).sqrt();\n\n cos_i = cos_i.abs();\n\n let r_s = ((eta_t * cos_i) 
- (eta_i * cos_t)) / ((eta_t * cos_i) + (eta_i * cos_t));\n\n let r_p = ((eta_i * cos_i) - (eta_t * cos_t)) / ((eta_i * cos_i) + (eta_t * cos_t));\n\n (r_s * r_s + r_p * r_p) / 2.0\n\n }\n\n}\n", "file_path": "src/materials/dielectric.rs", "rank": 23, "score": 89576.13582032509 }, { "content": "fn alloc_render_buf(width: u32, height: u32) -> Vec<Vec<Color>> {\n\n let mut renderbuf: Vec<Vec<Color>> = Vec::with_capacity(height as usize);\n\n let mut renderbuf_row: Vec<Color> = Vec::with_capacity(width as usize);\n\n renderbuf_row.resize(width as usize, Color::black());\n\n renderbuf.resize(height as usize, renderbuf_row);\n\n renderbuf\n\n}\n\n\n", "file_path": "src/system.rs", "rank": 24, "score": 89489.74097098099 }, { "content": "fn format_duration(mut d: time::Duration) -> String {\n\n let mut s = String::new();\n\n let hours = d.num_hours();\n\n d = d - time::Duration::hours(hours);\n\n if hours > 0 {\n\n s += &format!(\"{}h \", hours);\n\n }\n\n let minutes = d.num_minutes();\n\n d = d - time::Duration::minutes(minutes);\n\n if minutes > 0 {\n\n s += &format!(\"{}m \", minutes);\n\n }\n\n let seconds = d.num_seconds();\n\n d = d - time::Duration::seconds(seconds);\n\n let milliseconds = d.num_milliseconds();\n\n if seconds > 0 {\n\n s += &format!(\"{}.{:03}s\", seconds, milliseconds);\n\n }\n\n s\n\n}\n", "file_path": "src/main.rs", "rank": 25, "score": 84304.02866307169 }, { "content": "fn write_render_result_to_file(options: &Options, filename: &str, renderbuf: &Vec<Vec<Color>>, current_sample: u16) {\n\n let mut imgbuf = image::RgbImage::new(options.width, options.height);\n\n convert_render_result_to_image(&renderbuf, (current_sample + 1) as f64, &mut imgbuf);\n\n\n\n let ref mut fout = File::create(filename).expect(\"Could not open output file\");\n\n image::ImageRgb8(imgbuf).save(fout, image::PNG).expect(\"Could not write render result to output file\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 79856.02278750327 }, { "content": "pub 
fn load_image(path: &str) -> image::DynamicImage {\n\n let f = File::open(path).expect(\"could not open image file\");\n\n let r = BufReader::new(f);\n\n image::load(r, image::JPEG).expect(\"could not decode image file\")\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 27, "score": 77866.20076662651 }, { "content": "pub fn load_mesh_file(path: &str) -> Box<dyn Shape> {\n\n let mut obj_file = File::open(path).expect(\"could not open object file\");\n\n let mut obj_file_contents = String::new();\n\n obj_file.read_to_string(&mut obj_file_contents).expect(\"could not read object file\");\n\n let obj_set = wavefront_obj::obj::parse(obj_file_contents).expect(\"Could not parse object file!\");\n\n convert_objs(&obj_set)\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 28, "score": 74907.66048785561 }, { "content": "pub trait ApproxEq: Debug {\n\n fn approx_eq(&self, other: &Self) -> bool;\n\n}\n\n\n\nimpl ApproxEq for f64 {\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n if self.is_nan() || other.is_nan() {\n\n return false;\n\n }\n\n (*self - *other).abs() < TEST_EPSILON\n\n }\n\n}\n\n\n\nimpl<T> ApproxEq for [T]\n\nwhere\n\n T: ApproxEq,\n\n{\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n if self.len() != other.len() {\n\n return false;\n", "file_path": "src/test_utils.rs", "rank": 29, "score": 74857.54862915035 }, { "content": "fn refract(incident: Direction, normal: Direction, ior: f64) -> Direction {\n\n let mut cos_i = clamp(-1.0, 1.0, incident.dot(normal));\n\n let mut eta_i = 1.0;\n\n let mut eta_t = ior;\n\n let mut n = normal;\n\n if cos_i < 0.0 {\n\n cos_i = -cos_i;\n\n } else {\n\n mem::swap(&mut eta_i, &mut eta_t);\n\n n = -normal;\n\n }\n\n let eta = eta_i / eta_t;\n\n let k = 1.0 - eta * eta * (1.0 - cos_i * cos_i);\n\n if k < 0.0 {\n\n Direction::zero()\n\n } else {\n\n incident * eta + n * (eta * cos_i - k.sqrt())\n\n }\n\n}\n\n\n", "file_path": "src/materials/dielectric.rs", "rank": 30, "score": 74359.44972000981 }, { "content": "pub fn 
parse(options: &Options, s: &str) -> Result<Scene,String> {\n\n sdl_grammar::sdl_grammar::scene(&s, &options).map_err(|err| err.to_string())\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 31, "score": 74162.47648874129 }, { "content": "pub trait Material: Send + Sync {\n\n fn scatter(&self, context: &RenderContext, hit: &RayHit) -> Option<ScatteredRay>;\n\n fn emit(&self, context: &RenderContext, hit: &RayHit) -> Color;\n\n fn box_clone(&self) -> Box<dyn Material>;\n\n}\n\n\n\nimpl Clone for Box<dyn Material> {\n\n fn clone(&self) -> Self {\n\n self.box_clone()\n\n }\n\n}\n\n\n\npub struct ScatteredRay {\n\n pub attenuation: Color,\n\n pub origin: Point,\n\n pub direction: Direction,\n\n}\n\n\n\nmod dielectric;\n\nmod diffuse_light;\n\nmod isotropic;\n\nmod lambertian;\n\nmod metal;\n\n\n\npub use self::dielectric::Dielectric;\n\npub use self::diffuse_light::DiffuseLight;\n\npub use self::isotropic::Isotropic;\n\npub use self::lambertian::Lambertian;\n\npub use self::metal::Metal;\n", "file_path": "src/materials/mod.rs", "rank": 32, "score": 73728.68078502688 }, { "content": "pub trait Shape: Intersectable + Send + Sync {\n\n fn transform(&mut self, m: Matrix44f);\n\n fn intersection_intervals(&self, ray: &Ray) -> Vec<Interval>;\n\n}\n\n\n\nimpl Intersectable for [Box<dyn Shape>] {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection> {\n\n if self.len() == 0 {\n\n return None;\n\n }\n\n\n\n self.iter()\n\n .flat_map(|s| s.intersect(ray))\n\n .min_by(|a, b| a.partial_cmp(b).unwrap())\n\n }\n\n}\n", "file_path": "src/shapes/mod.rs", "rank": 33, "score": 70747.38138950997 }, { "content": "fn usize_validator(s: String) -> Result<(), String> {\n\n if s.parse::<usize>().is_ok() { return Ok(()); }\n\n Err(String::from(\"The value must be a positive number.\"))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 67291.9232806155 }, { "content": "pub fn new_object(name: Option<String>, shape: Box<dyn Shape>, material: Box<dyn Material>) -> Object {\n\n 
Object::new(&name.unwrap_or(String::from(\"object\")), shape, material)\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 35, "score": 60299.021190499974 }, { "content": "fn plane_uv(n: Direction) -> (Direction, Direction) {\n\n let mut u = n.cross(Direction::new(1.0, 0.0, 0.0));\n\n if u.length_squared() < 1e-6 {\n\n u = n.cross(Direction::new(0.0, 1.0, 0.0));\n\n }\n\n if u.length_squared() < 1e-6 {\n\n u = n.cross(Direction::new(0.0, 0.0, 1.0));\n\n }\n\n u = u.normalize();\n\n let v = n.cross(u);\n\n\n\n (u, v)\n\n}\n\n\n", "file_path": "src/shapes/plane.rs", "rank": 36, "score": 59470.543397415655 }, { "content": "pub trait Intersectable {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection>;\n\n}\n\n\n", "file_path": "src/system.rs", "rank": 37, "score": 52034.62240521442 }, { "content": "pub trait Transformable {\n\n fn transform(&mut self, m: Matrix44f);\n\n}\n\n\n\npub struct RenderContext {\n\n pub options: Options,\n\n pub scene: Scene,\n\n}\n\n\n", "file_path": "src/system.rs", "rank": 38, "score": 52034.62240521442 }, { "content": "pub trait ColorSource {\n\n fn color_at_uv(&self, uv: Vector2f) -> Color;\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum Texture {\n\n Solid(Color),\n\n Pattern(Pattern),\n\n Image(DynamicImage, f64),\n\n}\n\n\n\n\n\nimpl fmt::Debug for Texture {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n &Texture::Solid(ref c) =>\n\n f.debug_tuple(\"Texture::Solid\")\n\n .field(c)\n\n .finish(),\n\n &Texture::Pattern(ref p) =>\n", "file_path": "src/texture.rs", "rank": 39, "score": 50755.31619522803 }, { "content": "pub trait RenderProgress {\n\n fn render_started(&mut self, options: &Options);\n\n fn sample_started(&mut self, options: &Options);\n\n fn row_finished(&mut self, options: &Options);\n\n fn sample_finished(&mut self, options: &Options, renderbuf: &Vec<Vec<Color>>, num_samples: u16);\n\n fn render_finished(&mut self, options: &Options, renderbuf: &Vec<Vec<Color>>, num_samples: 
u16);\n\n}\n\n\n", "file_path": "src/system.rs", "rank": 40, "score": 50755.31619522803 }, { "content": "fn main() {\n\n let default_cpus = format!(\"{}\", num_cpus::get());\n\n let app = App::new(\"raytracer\")\n\n .version(\"0.1.0\")\n\n .author(\"Gordon Tyler <[email protected]>\")\n\n .about(\"Simple ray tracer\")\n\n .arg(\n\n Arg::with_name(\"width\")\n\n .short(\"w\")\n\n .value_name(\"WIDTH\")\n\n .help(\"Image width\")\n\n .takes_value(true)\n\n .validator(u32_validator)\n\n .default_value(\"1024\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"height\")\n\n .short(\"h\")\n\n .value_name(\"HEIGHT\")\n\n .help(\"Image height\")\n", "file_path": "src/main.rs", "rank": 41, "score": 47751.35841171126 }, { "content": "pub trait Dot<RHS=Self> {\n\n fn dot(&self, rhs: RHS) -> f64;\n\n}\n\n\n\nimpl Dot for Direction {\n\n fn dot(&self, rhs: Direction) -> f64 {\n\n self.x * rhs.x + self.y * rhs.y + self.z * rhs.z\n\n }\n\n}\n\n\n\nimpl Dot<Point> for Direction {\n\n fn dot(&self, rhs: Point) -> f64 {\n\n self.x * rhs.x + self.y * rhs.y + self.z * rhs.z\n\n }\n\n}\n\n\n\nimpl Add for Direction {\n\n type Output = Direction;\n\n\n\n fn add(self, rhs: Direction) -> Self::Output {\n", "file_path": "src/direction.rs", "rank": 42, "score": 47322.94772141887 }, { "content": "fn u16_validator(s: String) -> Result<(), String> {\n\n if s.parse::<u16>().is_ok() { return Ok(()); }\n\n Err(String::from(\"The value must be a positive number.\"))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 39646.92043573659 }, { "content": "fn u32_validator(s: String) -> Result<(), String> {\n\n if s.parse::<u32>().is_ok() { return Ok(()); }\n\n Err(String::from(\"The value must be a positive number.\"))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 44, "score": 39646.92043573659 }, { "content": "fn color_to_rgb(v: Color) -> image::Rgb<u8> {\n\n let r = (v.r * 255.0).min(255.0) as u8;\n\n let g = (v.g * 255.0).min(255.0) as u8;\n\n let b = (v.b * 255.0).min(255.0) as u8;\n\n 
image::Rgb([r, g, b])\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 38039.730514635616 }, { "content": "fn color_at_pixel(context: &RenderContext, x: u32, y: u32) -> Color {\n\n context.scene.camera.random_pixel_ray(x, y).cast(&context)\n\n}\n\n\n", "file_path": "src/system.rs", "rank": 46, "score": 35191.76904355381 }, { "content": "fn convert_objs(objs: &wavefront_obj::obj::ObjSet) -> Box<dyn Shape> {\n\n let shapes: Vec<Mesh> = objs.objects.iter().map(|o| {\n\n let vertices = o.vertices.iter().map(|v| Point::new(v.x, v.y, v.z)).collect();\n\n let normals = o.normals.iter().map(|n| Direction::new(n.x, n.y, n.z)).collect();\n\n let triangles = o.geometry\n\n .iter()\n\n .flat_map(|g| &g.shapes)\n\n .flat_map(|s| match s.primitive {\n\n wavefront_obj::obj::Primitive::Triangle(v0, v1, v2) => Some(MeshTriangle {\n\n vertex_indices: [v0.0, v1.0, v2.0],\n\n normal_indices: [v0.2.unwrap(), v1.2.unwrap(), v2.2.unwrap()],\n\n }),\n\n _ => None,\n\n })\n\n .collect();\n\n\n\n Mesh::new(vertices, normals, triangles, true)\n\n }).collect();\n\n\n\n let shapes: Vec<Box<dyn Shape>> = shapes.into_iter().map(|m| Box::new(m) as Box<dyn Shape>).collect();\n\n\n\n Box::new(Composite::new(shapes))\n\n}\n\n\n", "file_path": "src/sdl.rs", "rank": 47, "score": 32943.81749516551 }, { "content": "use std::fmt::Debug;\n\n\n\nuse direction::Direction;\n\nuse matrix::Matrix44f;\n\nuse point::Point;\n\n\n\nconst TEST_EPSILON: f64 = 0.000001;\n\n\n", "file_path": "src/test_utils.rs", "rank": 48, "score": 32572.632217324703 }, { "content": " }\n\n self.iter().zip(other.iter()).all(|(a, b)| a.approx_eq(b))\n\n }\n\n}\n\n\n\nimpl<T> ApproxEq for Vec<T>\n\nwhere\n\n T: ApproxEq,\n\n{\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n self.as_slice().approx_eq(other.as_slice())\n\n }\n\n}\n\n\n\nimpl ApproxEq for Matrix44f {\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n self.row(0).approx_eq(&other.row(0))\n\n && self.row(1).approx_eq(&other.row(1))\n\n && 
self.row(2).approx_eq(&other.row(2))\n\n && self.row(3).approx_eq(&other.row(3))\n", "file_path": "src/test_utils.rs", "rank": 49, "score": 32567.529107700746 }, { "content": " }\n\n}\n\n\n\nimpl ApproxEq for Direction {\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n let a = [self.x, self.y, self.z];\n\n let b = [other.x, other.y, other.z];\n\n\n\n a.approx_eq(&b)\n\n }\n\n}\n\n\n\nimpl ApproxEq for Point {\n\n fn approx_eq(&self, other: &Self) -> bool {\n\n let a = [self.x, self.y, self.z];\n\n let b = [other.x, other.y, other.z];\n\n\n\n a.approx_eq(&b)\n\n }\n\n}\n", "file_path": "src/test_utils.rs", "rank": 50, "score": 32566.277065266884 }, { "content": "\n\nmacro_rules! assert_approx_eq {\n\n ($a:expr, $b:expr) => {\n\n if !$a.approx_eq(&$b) {\n\n panic!(\n\n \"assertion failed: `(left == right)`\\n left: `{:?}`,\\n right: `{:?}`\",\n\n $a, $b,\n\n )\n\n }\n\n };\n\n}\n", "file_path": "src/test_utils.rs", "rank": 51, "score": 32565.653914355455 }, { "content": "use matrix::Matrix44f;\n\nuse object::Transformation;\n\nuse system::{Intersectable, Intersection, Ray};\n\n\n\nmod bounding_box;\n\nmod composite;\n\nmod csg;\n\nmod cube;\n\nmod cylinder;\n\nmod homogenous_medium;\n\nmod mesh;\n\nmod plane;\n\nmod sphere;\n\nmod torus;\n\n\n\npub use self::bounding_box::*;\n\npub use self::composite::*;\n\npub use self::csg::*;\n\npub use self::cube::*;\n\npub use self::cylinder::*;\n", "file_path": "src/shapes/mod.rs", "rank": 52, "score": 32501.930388031098 }, { "content": "pub use self::homogenous_medium::*;\n\npub use self::mesh::*;\n\npub use self::plane::*;\n\npub use self::sphere::*;\n\npub use self::torus::*;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]\n\npub struct Interval(Intersection, Intersection);\n\n\n\nimpl Interval {\n\n pub fn to_world(self, world_ray: &Ray, object_ray: &Ray, tx: &Transformation) -> Interval {\n\n Interval(\n\n self.0.to_world(world_ray, &object_ray, tx),\n\n self.1.to_world(world_ray, &object_ray, tx),\n\n 
)\n\n }\n\n}\n\n\n", "file_path": "src/shapes/mod.rs", "rank": 53, "score": 32494.332126833186 }, { "content": "use color::Color;\n\nuse direction::Direction;\n\nuse point::Point;\n\nuse system::{RayHit, RenderContext};\n\n\n", "file_path": "src/materials/mod.rs", "rank": 54, "score": 32491.039170618704 }, { "content": "use point::Point;\n\nuse system::Ray;\n\n\n\npub struct BoundingBox {\n\n bounds: [Point; 2],\n\n}\n\n\n\nimpl BoundingBox {\n\n pub fn new(min: Point, max: Point) -> BoundingBox {\n\n BoundingBox { bounds: [min, max] }\n\n }\n\n\n\n pub fn intersect(&self, ray: &Ray) -> bool {\n\n let mut tmin = (self.bounds[ray.sign[0]].x - ray.origin.x) * ray.inverse_direction.x;\n\n let mut tmax = (self.bounds[1 - ray.sign[0]].x - ray.origin.x) * ray.inverse_direction.x;\n\n let tymin = (self.bounds[ray.sign[1]].y - ray.origin.y) * ray.inverse_direction.y;\n\n let tymax = (self.bounds[1 - ray.sign[1]].y - ray.origin.y) * ray.inverse_direction.y;\n\n\n\n if (tmin > tymax) || (tymin > tmax) {\n\n return false;\n", "file_path": "src/shapes/bounding_box.rs", "rank": 55, "score": 31237.71785545538 }, { "content": " }\n\n if tymin > tmin {\n\n tmin = tymin;\n\n }\n\n if tymax < tmax {\n\n tmax = tymax;\n\n }\n\n\n\n let tzmin = (self.bounds[ray.sign[2]].z - ray.origin.z) * ray.inverse_direction.z;\n\n let tzmax = (self.bounds[1 - ray.sign[2]].z - ray.origin.z) * ray.inverse_direction.z;\n\n\n\n if (tmin > tzmax) || (tzmin > tmax) {\n\n return false;\n\n }\n\n\n\n // if tzmin > tmin {\n\n // tmin = tzmin;\n\n // }\n\n // if tzmax < tmax {\n\n // tmax = tzmax;\n\n // }\n\n\n\n return true;\n\n }\n\n}\n", "file_path": "src/shapes/bounding_box.rs", "rank": 56, "score": 31229.746734617667 }, { "content": " let d = object_ray.direction;\n\n\n\n let R = self.radius1;\n\n let S = self.radius2;\n\n\n\n let T = 4.0 * R * R;\n\n let G = T * (d.x * d.x + d.y * d.y);\n\n let H = 2.0 * T * (o.x * d.x + o.y * d.y);\n\n let I = T * (o.x * o.x + o.y * o.y);\n\n let J = 
d.length_squared();\n\n let K = 2.0 * o.to_dir().dot(d);\n\n let L = o.to_dir().length_squared() + R * R - S * S;\n\n\n\n let mut roots: Vec<f64> =\n\n solve_quartic_f64(J * J, 2.0 * J * K, 2.0 * J * L + K * K - G, 2.0 * K * L - H, L * L - I)\n\n .into_iter()\n\n .collect();\n\n\n\n roots.sort_by(|a, b| a.partial_cmp(&b).unwrap());\n\n\n", "file_path": "src/shapes/torus.rs", "rank": 65, "score": 25.189501012745346 }, { "content": " _ => panic!(\"unexpected number of quartic roots: {:?}\", roots),\n\n };\n\n\n\n is.into_iter().map(|i| i.to_world(ray, &object_ray, &self.tx)).collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use direction::*;\n\n use point::*;\n\n use system::Ray;\n\n use test_utils::*;\n\n\n\n #[test]\n\n pub fn front_orthogonal_intersection() {\n\n let t = Torus::new(1.0, 0.1);\n\n let r = Ray::primary(Point::new(0.0, 1.0, 1.0), Direction::new(0.0, 0.0, -1.0), 0);\n\n let is: Vec<Intersection> = t\n", "file_path": "src/shapes/torus.rs", "rank": 66, "score": 22.960654422944824 }, { "content": "impl Intersectable for Torus {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection> {\n\n super::first_positive_intersection(self.intersection_intervals(ray))\n\n }\n\n}\n\n\n\nimpl Shape for Torus {\n\n fn transform(&mut self, m: Matrix44f) {\n\n self.tx.transform(m);\n\n }\n\n\n\n fn intersection_intervals(&self, ray: &Ray) -> Vec<Interval> {\n\n /*\n\n Transcribed from http://cosinekitty.com/raytrace/rtsource.zip.\n\n Original written by Don Cross.\n\n Adapted to Rust by Gordon Tyler.\n\n */\n\n\n\n let object_ray = ray.to_object(&self.tx);\n\n let o = object_ray.origin;\n", "file_path": "src/shapes/torus.rs", "rank": 68, "score": 22.46622528478569 }, { "content": " }\n\n\n\n fn intersection_intervals_with_bounds<F>(&self, ray: &Ray, out_of_bounds: F) -> Vec<Interval>\n\n where\n\n F: FnOnce(Point) -> bool,\n\n {\n\n self.intersect_with_bounds(ray, out_of_bounds)\n\n .map(|i| vec![Interval(i, i.clone())])\n\n 
.unwrap_or(Vec::with_capacity(0))\n\n }\n\n}\n\n\n\nimpl Intersectable for Plane {\n\n fn intersect(&self, ray: &Ray) -> Option<Intersection> {\n\n self.intersect_with_bounds(ray, |_| false)\n\n }\n\n}\n\n\n\nimpl Shape for Plane {\n\n fn transform(&mut self, m: Matrix44f) {\n", "file_path": "src/shapes/plane.rs", "rank": 69, "score": 17.182095689217967 }, { "content": " let i = is[0];\n\n vec![Interval(i.clone(), i.clone()).to_world(ray, &object_ray, &self.tx)]\n\n } else {\n\n Vec::with_capacity(0)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use direction::*;\n\n use test_utils::*;\n\n\n\n #[test]\n\n pub fn outside_intersection() {\n\n let s = Cube::new(Point::new(-1.0, -1.0, -1.0), Point::new(1.0, 1.0, 1.0));\n\n let r = Ray::primary(Point::new(0.0, 0.0, 2.0), Direction::new(0.0, 0.0, -1.0), 0);\n\n let intersections: Vec<Intersection> = s.intersection_intervals(&r)\n\n .into_iter()\n", "file_path": "src/shapes/cube.rs", "rank": 70, "score": 16.700209481096856 }, { "content": " Color::new(self.r / rhs, self.g / rhs, self.b / rhs)\n\n }\n\n}\n\n\n\nimpl PartialEq for Color {\n\n fn eq(&self, other: &Color) -> bool {\n\n (self.r - other.r) <= f64::EPSILON && (self.g - other.g) <= f64::EPSILON && (self.b - other.b) <= f64::EPSILON\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn add() {\n\n let a = Color::new(0.1, 0.2, 0.3);\n\n let b = Color::new(0.4, 0.5, 0.6);\n\n let r = a + b;\n\n assert_eq!(Color::new(0.1 + 0.4, 0.2 + 0.5, 0.3 + 0.6), r);\n", "file_path": "src/color.rs", "rank": 71, "score": 16.42529231051439 }, { "content": "use matrix::Matrix44f;\n\nuse object::Transformation;\n\nuse std::f64;\n\n\n\nuse direction::{Direction, Dot};\n\nuse point::Point;\n\nuse shapes::bounding_box::BoundingBox;\n\nuse shapes::{Interval, Shape};\n\nuse system::{Intersectable, Intersection, Ray, Transformable};\n\nuse vector::Vector2f;\n\n\n\npub struct Mesh {\n\n vertices: Vec<Point>,\n\n normals: 
Vec<Direction>,\n\n triangles: Vec<MeshTriangle>,\n\n bounding_box: BoundingBox,\n\n smooth_shading: bool,\n\n tx: Transformation,\n\n}\n\n\n", "file_path": "src/shapes/mesh.rs", "rank": 72, "score": 16.251267921049916 }, { "content": " vec![Interval(\n\n self.intersection_for_t(&object_ray, t0).to_world(ray, &object_ray, &self.tx),\n\n self.intersection_for_t(&object_ray, t1).to_world(ray, &object_ray, &self.tx),\n\n )]\n\n } else {\n\n Vec::with_capacity(0)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use direction::*;\n\n use test_utils::*;\n\n\n\n #[test]\n\n pub fn outside_intersection() {\n\n let s = Sphere::new(Point::zero(), 1.0);\n\n let r = Ray::primary(Point::new(0.0, 0.0, 2.0), Direction::new(0.0, 0.0, -1.0), 0);\n", "file_path": "src/shapes/sphere.rs", "rank": 73, "score": 16.227243782211605 }, { "content": " self.tx.transform(m);\n\n }\n\n\n\n fn intersection_intervals(&self, ray: &Ray) -> Vec<Interval> {\n\n self.intersection_intervals_with_bounds(ray, |_| false)\n\n }\n\n}\n\n\n\npub struct XYRectangle {\n\n plane: Plane,\n\n x0: f64,\n\n x1: f64,\n\n y0: f64,\n\n y1: f64,\n\n}\n\n\n\nimpl XYRectangle {\n\n pub fn new(origin: Point, width: f64, height: f64, reverse_normal: bool) -> XYRectangle {\n\n let mut normal = Direction::new(0.0, 0.0, 1.0);\n\n if reverse_normal {\n", "file_path": "src/shapes/plane.rs", "rank": 74, "score": 16.225639146903656 }, { "content": "\n\n true\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Matrix44f;\n\n use direction::Direction;\n\n use point::Point;\n\n use test_utils::ApproxEq;\n\n\n\n #[test]\n\n fn inverse_identity() {\n\n let m = Matrix44f::identity();\n\n let inv = m.inverse();\n\n assert_eq!(m, inv);\n\n }\n\n\n\n #[test]\n", "file_path": "src/matrix.rs", "rank": 75, "score": 15.600570499192253 }, { "content": "impl Plane {\n\n pub fn new(origin: Point, normal: Direction) -> Plane {\n\n let reverse_normal = normal * -1.0;\n\n let uv = plane_uv(normal);\n\n let 
reverse_uv = plane_uv(reverse_normal);\n\n Plane {\n\n origin,\n\n normal,\n\n reverse_normal,\n\n uv,\n\n reverse_uv,\n\n tx: Transformation::new(),\n\n }\n\n }\n\n\n\n fn intersect_with_bounds<F>(&self, ray: &Ray, out_of_bounds: F) -> Option<Intersection>\n\n where\n\n F: FnOnce(Point) -> bool,\n\n {\n\n let object_ray = ray.to_object(&self.tx);\n", "file_path": "src/shapes/plane.rs", "rank": 76, "score": 14.777863369907124 }, { "content": "\n\nimpl XZRectangle {\n\n pub fn new(origin: Point, width: f64, height: f64, reverse_normal: bool) -> XZRectangle {\n\n let mut normal = Direction::new(0.0, 1.0, 0.0);\n\n if reverse_normal {\n\n normal *= -1.0;\n\n }\n\n let plane = Plane::new(origin, normal);\n\n let x0 = origin.x - (width / 2.0);\n\n let x1 = origin.x + width / 2.0;\n\n let z0 = origin.z - (height / 2.0);\n\n let z1 = origin.z + height / 2.0;\n\n\n\n XZRectangle { plane, x0, x1, z0, z1 }\n\n }\n\n\n\n fn out_of_bounds(&self, p: Point) -> bool {\n\n p.x < self.x0 || p.x > self.x1 || p.z < self.z0 || p.z > self.z1\n\n }\n\n}\n", "file_path": "src/shapes/plane.rs", "rank": 77, "score": 14.646632686410596 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use direction::*;\n\n use test_utils::*;\n\n\n\n #[test]\n\n pub fn front_intersection() {\n\n let s = Plane::new(Point::zero(), Direction::new(0.0, 0.0, 1.0));\n\n let r = Ray::primary(Point::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0), 0);\n\n let i = s.intersect(&r).unwrap();\n\n assert_approx_eq!(i.t, 1.0);\n\n assert_approx_eq!(i.n, Direction::new(0.0, 0.0, 1.0));\n\n }\n\n\n\n #[test]\n\n pub fn back_intersection() {\n", "file_path": "src/shapes/plane.rs", "rank": 78, "score": 14.561624171637838 }, { "content": " pub fn from_tuple((x, y, z): (f64, f64, f64)) -> Direction {\n\n Direction { x, y, z }\n\n }\n\n\n\n pub fn zero() -> Direction {\n\n Direction::new(0.0, 0.0, 0.0)\n\n }\n\n\n\n pub fn uniform_sphere_distribution() -> Direction {\n\n let mut rng = 
rand::thread_rng();\n\n\n\n let theta = 2.0 * f64::consts::PI * rng.gen::<f64>();\n\n let phi = (1.0 - 2.0 * rng.gen::<f64>()).acos();\n\n let x = phi.sin() * theta.cos();\n\n let y = phi.sin() * theta.sin();\n\n let z = phi.cos();\n\n\n\n Direction::new(x, y, z)\n\n }\n\n \n", "file_path": "src/direction.rs", "rank": 79, "score": 14.543208885579856 }, { "content": "pub struct MeshTriangle {\n\n pub vertex_indices: [usize; 3],\n\n pub normal_indices: [usize; 3],\n\n}\n\n\n\nimpl Mesh {\n\n pub fn new(\n\n vertices: Vec<Point>,\n\n normals: Vec<Direction>,\n\n triangles: Vec<MeshTriangle>,\n\n smooth_shading: bool,\n\n ) -> Mesh {\n\n let mut min = Point::zero();\n\n let mut max = Point::zero();\n\n\n\n for v in &vertices {\n\n min.x = min.x.min(v.x);\n\n min.y = min.y.min(v.y);\n\n min.z = min.z.min(v.z);\n\n max.x = max.x.max(v.x);\n", "file_path": "src/shapes/mesh.rs", "rank": 80, "score": 14.097172567849995 }, { "content": "extern crate image; \n\nextern crate clap; \n\nextern crate wavefront_obj; \n\nextern crate pbr; \n\nextern crate num_cpus; \n\nextern crate time; \n\nextern crate rand; \n\nextern crate rayon;\n\nextern crate num_complex;\n\nextern crate num_traits;\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\nmod test_utils;\n\n\n\nmod algebra;\n\nmod color;\n\nmod direction;\n\nmod materials;\n\nmod matrix;\n", "file_path": "src/main.rs", "rank": 81, "score": 14.09425850915938 }, { "content": " let is = match roots.as_slice() {\n\n [] => Vec::with_capacity(0),\n\n [a] => {\n\n let i = self.intersection_for(&object_ray, *a);\n\n let i2 = i.clone();\n\n vec![Interval(i, i2)]\n\n }\n\n [a, b] => vec![Interval(\n\n self.intersection_for(&object_ray, *a),\n\n self.intersection_for(&object_ray, *b),\n\n )],\n\n [a, b, c] => {\n\n // Calculate the Intersections and determine the facing of their surface normals.\n\n // -1 means facing towards ray origin.\n\n // +1 means facing away from ray origin.\n\n let is: Vec<(Intersection, isize)> = [*a, *b, *c]\n\n .iter()\n\n 
.map(|&t| self.intersection_for(&object_ray, t))\n\n .map(|i| (i, i.n.dot(d).signum() as isize))\n\n .collect();\n", "file_path": "src/shapes/torus.rs", "rank": 82, "score": 13.686225212851774 }, { "content": "}\n\n\n\nimpl Shape for XYRectangle {\n\n fn transform(&mut self, m: Matrix44f) {\n\n self.plane.transform(m);\n\n }\n\n\n\n fn intersection_intervals(&self, ray: &Ray) -> Vec<Interval> {\n\n self.plane\n\n .intersection_intervals_with_bounds(ray, |p| self.out_of_bounds(p))\n\n }\n\n}\n\n\n\npub struct XZRectangle {\n\n plane: Plane,\n\n x0: f64,\n\n x1: f64,\n\n z0: f64,\n\n z1: f64,\n\n}\n", "file_path": "src/shapes/plane.rs", "rank": 83, "score": 13.521565106251597 }, { "content": " #[test]\n\n pub fn coincident_intersection() {\n\n let s = Cube::new(Point::new(-1.0, -1.0, -1.0), Point::new(1.0, 1.0, 1.0));\n\n let r = Ray::primary(Point::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0), 0);\n\n let intersections: Vec<Intersection> = s.intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a,b)| vec![a, b])\n\n .collect();\n\n let distances: Vec<f64> = intersections\n\n .iter()\n\n .map(|i| i.t)\n\n .collect();\n\n let normals: Vec<Direction> = intersections\n\n .iter()\n\n .map(|i| i.n)\n\n .collect();\n\n assert_approx_eq!(distances, vec![\n\n 0.0,\n\n 2.0,\n\n ]);\n", "file_path": "src/shapes/cube.rs", "rank": 84, "score": 13.024805494732316 }, { "content": " z0: f64,\n\n z1: f64,\n\n y0: f64,\n\n y1: f64,\n\n}\n\n\n\nimpl ZYRectangle {\n\n pub fn new(origin: Point, width: f64, height: f64, reverse_normal: bool) -> ZYRectangle {\n\n let mut normal = Direction::new(1.0, 0.0, 0.0);\n\n if reverse_normal {\n\n normal *= -1.0;\n\n }\n\n let plane = Plane::new(origin, normal);\n\n let z0 = origin.z - (width / 2.0);\n\n let z1 = origin.z + width / 2.0;\n\n let y0 = origin.y - (height / 2.0);\n\n let y1 = origin.y + height / 2.0;\n\n\n\n ZYRectangle { plane, z0, z1, y0, y1 }\n\n }\n", "file_path": "src/shapes/plane.rs", "rank": 85, "score": 
12.966152288574001 }, { "content": "use std::f64;\n\nuse std::mem;\n\n\n\nuse direction::Dot;\n\nuse matrix::Matrix44f;\n\nuse object::Transformation;\n\nuse point::Point;\n\nuse shapes::{Interval, Shape};\n\nuse system::{Intersectable, Intersection, Ray, Transformable};\n\nuse vector::Vector2f;\n\n\n\npub struct Sphere {\n\n origin: Point,\n\n radius_squared: f64,\n\n tx: Transformation,\n\n}\n\n\n\nimpl Sphere {\n\n pub fn new(origin: Point, radius: f64) -> Sphere {\n\n Sphere {\n", "file_path": "src/shapes/sphere.rs", "rank": 86, "score": 12.836635130485245 }, { "content": " .intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n\n .collect();\n\n let distances: Vec<f64> = is.iter().map(|i| i.t).collect();\n\n let normals: Vec<Direction> = is.iter().map(|i| i.n).collect();\n\n assert_approx_eq!(distances, vec![0.9, 1.1]);\n\n assert_approx_eq!(\n\n normals,\n\n vec![Direction::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0)]\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn front_oblique_intersection() {\n\n let t = Torus::new(1.0, 0.1);\n\n let o = Point::new(0.0, 0.0, 1.0);\n\n let d = (Point::new(0.0, 1.0, 0.0) - o).normalize();\n\n let r = Ray::primary(o, d, 0);\n\n let is: Vec<Intersection> = t\n", "file_path": "src/shapes/torus.rs", "rank": 87, "score": 12.811042859627332 }, { "content": "use std::cmp;\n\nuse std::f64;\n\nuse std::sync::Arc;\n\n\n\nuse rand::prelude::*;\n\nuse rand::distributions::Uniform;\n\nuse rayon::prelude::*;\n\n\n\nuse color::Color;\n\nuse direction::Direction;\n\nuse matrix::Matrix44f;\n\nuse object::Object;\n\nuse object::Transformation;\n\nuse point::Point;\n\nuse sdl::Scene;\n\nuse vector::Vector2f;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Options {\n\n pub num_threads: usize,\n", "file_path": "src/system.rs", "rank": 88, "score": 12.731736466613363 }, { "content": " .intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n\n .collect();\n\n let distances: 
Vec<f64> = is.iter().map(|i| i.t).collect();\n\n let normals: Vec<Direction> = is.iter().map(|i| i.n).collect();\n\n assert_approx_eq!(distances, vec![0.9, 1.1]);\n\n assert_approx_eq!(\n\n normals,\n\n vec![Direction::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0)]\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn top_lateral_intersection() {\n\n let t = Torus::new(1.0, 0.1);\n\n let r = Ray::primary(Point::new(0.0, 2.0, 0.0), Direction::new(0.0, -1.0, 0.0), 0);\n\n let is: Vec<Intersection> = t\n\n .intersection_intervals(&r)\n\n .into_iter()\n", "file_path": "src/shapes/torus.rs", "rank": 89, "score": 12.63512302175661 }, { "content": " }\n\n\n\n pub rule short_vec3() -> (f64, f64, f64) = \"<\" _ n:float() _ \">\" {\n\n (n, n, n)\n\n }\n\n\n\n pub rule float() -> f64 \n\n = quiet!{\n\n s:$(\"-\"? digit()+ ( \".\" digit()+ (['e' | 'E'] digit()+ )? )? ) {\n\n f64::from_str(s).unwrap()\n\n }\n\n }\n\n / expected!(\"float literal\")\n\n\n\n rule digit() = ['0'..='9']\n\n\n\n rule zero_or_more<E>(elem: rule<E>) -> Vec<E> = v:(e:elem() _ { e })* { v }\n\n rule one_or_more<E>(elem: rule<E>) -> Vec<E> = v:(e:elem() _ { e })+ { v }\n\n\n\n rule line_comment() = \"//\" [^'\\n']*\n\n rule block_comment() = \"/*\" [^'*']* \"*/\"\n\n rule whitespace() = quiet!{[' ' | '\\n' | '\\r' | '\\t']} / expected!(\"whitespace\")\n\n\n\n rule _() = (whitespace() / line_comment() / block_comment())*\n\n\n\n }\n\n\n\n}\n", "file_path": "src/sdl_grammar.rs", "rank": 90, "score": 12.59035811216388 }, { "content": " assert_approx_eq!(normals, vec![\n\n Direction::new(0.0, 0.0, 1.0),\n\n Direction::new(0.0, 0.0, -1.0),\n\n ]);\n\n }\n\n\n\n #[test]\n\n pub fn inside_intersection() {\n\n let s = Cube::new(Point::new(-1.0, -1.0, -1.0), Point::new(1.0, 1.0, 1.0));\n\n let r = Ray::primary(Point::new(0.0, 0.0, 0.9), Direction::new(0.0, 0.0, -1.0), 0);\n\n let intersections: Vec<Intersection> = s.intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a,b)| vec![a, b])\n\n .collect();\n\n 
let distances: Vec<f64> = intersections\n\n .iter()\n\n .map(|i| i.t)\n\n .collect();\n\n let normals: Vec<Direction> = intersections\n\n .iter()\n", "file_path": "src/shapes/cube.rs", "rank": 91, "score": 12.334465658338022 }, { "content": " .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n\n .collect();\n\n let distances: Vec<f64> = intersections.iter().map(|i| i.t).collect();\n\n let normals: Vec<Direction> = intersections.iter().map(|i| i.n).collect();\n\n assert_approx_eq!(distances, vec![0.0, 2.0]);\n\n assert_approx_eq!(\n\n normals,\n\n vec![Direction::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0)]\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn inside_intersection() {\n\n let s = Sphere::new(Point::zero(), 1.0);\n\n let r = Ray::primary(Point::new(0.0, 0.0, 0.9), Direction::new(0.0, 0.0, -1.0), 0);\n\n let intersections: Vec<Intersection> = s\n\n .intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n", "file_path": "src/shapes/sphere.rs", "rank": 92, "score": 12.273077807806777 }, { "content": " let intersections: Vec<Intersection> = s\n\n .intersection_intervals(&r)\n\n .into_iter()\n\n .flat_map(|Interval(a, b)| vec![a, b])\n\n .collect();\n\n let distances: Vec<f64> = intersections.iter().map(|i| i.t).collect();\n\n let normals: Vec<Direction> = intersections.iter().map(|i| i.n).collect();\n\n assert_approx_eq!(distances, vec![1.0, 3.0]);\n\n assert_approx_eq!(\n\n normals,\n\n vec![Direction::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0)]\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn coincident_intersection() {\n\n let s = Sphere::new(Point::zero(), 1.0);\n\n let r = Ray::primary(Point::new(0.0, 0.0, 1.0), Direction::new(0.0, 0.0, -1.0), 0);\n\n let intersections: Vec<Intersection> = s\n\n .intersection_intervals(&r)\n", "file_path": "src/shapes/sphere.rs", "rank": 93, "score": 12.273077807806777 }, { "content": " pub width: u32,\n\n pub height: u32,\n\n pub bias: f64,\n\n pub max_depth: u16,\n\n pub samples: 
u16,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct Camera {\n\n width: f64,\n\n height: f64,\n\n fov_factor: f64,\n\n camera_to_world: Matrix44f,\n\n}\n\n\n\nimpl Camera {\n\n pub fn new(width: f64, height: f64, fov: f64, origin: Point, look_at: Point) -> Camera {\n\n let up = Direction::new(0.0, 1.0, 0.0);\n\n let zaxis = (origin - look_at).normalize();\n\n let xaxis = up.normalize().cross(zaxis);\n", "file_path": "src/system.rs", "rank": 94, "score": 11.94377532273639 }, { "content": "use std::cmp::PartialEq;\n\nuse std::ops::{Index, IndexMut, Mul};\n\n\n\nuse direction::Direction;\n\nuse point::Point;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Matrix44f(pub [[f64; 4]; 4]);\n\n\n\nimpl Matrix44f {\n\n pub fn zero() -> Matrix44f {\n\n Matrix44f([\n\n [0.0, 0.0, 0.0, 0.0],\n\n [0.0, 0.0, 0.0, 0.0],\n\n [0.0, 0.0, 0.0, 0.0],\n\n [0.0, 0.0, 0.0, 0.0],\n\n ])\n\n }\n\n\n\n pub fn identity() -> Matrix44f {\n", "file_path": "src/matrix.rs", "rank": 95, "score": 11.791700971853022 }, { "content": "use std::f64;\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\nuse rand;\n\nuse rand::Rng;\n\n\n\nuse point::Point;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Direction {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n\n}\n\n\n\nimpl Direction {\n\n pub fn new(x: f64, y: f64, z: f64) -> Direction {\n\n Direction { x, y, z }\n\n }\n\n\n", "file_path": "src/direction.rs", "rank": 96, "score": 11.340165247238314 }, { "content": "use std::f64;\n\nuse std::ops::{Add, AddAssign, Div, Mul};\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Color {\n\n pub r: f64,\n\n pub g: f64,\n\n pub b: f64,\n\n}\n\n\n\nimpl Color {\n\n pub fn new(r: f64, g: f64, b: f64) -> Color {\n\n Color { r, g, b }\n\n }\n\n\n\n pub fn from_tuple((r, g, b): (f64, f64, f64)) -> Color {\n\n Color { r, g, b }\n\n }\n\n\n\n pub fn black() -> Color { Color::new(0.0, 0.0, 0.0) }\n", "file_path": "src/color.rs", "rank": 97, "score": 
10.990645386796398 }, { "content": " }\n\n}\n\n\n\nimpl Shape for Sphere {\n\n fn transform(&mut self, m: Matrix44f) {\n\n self.tx.transform(m);\n\n }\n\n\n\n fn intersection_intervals(&self, ray: &Ray) -> Vec<Interval> {\n\n let object_ray = ray.to_object(&self.tx);\n\n let l = object_ray.origin - self.origin;\n\n let a = object_ray.direction.dot(object_ray.direction);\n\n let b = 2.0 * object_ray.direction.dot(l);\n\n let c = l.dot(l) - self.radius_squared;\n\n\n\n if let Some((mut t0, mut t1)) = solve_quadratic(a, b, c) {\n\n if t0 > t1 {\n\n mem::swap(&mut t0, &mut t1);\n\n }\n\n\n", "file_path": "src/shapes/sphere.rs", "rank": 98, "score": 10.912034515134085 }, { "content": " origin,\n\n radius_squared: radius.powi(2),\n\n tx: Transformation::new(),\n\n }\n\n }\n\n\n\n fn intersection_for_t(&self, ray: &Ray, t: f64) -> Intersection {\n\n let p = ray.origin + ray.direction * t;\n\n let n = (p - self.origin).normalize();\n\n let u = (1.0 - n.z.atan2(n.x) / f64::consts::PI) * 0.5;\n\n let v = n.y.acos() / f64::consts::PI;\n\n\n\n Intersection {\n\n t,\n\n n,\n\n uv: Vector2f(u, v),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/shapes/sphere.rs", "rank": 99, "score": 10.777883556730988 } ]
Rust
src/tables/parts.rs
jaredwolff/eagle-plm
aae5fd8f0ca6d30d295af99eed038b2195fa073b
extern crate diesel; use prettytable::{row, Table}; use serde::Deserialize; use crate::{models::*, *}; use diesel::prelude::*; use std::fs::File; use std::io::BufReader; #[derive(Debug, Deserialize)] struct Record { pn: String, mpn: String, desc: String, } pub fn create(app: &mut crate::Application) { let pn = app.prompt.ask_text_entry("Part Number: "); let mpn = app.prompt.ask_text_entry("Manufacturer Part Number: "); let desc = app.prompt.ask_text_entry("Description: "); let ver = app.prompt.ask_text_entry("Version: "); let ver: i32 = ver.trim().parse().expect("Invalid version number!"); let part = NewUpdatePart { pn: &pn, mpn: &mpn, descr: &desc, ver: &ver, mqty: &1, }; let found = find_part_by_pn(&app.conn, &pn); if let Ok(found) = found { let question = format!("{} already exists! Would you like to update it?", pn); let update = app.prompt.ask_yes_no_question(&question); if update { update_part(&app.conn, &found.id, &part).expect("Unable to update part!"); println!("{} updated!", pn); } } else { create_part(&app.conn, &part).expect("Unable to create part!"); } } pub fn rename(app: &mut crate::Application) { let pn = app.prompt.ask_text_entry("Part Number: "); let newpn = app.prompt.ask_text_entry("New Part Number: "); rename_part(&app.conn, &pn, &newpn).expect("Unable to change pn"); } pub fn create_by_csv(app: &mut crate::Application, filename: &str) { let file = File::open(filename).unwrap(); let file = BufReader::new(file); let mut records: Vec<Record> = Vec::new(); let mut rdr = csv::Reader::from_reader(file); for result in rdr.deserialize() { let record: Record = result.expect("Unable to deserialize."); println!("Processing: {:?}", record); records.push(record); } for record in records { let part = models::NewUpdatePart { pn: &record.pn, mpn: &record.mpn, descr: &record.desc, ver: &1, mqty: &1, }; let found = find_part_by_pn(&app.conn, &part.pn); if let Ok(found) = found { if found.mpn != part.mpn || found.descr != part.descr || found.ver != *part.ver { 
let question = format!("{} already exists! Would you like to update it?", part.pn); let mut table = Table::new(); table.add_row(row![ "Current:", found.pn, found.mpn, found.descr, found.ver ]); table.add_row(row!["Change to:", part.pn, part.mpn, part.descr, part.ver]); table.printstd(); let update = app.prompt.ask_yes_no_question(&question); if update { update_part(&app.conn, &found.id, &part).expect("Unable to update part!"); println!("{} updated!", part.pn); } } } else { println!("Creating: {:?}", part); create_part(&app.conn, &part).expect("Unable to create part!"); } } } pub fn delete(app: &mut crate::Application) { let part = app.prompt.ask_text_entry("Part Number: "); let part = find_part_by_pn(&app.conn, &part).expect("Unable to find part!"); let question = format!("Would you like to delete {}?", part.pn); let delete = app.prompt.ask_yes_no_question(&question); if delete { let res = delete_part(&app.conn, &part.id); if res.is_err() { panic!("Error deleting part {}.", part.pn); } else { println!("Deleted {}", part.pn); } } } pub fn show(app: &mut crate::Application) { use crate::schema::*; let mut table = Table::new(); let results = parts::dsl::parts .load::<models::Part>(&app.conn) .expect("Error loading parts"); println!("Displaying {} parts", results.len()); table.add_row(row!["PN", "MPN", "Desc", "Mqty", "Ver"]); for part in results { table.add_row(row![part.pn, part.mpn, part.descr, part.mqty, part.ver]); } table.printstd(); }
extern crate diesel; use prettytable::{row, Table}; use serde::Deserialize; use crate::{models::*, *}; use diesel::prelude::*; use std::fs::File; use std::io::BufReader; #[derive(Debug, Deserialize)] struct Record { pn: String, mpn: String, desc: String, } pub fn create(app: &mut crate::Application) { let pn = app.prompt.ask_text_entry("Part Number: "); let mpn = app.prompt.ask_text_entry("Manufacturer Part Number: "); let desc = app.prompt.ask_text_entry("Description: "); let ver = app.prompt.ask_text_entry("Version: "); let ver: i32 = ver.trim().parse().expect("Invalid version number!"); let part = NewUpdatePart { pn: &pn, mpn: &mpn, descr: &desc, ver: &ver, mqty: &1, }; let found = find_part_by_pn(&app.conn, &pn); if let Ok(found) = found { let question = format!("{} already exists! Would you like to update it?", pn); let update = app.prompt.ask_yes_no_question(&question); if update { update_part(&app.conn, &found.id, &part).expect("Unable to update part!"); println!("{} updated!", pn); } } else { create_part(&app.conn, &part).expect("Unable to create part!"); } } pub fn rename(app: &mut crate::Application) { let pn = app.prompt.ask_text_entry("Part Number: "); let newpn = app.prompt.ask_text_entry("New Part Number: "); rename_part(&app.conn, &pn, &newpn).expect("Unable to change pn"); } pub fn create_by_csv(app: &mut crate::Application, filename: &str) { let file = File::open(filename).unwrap(); let file = BufReader::new(file); let mut records: Vec<Record> = Vec::new(); let mut rdr = csv::Reader::from_reader(file); for result in rdr.deserialize() { let record: Record = result.expect("Unable to deserialize."); println!("Processing: {:?}", record); records.push(record); } for record in records { let part = models::NewUpdatePart { pn: &record.pn, mpn: &record.mpn, descr: &record.desc, ver: &1, mqty: &1, }; let found = find_part_by_pn(&app.conn, &part.pn); if let Ok(found) = found { if found.mpn != part.mpn || found.descr != part.descr || found.ver != *part.ver { 
let question = format!("{} already exists! Would you like to update it?", part.pn); let mut table = Table::new(); table.add_row(row![ "Current:", found.pn, found.mpn, found.descr, found.ver ]); table.add_row(row!["Change to:", part.pn, part.mpn, part.descr, part.ver]); table.printstd(); let update = app.prompt.ask_yes_no_question(&question); if update {
pub fn delete(app: &mut crate::Application) { let part = app.prompt.ask_text_entry("Part Number: "); let part = find_part_by_pn(&app.conn, &part).expect("Unable to find part!"); let question = format!("Would you like to delete {}?", part.pn); let delete = app.prompt.ask_yes_no_question(&question); if delete { let res = delete_part(&app.conn, &part.id); if res.is_err() { panic!("Error deleting part {}.", part.pn); } else { println!("Deleted {}", part.pn); } } } pub fn show(app: &mut crate::Application) { use crate::schema::*; let mut table = Table::new(); let results = parts::dsl::parts .load::<models::Part>(&app.conn) .expect("Error loading parts"); println!("Displaying {} parts", results.len()); table.add_row(row!["PN", "MPN", "Desc", "Mqty", "Ver"]); for part in results { table.add_row(row![part.pn, part.mpn, part.descr, part.mqty, part.ver]); } table.printstd(); }
update_part(&app.conn, &found.id, &part).expect("Unable to update part!"); println!("{} updated!", part.pn); } } } else { println!("Creating: {:?}", part); create_part(&app.conn, &part).expect("Unable to create part!"); } } }
function_block-function_prefix_line
[ { "content": "/// Function used to show parts in BOM\n\npub fn show(app: &mut crate::Application, part_number: &str, version: &Option<i32>) {\n\n use crate::schema::*;\n\n\n\n // Find the part\n\n let part = find_part_by_pn(&app.conn, &part_number);\n\n\n\n if part.is_err() {\n\n println!(\"{} was not found!\", part_number);\n\n std::process::exit(1);\n\n }\n\n\n\n // Transform the response into a Part\n\n let part = part.unwrap();\n\n\n\n // Create the table\n\n let mut table = Table::new();\n\n\n\n // Then either use the provided version or the latest\n\n let ver = match version {\n\n Some(x) => x,\n", "file_path": "src/tables/bom.rs", "rank": 0, "score": 255653.23835364124 }, { "content": "/// Function used to export BOM to CSV\n\npub fn export(app: &mut crate::Application, part_number: &str, version: &Option<i32>) {\n\n use crate::schema::*;\n\n\n\n // Find the part\n\n let part = find_part_by_pn(&app.conn, &part_number);\n\n\n\n if part.is_err() {\n\n println!(\"{} was not found!\", part_number);\n\n std::process::exit(1);\n\n }\n\n\n\n // Transform the response into a Part\n\n let part = part.unwrap();\n\n\n\n // Then either use the provided version or the latest\n\n let ver = match version {\n\n Some(x) => x,\n\n None => &part.ver,\n\n };\n\n\n", "file_path": "src/tables/bom.rs", "rank": 1, "score": 255650.50838468978 }, { "content": "// Update from inventory export file\n\npub fn update_from_file(app: &mut crate::Application, filename: &str) {\n\n // Get records from file\n\n let records: Vec<InventoryEntry> = match read_records(filename) {\n\n Ok(r) => r,\n\n Err(e) => {\n\n eprintln!(\"{}\\nNo changes have been made\", e);\n\n return;\n\n }\n\n };\n\n\n\n // Only updates records found!\n\n for record in &records {\n\n // Notes converted as necessary\n\n let notes = record.notes.as_deref();\n\n\n\n // Convert from InventoryRecord to NewUpdateInventoryEntry\n\n let update = NewUpdateInventoryEntry {\n\n quantity: &record.quantity,\n\n consumed: 
&record.consumed,\n\n unit_price: record.unit_price.as_ref(),\n", "file_path": "src/tables/inventory.rs", "rank": 2, "score": 237183.38611137483 }, { "content": "pub fn create_from_file(app: &mut crate::Application, filename: &str) {\n\n println!(\"{:?}\", app.config);\n\n println!(\"{:?}\", filename);\n\n\n\n // Get records from file\n\n let records: Vec<NewInventoryRecord> = match read_records(filename) {\n\n Ok(r) => r,\n\n Err(e) => {\n\n eprintln!(\"{}\\nNo changes have been made\", e);\n\n return;\n\n }\n\n };\n\n\n\n for record in &records {\n\n println!(\"Finding: \\\"{}\\\"\", record.mpn);\n\n\n\n // Check if part number exists\n\n // Uses MPN as it's the common denominator between this and Digikey/Arrow/Mouser etc.\n\n let part = find_part_by_mpn(&app.conn, &record.mpn);\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 3, "score": 236953.83024122656 }, { "content": "// Export shortages to csv\n\npub fn export_shortages_to_file(app: &mut crate::Application, filename: &str) {\n\n let shortages = get_shortages(app, false).expect(\"Unable to get shortage report.\");\n\n\n\n let file = File::create(filename).unwrap();\n\n let file = BufWriter::new(file);\n\n\n\n // Create CSV writer\n\n let mut wtr = csv::Writer::from_writer(file);\n\n\n\n // Iterate and add to csv\n\n for shortage in shortages {\n\n wtr.serialize(shortage).expect(\"Unable to serialize.\");\n\n wtr.flush().expect(\"Unable to flush\");\n\n }\n\n\n\n println!(\"Shortages exported to {}\", filename);\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 5, "score": 212739.57266555564 }, { "content": "/// Function used to import parts from file\n\npub fn import(app: &mut crate::Application, filename: &str) {\n\n use crate::schema::parts::dsl::*;\n\n\n\n // Open the file\n\n let file = File::open(filename);\n\n\n\n // Make sure it's valid\n\n let file = match file {\n\n Ok(x) => x,\n\n Err(_) => {\n\n println!(\"Unable to open {}\", filename);\n\n std::process::exit(1);\n\n }\n\n 
};\n\n\n\n let file = BufReader::new(file);\n\n let eagle: schematic::Eagle = from_reader(file).expect(\"error parsing xml\");\n\n\n\n // println!(\"{:?}\",eagle);\n\n // let mut list: Vec<String> = [].to_vec();\n", "file_path": "src/tables/bom.rs", "rank": 7, "score": 203362.03591228073 }, { "content": "// Export inventory to csv\n\npub fn export_to_file(app: &mut crate::Application, filename: &str, export_all: bool) {\n\n use crate::schema::*;\n\n\n\n // Run the query\n\n let inventory = inventories::dsl::inventories\n\n .load::<Inventory>(&app.conn)\n\n .expect(\"Uanble to load inventory list.\");\n\n\n\n // File operations\n\n let file = File::create(filename).unwrap();\n\n let file = BufWriter::new(file);\n\n\n\n // Create CSV writer\n\n let mut wtr = csv::Writer::from_writer(file);\n\n\n\n // Iterate and add to csv\n\n for entry in inventory {\n\n // Skips this part if qty = 0 if export_all is false\n\n if !export_all && entry.quantity == 0 {\n\n continue;\n", "file_path": "src/tables/inventory.rs", "rank": 8, "score": 200919.06033720815 }, { "content": "pub fn create(app: &mut crate::Application) {\n\n // app.prompts for a part number\n\n let part_number = app.prompt.ask_text_entry(\"Enter part number: \");\n\n\n\n // Check if part number exists\n\n let part = find_part_by_pn(&app.conn, &part_number);\n\n\n\n // Make sure we're valid!\n\n let part = match part {\n\n Ok(x) => x,\n\n Err(_) => {\n\n println!(\"Unable to find {}\", part_number);\n\n std::process::exit(1);\n\n }\n\n };\n\n\n\n // Then an ajustment value\n\n let adj = app.prompt.ask_text_entry(\"Enter adjustment value: \");\n\n let adj: i32 = adj.trim().parse().expect(\"Invalid adjustment!\");\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 9, "score": 183328.74138191086 }, { "content": "pub fn create(app: &mut crate::Application) {\n\n // Get the input from stdin\n\n let part_number = app.prompt.ask_text_entry(\"Part Number: \");\n\n let version = app.prompt.ask_text_entry(\"Version: 
\");\n\n let version: i32 = version.trim().parse().expect(\"Invalid version number!\");\n\n let quantity = app.prompt.ask_text_entry(\"Quantity: \");\n\n let quantity: i32 = quantity.trim().parse().expect(\"Invalid quantity!\");\n\n\n\n let part = find_part_by_pn(&app.conn, &part_number);\n\n\n\n if part.is_err() {\n\n println!(\"{} version {} was not found!\", part_number, version);\n\n std::process::exit(1);\n\n }\n\n\n\n // Transform the response into a Part\n\n let part = part.unwrap();\n\n\n\n if part.ver != version {\n\n println!(\n", "file_path": "src/tables/builds.rs", "rank": 10, "score": 183328.74138191086 }, { "content": "/// Function used to export BOM to CSV\n\npub fn export(app: &mut crate::Application, build_id: i32) {\n\n use crate::schema::*;\n\n\n\n // Get the build\n\n let build = find_build_by_id(&app.conn, &build_id).expect(\"Unable to find build!\");\n\n\n\n // Get build part number\n\n let build_pn = find_part_by_id(&app.conn, &build.part_id).expect(\"Unable to get part by id\");\n\n\n\n // Get partslist\n\n let bom_list = parts_parts::dsl::parts_parts\n\n .filter(parts_parts::dsl::bom_part_id.eq(build.part_id))\n\n .filter(parts_parts::dsl::bom_ver.eq(build.part_ver))\n\n .load::<PartsPart>(&app.conn)\n\n .expect(\"Error loading parts\");\n\n\n\n // Create filename\n\n let filename = format!(\n\n \"{}-v{}-BUILD={}-{}.csv\",\n\n build_pn.pn,\n", "file_path": "src/tables/builds.rs", "rank": 14, "score": 179729.50735268585 }, { "content": "pub fn delete(app: &mut crate::Application, build_id: i32) {\n\n delete_build(&app.conn, &build_id).expect(\"Unable to delete build.\");\n\n\n\n println!(\"Deleted build id: {} successfully!\", build_id);\n\n}\n\n\n", "file_path": "src/tables/builds.rs", "rank": 15, "score": 179726.39084379232 }, { "content": "pub fn complete(app: &mut crate::Application, build_id: i32) {\n\n use crate::schema::*;\n\n\n\n // Get the build\n\n let build = find_build_by_id(&app.conn, &build_id).expect(\"Unable to find 
build!\");\n\n\n\n // Get partslist\n\n let bom_list = parts_parts::dsl::parts_parts\n\n .filter(parts_parts::dsl::bom_part_id.eq(build.part_id))\n\n .filter(parts_parts::dsl::bom_ver.eq(build.part_ver))\n\n .load::<PartsPart>(&app.conn)\n\n .expect(\"Error loading parts\");\n\n\n\n // Get the shortages. Shorts only.\n\n let shortages = inventory::get_shortages(app, false).expect(\"Unable to get shortages.\");\n\n\n\n // Still track if we're short.\n\n let mut still_short = false;\n\n\n\n // Make sure that all parts are not short.\n", "file_path": "src/tables/builds.rs", "rank": 16, "score": 179726.3908437923 }, { "content": "/// Reads records from file using a generic type. Useful across create and update calls\n\nfn read_records<T>(filename: &str) -> anyhow::Result<Vec<T>>\n\nwhere\n\n T: DeserializeOwned + Debug,\n\n{\n\n // Open the file\n\n let file = File::open(filename).unwrap();\n\n let file = BufReader::new(file);\n\n\n\n let mut records: Vec<T> = Vec::new();\n\n\n\n let mut rdr = csv::Reader::from_reader(file);\n\n\n\n // Process each line entry.\n\n for (pos, result) in rdr.deserialize().enumerate() {\n\n // Notice that we need to provide a type hint for automatic\n\n // deserialization.\n\n let record: T = match result {\n\n Ok(r) => r,\n\n Err(e) => return Err(anyhow!(\"Unable to process line {}. 
Error: {}\", pos, e)),\n\n };\n\n\n\n println!(\"Processing: {:?}\", record);\n\n records.push(record);\n\n }\n\n\n\n Ok(records)\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 18, "score": 154262.82690422246 }, { "content": "pub fn find_part_by_pn_and_ver(\n\n conn: &SqliteConnection,\n\n pn: &str,\n\n ver: &i32,\n\n) -> std::result::Result<Part, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n parts::dsl::parts\n\n .filter(parts::dsl::pn.eq(pn))\n\n .filter(parts::dsl::ver.eq(ver))\n\n .first(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 149982.69894878607 }, { "content": "pub fn show(app: &mut crate::Application, show_all: bool) {\n\n use crate::schema::*;\n\n\n\n // Create the table\n\n let mut table = Table::new();\n\n\n\n let results: Vec<Build>;\n\n\n\n if show_all {\n\n results = builds::dsl::builds\n\n .load::<models::Build>(&app.conn)\n\n .expect(\"Error loading builds\");\n\n } else {\n\n results = builds::dsl::builds\n\n .filter(builds::dsl::complete.eq(0))\n\n .load::<models::Build>(&app.conn)\n\n .expect(\"Error loading builds\");\n\n }\n\n\n\n println!(\"Displaying {} builds\", results.len());\n", "file_path": "src/tables/builds.rs", "rank": 20, "score": 147599.57033661343 }, { "content": "pub fn show(app: &mut crate::Application, show_all_entries: bool) {\n\n use crate::schema::inventories::dsl::*;\n\n\n\n // Create the table\n\n let mut table = Table::new();\n\n\n\n let results = inventories\n\n .load::<Inventory>(&app.conn)\n\n .expect(\"Error loading parts\");\n\n\n\n table.add_row(row![\n\n \"PN\",\n\n \"Desc\",\n\n \"Qty\",\n\n \"Consumed\",\n\n \"Unit Price\",\n\n \"Notes\",\n\n \"Ver\"\n\n ]);\n\n for inventory in results {\n", "file_path": "src/tables/inventory.rs", "rank": 21, "score": 144238.5719902304 }, { "content": "// TODO: show shortage by build ID\n\n// Defualt hide non-short items. 
Option to view all.\n\npub fn show_shortage(app: &mut crate::Application, show_all_entries: bool) {\n\n // Create the table\n\n let mut table = Table::new();\n\n\n\n // Print out the shortages in table format.\n\n table.add_row(row![\"PID\", \"PN\", \"MPN\", \"Desc\", \"Have\", \"Needed\", \"Short\",]);\n\n\n\n let shortages = get_shortages(app, show_all_entries);\n\n\n\n let shortages = match shortages {\n\n Ok(x) => x,\n\n Err(e) => {\n\n println!(\"Error getting shortages: {:?}\", e);\n\n std::process::exit(1);\n\n }\n\n };\n\n\n\n for entry in shortages {\n\n table.add_row(row![\n\n entry.pid,\n", "file_path": "src/tables/inventory.rs", "rank": 22, "score": 141111.64445370017 }, { "content": "pub fn update_part(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n part: &NewUpdatePart,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n diesel::update(parts::dsl::parts.filter(parts::dsl::id.eq(id)))\n\n .set(part)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 135000.10140918515 }, { "content": "// Part related\n\npub fn create_part(\n\n conn: &SqliteConnection,\n\n part: &NewUpdatePart,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n diesel::insert_into(parts::table).values(part).execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 134704.51230105892 }, { "content": "pub fn find_part_by_mpn(\n\n conn: &SqliteConnection,\n\n mpn: &str,\n\n) -> std::result::Result<Part, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n println!(\"mpn: \\\"{}\\\"\", mpn);\n\n\n\n parts::dsl::parts\n\n .filter(parts::dsl::mpn.eq(mpn))\n\n .first(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 130570.37955177628 }, { "content": "pub fn find_part_by_pn(\n\n conn: &SqliteConnection,\n\n pn: &str,\n\n) -> std::result::Result<Part, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n 
parts::dsl::parts.filter(parts::dsl::pn.eq(pn)).first(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 130504.77749125012 }, { "content": "/// Calculate config path depending on input\n\npub fn get_config_path(config_path: &Option<String>) -> anyhow::Result<PathBuf> {\n\n match config_path {\n\n Some(c) => Ok(PathBuf::from(c)),\n\n None => {\n\n // Get config path\n\n let mut path = get_default_config_path()?;\n\n\n\n // Create the config path\n\n std::fs::create_dir_all(&path)?;\n\n\n\n // Add file to path\n\n path.push(\"config.toml\");\n\n\n\n // Return this guy\n\n Ok(path)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 27, "score": 126421.37060990545 }, { "content": "/// Compares a 'new' part with an existing Part. Prompts for user input.\n\n/// Responds witha boolean value of whether or not to update.\n\nfn prompt_to_update_part(\n\n prompt: &mut prompt::Prompt<StdinLock, Stdout>,\n\n new: &models::NewUpdatePart,\n\n existing: &models::Part,\n\n) -> bool {\n\n // Check for changes and ask if want to update.\n\n if new.mpn != existing.mpn || new.descr != existing.descr || *new.ver != existing.ver {\n\n let question = format!(\"{} found! 
Would you like to update it?\", existing.pn);\n\n\n\n // Create the table\n\n let mut table = Table::new();\n\n table.add_row(row![\"\", \"pn\", \"mpn\", \"decr\", \"mqty\", \"ver\"]);\n\n table.add_row(row![\n\n \"Current:\",\n\n existing.pn,\n\n existing.mpn,\n\n existing.descr,\n\n existing.mqty,\n\n existing.ver\n\n ]);\n", "file_path": "src/tables/bom.rs", "rank": 28, "score": 109956.88262303767 }, { "content": "pub fn create_build(\n\n conn: &SqliteConnection,\n\n build: &NewUpdateBuild,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::builds;\n\n\n\n diesel::insert_into(builds::table)\n\n .values(build)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 109870.26529826845 }, { "content": "pub fn create_inventory(\n\n conn: &SqliteConnection,\n\n entry: &NewUpdateInventoryEntry,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::inventories;\n\n\n\n diesel::insert_into(inventories::table)\n\n .values(entry)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 30, "score": 109870.26529826845 }, { "content": "pub fn rename_part(\n\n conn: &SqliteConnection,\n\n oldpn: &str,\n\n newpn: &str,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts::dsl::*;\n\n\n\n let part = find_part_by_pn(&conn, &oldpn).expect(\"Old part not found\");\n\n\n\n diesel::update(parts.find(part.id))\n\n .set(pn.eq(newpn))\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 31, "score": 109179.85158423698 }, { "content": "pub fn delete_part(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n diesel::delete(parts::dsl::parts.filter(parts::dsl::id.eq(id))).execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 32, "score": 109179.85158423698 }, { "content": "pub fn establish_connection(db_name: &str) -> SqliteConnection {\n\n // Get text version of configpath\n\n 
let mut database_url =\n\n config::get_default_config_path().unwrap_or_else(|_| panic!(\"Unable to get config path.\"));\n\n\n\n // Add database name\n\n database_url.push(db_name);\n\n\n\n // Establish the \"connection\" (We're using SQLite here so no connection excpet to the filesystem)\n\n let conn = SqliteConnection::establish(&database_url.to_string_lossy())\n\n .unwrap_or_else(|_| panic!(\"Error connecting to {}\", &database_url.to_string_lossy()));\n\n\n\n // This will run migrations and also create a DB if it doesn't exist.\n\n embedded_migrations::run(&conn).expect(\"Unable to run migration.\");\n\n\n\n conn\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 33, "score": 107485.21535922379 }, { "content": "pub fn find_builds_by_pn(\n\n conn: &SqliteConnection,\n\n pn: &str,\n\n) -> std::result::Result<Vec<Build>, diesel::result::Error> {\n\n use schema::builds;\n\n\n\n let part = find_part_by_pn(&conn, &pn).expect(\"Unable to run parts query.\");\n\n\n\n builds::dsl::builds\n\n .filter(builds::dsl::part_id.eq(part.id))\n\n .load::<Build>(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 34, "score": 106816.82452098343 }, { "content": "pub fn update_inventory_by_id(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n entry: &NewUpdateInventoryEntry,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::inventories;\n\n\n\n diesel::update(inventories::dsl::inventories.filter(inventories::dsl::id.eq(id)))\n\n .set(entry)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 106754.55717386583 }, { "content": "pub fn update_build_by_id(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n entry: &NewUpdateBuild,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::builds;\n\n\n\n diesel::update(builds::dsl::builds.filter(builds::dsl::id.eq(id)))\n\n .set(entry)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 36, "score": 106754.55717386583 }, { "content": "pub fn 
find_part_by_id(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n) -> std::result::Result<Part, diesel::result::Error> {\n\n use schema::parts;\n\n\n\n parts::dsl::parts.filter(parts::dsl::id.eq(id)).first(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 37, "score": 105811.1588280732 }, { "content": "/// Get default config path. ($HOME/.eagle-plm)\n\npub fn get_default_config_path() -> anyhow::Result<PathBuf> {\n\n // Get the config file from standard location\n\n let mut config_path = match home::home_dir() {\n\n Some(path) => path,\n\n None => {\n\n return Err(anyhow!(\"Impossible to get your home dir!\"));\n\n }\n\n };\n\n\n\n // Append config path to home directory\n\n config_path.push(\".eagle-plm\");\n\n\n\n // Return it\n\n Ok(config_path)\n\n}\n", "file_path": "src/config.rs", "rank": 38, "score": 105742.54723701044 }, { "content": "pub fn create_bom_line_item(\n\n conn: &SqliteConnection,\n\n part: &NewPartsParts,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts_parts;\n\n\n\n diesel::insert_into(parts_parts::table)\n\n .values(part)\n\n .execute(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 39, "score": 103368.66670674432 }, { "content": "pub fn find_inventories_by_part_id(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n) -> std::result::Result<Vec<Inventory>, diesel::result::Error> {\n\n use schema::inventories;\n\n\n\n inventories::dsl::inventories\n\n .filter(inventories::dsl::part_id.eq(id))\n\n .load::<Inventory>(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 102739.03475635698 }, { "content": "pub fn get_shortages(\n\n app: &mut crate::Application,\n\n show_all_entries: bool,\n\n) -> std::result::Result<Vec<Shortage>, diesel::result::Error> {\n\n use crate::schema::*;\n\n\n\n let results = builds::dsl::builds\n\n .filter(builds::dsl::complete.eq(0)) // Only show un-finished builds\n\n .load::<Build>(&app.conn);\n\n\n\n // Return the error if there was an issue\n\n let results = match 
results {\n\n Ok(x) => x,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n let mut shortages: Vec<Shortage> = Vec::new();\n\n\n\n // Iterate though the builds,\n\n // Create a table of all parts and computed inventory\n", "file_path": "src/tables/inventory.rs", "rank": 41, "score": 102330.5860969298 }, { "content": "pub fn delete_bom_list_by_id_and_ver(\n\n conn: &SqliteConnection,\n\n bom_id: &i32,\n\n ver: &i32,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::parts_parts::dsl::*;\n\n\n\n // First get list of ids that match the bom_part_id\n\n let query = parts_parts\n\n .select(id)\n\n .filter(bom_part_id.eq(bom_id))\n\n .load::<i32>(conn)?;\n\n\n\n // Then make sure that the bom ver is equal. Match against the ids found in the first step\n\n let target = parts_parts.filter(bom_ver.eq(ver)).filter(id.eq_any(query));\n\n\n\n // Delete appropriately\n\n diesel::delete(target).execute(conn)\n\n}\n\n\n\n// Build related\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 100875.92190283892 }, { "content": "/// Fetch the configuration from the provided folder path\n\npub fn load_config(config_path: &Path) -> anyhow::Result<Config> {\n\n // Read file to end\n\n let config = std::fs::read_to_string(&config_path)?;\n\n\n\n // Deserialize\n\n Ok(toml::from_str(&config)?)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 43, "score": 100070.87000086415 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct SimplePart {\n\n pn: String,\n\n mpn: String,\n\n descr: String,\n\n ver: i32,\n\n mqty: i32,\n\n nostuff: i32,\n\n}\n\n\n", "file_path": "src/tables/bom.rs", "rank": 44, "score": 98312.6313594924 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct NewInventoryRecord {\n\n mpn: String,\n\n quantity: Option<i32>,\n\n notes: Option<String>,\n\n unit_price: Option<f32>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct InventoryEntry {\n\n pub id: i32,\n\n pub mpn: String,\n\n pub quantity: i32,\n\n pub consumed: 
i32,\n\n pub unit_price: Option<f32>,\n\n pub notes: Option<String>,\n\n pub part_ver: i32,\n\n pub part_id: i32,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n", "file_path": "src/tables/inventory.rs", "rank": 45, "score": 96014.04713331586 }, { "content": "/// Set config\n\npub fn save_config(config: &Config, config_path: &Path) -> anyhow::Result<()> {\n\n // With init data create config.toml\n\n let config_string = toml::to_string(config).unwrap();\n\n\n\n // Save config toml\n\n std::fs::write(config_path, config_string)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 46, "score": 95632.94261747148 }, { "content": "DROP TABLE IF EXISTS parts;\n", "file_path": "migrations/2020-05-19-012929_mqty_added/down.sql", "rank": 47, "score": 93254.83823672676 }, { "content": "-- This file should undo anything in `up.sql`\n\nCREATE TABLE new_parts (\n\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n updated_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n pn VARCHAR UNIQUE NOT NULL, -- part number\n\n mpn VARCHAR UNIQUE NOT NULL, -- manufacturer part number\n\n digikeypn VARCHAR UNIQUE, -- digikey part number\n\n descr VARCHAR NOT NULL, -- description\n\n ver INTEGER NOT NULL, -- version of part\n\n val VARCHAR -- stores the part value (if any)\n\n);\n\n\n\nINSERT INTO new_parts SELECT id, created_at, updated_at, pn, mpn, digikeypn, descr, ver, val FROM parts;\n", "file_path": "migrations/2020-05-19-012929_mqty_added/down.sql", "rank": 48, "score": 92845.48999044 }, { "content": "pub fn delete_build(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n) -> std::result::Result<usize, diesel::result::Error> {\n\n use schema::builds;\n\n\n\n diesel::delete(builds::dsl::builds.filter(builds::dsl::id.eq(id))).execute(conn)\n\n}\n\n\n\n// Inventory related\n\n\n", "file_path": "src/lib.rs", "rank": 49, "score": 84348.5357670193 }, { "content": "/// Using a list of parts, this 
function determines the line items for a BOM\n\nfn get_line_items_from_parts(\n\n parts: &[schematic::Part],\n\n variant: &VariantDef,\n\n ignore_list: &[String],\n\n) -> Vec<LineItem> {\n\n let mut list: Vec<LineItem> = Vec::new();\n\n\n\n // Process the part list\n\n 'outer: for part in parts {\n\n // Check to make sure it's not GND, FIDUCIAL, MOUNTING, FRAME, +3V3, etc\n\n for entry in ignore_list.iter() {\n\n if part.deviceset.contains(entry) {\n\n continue 'outer;\n\n }\n\n }\n\n\n\n // Technology is optional. So need to do a match here.\n\n let mut technology = part.technology.clone().unwrap_or_default();\n\n\n\n // Check if it's no stuff. If so skip over adding it.\n", "file_path": "src/tables/bom.rs", "rank": 50, "score": 82383.42058313302 }, { "content": "pub fn find_build_by_id(\n\n conn: &SqliteConnection,\n\n id: &i32,\n\n) -> std::result::Result<Build, diesel::result::Error> {\n\n use schema::builds;\n\n\n\n builds::dsl::builds\n\n .filter(builds::dsl::id.eq(id))\n\n .first(conn)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 82123.20585780652 }, { "content": "/// Helper function that prints a list of SimpleParts\n\nfn print_simple_part_list(list: &[SimplePart]) {\n\n let mut table = Table::new();\n\n table.add_row(row![\n\n \"PART NUMBER\",\n\n \"MPN\",\n\n \"DESCRIPTION\",\n\n \"MULTI QUANTITY\",\n\n \"VERSION\",\n\n \"NO STUFF\"\n\n ]);\n\n\n\n for part in list {\n\n table.add_row(row![\n\n part.pn,\n\n part.mpn,\n\n part.descr,\n\n part.mqty,\n\n part.ver,\n\n part.nostuff\n\n ]);\n", "file_path": "src/tables/bom.rs", "rank": 52, "score": 80448.5224492588 }, { "content": "-- Your SQL goes here\n\nALTER TABLE parts ADD COLUMN mqty INTEGER NOT NULL DEFAULT 1;", "file_path": "migrations/2020-05-19-012929_mqty_added/up.sql", "rank": 53, "score": 79133.6062497213 }, { "content": "pub fn test_connection() -> SqliteConnection {\n\n // Start a connection from memory\n\n let conn = SqliteConnection::establish(\":memory:\").expect(\"Unable to 
establish db in memory!\");\n\n\n\n // This will run the necessary migrations.\n\n embedded_migrations::run(&conn).expect(\"Unable to run test migration.\");\n\n\n\n // Return the active connection\n\n conn\n\n}\n\n\n\n/* START: Part Related Tests */\n\nmod part_tests {\n\n\n\n #[test]\n\n fn create_part_check_if_created() {\n\n use super::*;\n\n use models::Part;\n\n use schema::parts::dsl::*;\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 77325.73823148251 }, { "content": "-- Used to track bom contents\n\nCREATE TABLE parts_parts ( -- i.e. boms\n\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n updated_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n quantity INTEGER NOT NULL, -- quantity that is used in this BOM\n\n bom_ver INTEGER NOT NULL, -- version of the bom that this is tied to\n\n refdes VARCHAR NOT NULL, -- tracking the refdes\n\n nostuff INTEGER NOT NULL DEFAULT 0, -- determines if stuff or no stuff\n\n bom_part_id INTEGER NOT NULL, -- this is simply a part that has a BOM associated with it\n\n part_id INTEGER NOT NULL, -- this table has entries that are associated with individual parts.\n\n FOREIGN KEY(bom_part_id) REFERENCES parts(id) ON DELETE CASCADE ON UPDATE CASCADE,\n\n FOREIGN KEY(part_id) REFERENCES parts(id) ON DELETE CASCADE ON UPDATE CASCADE\n\n);", "file_path": "migrations/2020-05-15-134125_initial_setup/up.sql", "rank": 55, "score": 73521.77118408414 }, { "content": "#[derive(Serialize)]\n\nstruct BomEntry {\n\n pn: String,\n\n quantity: i32,\n\n refdes: String,\n\n mpn: String,\n\n descr: String,\n\n ver: i32,\n\n inventory_qty: i32,\n\n no_stuff: i32,\n\n}\n\n\n", "file_path": "src/tables/bom.rs", "rank": 56, "score": 73481.31554227475 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\nstruct LineItem {\n\n name: String,\n\n pn: String,\n\n quantity: i32,\n\n nostuff: i32,\n\n}\n\n\n", "file_path": "src/tables/bom.rs", "rank": 57, 
"score": 73481.31554227475 }, { "content": "-- Your SQL goes here\n\nCREATE TABLE parts (\n\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n updated_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n pn VARCHAR UNIQUE NOT NULL, -- part number\n\n mpn VARCHAR UNIQUE NOT NULL, -- manufacturer part number\n\n digikeypn VARCHAR UNIQUE, -- digikey part number\n\n descr VARCHAR NOT NULL, -- description\n\n ver INTEGER NOT NULL, -- version of part\n\n val VARCHAR -- stores the part value (if any)\n\n);\n\n\n", "file_path": "migrations/2020-05-15-134125_initial_setup/up.sql", "rank": 58, "score": 71535.78921772842 }, { "content": "ALTER TABLE new_parts RENAME TO parts;", "file_path": "migrations/2020-05-19-012929_mqty_added/down.sql", "rank": 59, "score": 71052.66248434983 }, { "content": "/// Updates a SimplePart based on attribute data from the library.\n\nfn get_simplepart_from_library(\n\n item: &LineItem,\n\n eagle: &schematic::Eagle,\n\n library_name: &str,\n\n) -> SimplePart {\n\n // Flags\n\n // let mut found = false;\n\n // let mut is_alias = false;\n\n\n\n // Quantity local\n\n // let mut mqty_temp: i32 = 1;\n\n let mut part = SimplePart {\n\n pn: item.pn.clone(),\n\n nostuff: item.nostuff,\n\n mqty: item.quantity,\n\n ..Default::default()\n\n };\n\n\n\n for library in &eagle.drawing.schematic.libraries.library {\n\n // Check if it's the library we care about.\n", "file_path": "src/tables/bom.rs", "rank": 60, "score": 61634.99855530387 }, { "content": "DROP TABLE parts_parts;", "file_path": "migrations/2020-05-15-134125_initial_setup/down.sql", "rank": 61, "score": 52091.54507362673 }, { "content": "-- Used to keep a ledger of all part inventory changes.\n\nCREATE TABLE inventories (\n\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n updated_at TIMESTAMP NOT NULL DEFAULT 
(datetime('now','localtime')),\n\n quantity INTEGER NOT NULL, -- how much there are available\n\n consumed INTEGER NOT NULL DEFAULT 0, -- how many that were consumed\n\n unit_price REAL, -- the unit price\n\n notes TEXT, -- notes\n\n part_ver INTEGER NOT NULL, -- the version of the part this is referring to..\n\n part_id INTEGER NOT NULL, -- the part that is associated with the inventory\n\n FOREIGN KEY(part_id) REFERENCES parts(id) --only one part associated with this inventory (many to one)\n\n);\n\n\n", "file_path": "migrations/2020-05-15-134125_initial_setup/up.sql", "rank": 66, "score": 46713.360887266455 }, { "content": "-- Used to track builds\n\nCREATE TABLE builds (\n\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n updated_at TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n estimated_completion TIMESTAMP NOT NULL DEFAULT (datetime('now','localtime')),\n\n quantity INTEGER NOT NULL, -- how much there are\n\n cost REAL, -- cost per unit\n\n complete INTEGER NOT NULL, -- how much there are\n\n notes TEXT, -- text for build details\n\n part_ver INTEGER NOT NULL, -- version of the BOM we're using\n\n part_id INTEGER NOT NULL, -- the part/BOM we're building\n\n FOREIGN KEY(part_id) REFERENCES parts(id)\n\n\n\n);\n\n\n", "file_path": "migrations/2020-05-15-134125_initial_setup/up.sql", "rank": 67, "score": 46707.73937529921 }, { "content": "-- This file should undo anything in `up.sql`\n\nDROP TABLE parts;\n", "file_path": "migrations/2020-05-15-134125_initial_setup/down.sql", "rank": 68, "score": 46017.355715582475 }, { "content": "#[test]\n\nfn test_yes_no_expect_false() {\n\n let input = b\"\\n\";\n\n let mut output = Vec::new();\n\n\n\n let answer = {\n\n let mut prompt = Prompt {\n\n reader: &input[..],\n\n writer: &mut output,\n\n };\n\n\n\n prompt.ask_yes_no_question(\"Would you like to do things?\")\n\n };\n\n\n\n let output = String::from_utf8(output).expect(\"Not 
UTF-8\");\n\n\n\n assert_eq!(\"Would you like to do things? (y/n) \", output);\n\n assert_eq!(false, answer);\n\n}\n", "file_path": "src/prompt.rs", "rank": 69, "score": 41422.08377577444 }, { "content": "#[test]\n\nfn test_yes_no_expect_true() {\n\n let input = b\"y\\n\";\n\n let mut output = Vec::new();\n\n\n\n let answer = {\n\n let mut prompt = Prompt {\n\n reader: &input[..],\n\n writer: &mut output,\n\n };\n\n\n\n prompt.ask_yes_no_question(\"Would you like to do things?\")\n\n };\n\n\n\n let output = String::from_utf8(output).expect(\"Not UTF-8\");\n\n\n\n assert_eq!(\"Would you like to do things? (y/n) \", output);\n\n assert_eq!(true, answer);\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 70, "score": 41422.08377577444 }, { "content": "#[test]\n\nfn test_text_entry_with_whitespace() {\n\n let input = b\"I love cookies! \\n\";\n\n let mut output = Vec::new();\n\n\n\n let answer = {\n\n let mut prompt = Prompt {\n\n reader: &input[..],\n\n writer: &mut output,\n\n };\n\n\n\n prompt.ask_text_entry(\"Do you like cookies?\")\n\n };\n\n\n\n let output = String::from_utf8(output).expect(\"Not UTF-8\");\n\n\n\n assert_eq!(\"Do you like cookies?\", output); // output to stdout\n\n assert_eq!(\"I love cookies!\", answer); // input\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 71, "score": 41422.08377577444 }, { "content": "-- Your SQL goes here\n", "file_path": "migrations/2020-05-19-012929_mqty_added/up.sql", "rank": 72, "score": 28543.102731846386 }, { "content": "-- This file should undo anything in `up.sql`\n", "file_path": "migrations/2020-05-19-012929_mqty_added/down.sql", "rank": 73, "score": 28543.102731846386 }, { "content": "pub mod bom;\n\npub mod builds;\n\npub mod inventory;\n\npub mod parts;\n", "file_path": "src/tables.rs", "rank": 74, "score": 24775.753762458862 }, { "content": " mpn: &part.mpn,\n\n descr: &part.descr,\n\n ver: &part.ver,\n\n mqty: &part.mqty,\n\n };\n\n\n\n // Not found, create\n\n match existing {\n\n Ok(e) => {\n\n // 
Check if can be updated\n\n if prompt_to_update_part(&mut app.prompt, &npart, &e) {\n\n update_part(&app.conn, &e.id, &npart).expect(\"Error updating part!\");\n\n }\n\n }\n\n Err(_) => {\n\n println!(\"Creating: {:?}\", npart);\n\n create_part(&app.conn, &npart).expect(\"Unable to create part!\");\n\n }\n\n }\n\n\n", "file_path": "src/tables/bom.rs", "rank": 75, "score": 23470.90336035415 }, { "content": " \"{} version {} was not found! Latest is: {}\",\n\n part_number, version, part.ver\n\n );\n\n std::process::exit(1);\n\n }\n\n\n\n let build = NewUpdateBuild {\n\n quantity: &quantity,\n\n complete: &0,\n\n notes: Some(\"\"),\n\n part_ver: &version,\n\n part_id: &part.id,\n\n };\n\n\n\n create_build(&app.conn, &build).expect(\"Unable to create build!\");\n\n\n\n println!(\n\n \"Created build of {} ver: {} with qty: {}\",\n\n part.pn, part.ver, quantity\n\n );\n\n}\n\n\n", "file_path": "src/tables/builds.rs", "rank": 76, "score": 23466.312807835006 }, { "content": " pn: &bom_pn,\n\n mpn: &bom_pn,\n\n descr: &bom_desc,\n\n ver: &revision,\n\n mqty: &1,\n\n };\n\n\n\n create_part(&app.conn, &part).expect(\"Unable to create BOM part!\");\n\n }\n\n }\n\n\n\n println!(\"\\nPARTS LIST:\");\n\n let list = get_line_items_from_parts(\n\n &eagle.drawing.schematic.parts.part,\n\n &variant,\n\n &app.config.part_number_ignore_list,\n\n );\n\n\n\n // Vector of SimpleParts\n\n let mut simple_part_list: Vec<SimplePart> = Vec::new();\n", "file_path": "src/tables/bom.rs", "rank": 77, "score": 23466.190819726806 }, { "content": "\n\n // Skip a line\n\n println!();\n\n\n\n // Different actions depending if it exists\n\n match res {\n\n Ok(bom) => {\n\n let question = format!(\"BOM {} found! Would you like to update it?\", bom_pn);\n\n let yes = app.prompt.ask_yes_no_question(&question);\n\n\n\n // If it already exists error/ask to update. Then runs the update routine instead\n\n if yes {\n\n // Ask if a new revision is requred\n\n let question =\n\n format!(\"BOM {} found! 
Would you like to up-rev the design?\", bom_pn);\n\n let yes = app.prompt.ask_yes_no_question(&question);\n\n\n\n if yes {\n\n // Increment the version\n\n revision = bom.ver + 1;\n", "file_path": "src/tables/bom.rs", "rank": 78, "score": 23466.10910505259 }, { "content": " None => continue,\n\n };\n\n\n\n // Get the notes\n\n let notes = record.notes.as_deref();\n\n\n\n // Check if part number exists\n\n let part = find_part_by_mpn(&app.conn, &record.mpn).expect(\"Unable to get part.\");\n\n\n\n // Commits change\n\n let entry = NewUpdateInventoryEntry {\n\n part_id: &part.id,\n\n part_ver: &part.ver,\n\n unit_price: record.unit_price.as_ref(),\n\n quantity: &quantity,\n\n consumed: &0,\n\n notes,\n\n };\n\n\n\n // Finally create the inventory if all look ok!\n\n create_inventory(&app.conn, &entry).expect(\"Unable to create inventory item.\");\n\n\n\n // Print out that it was successful\n\n println!(\"Created inventory for {}!\", part.pn);\n\n }\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 79, "score": 23465.808969076363 }, { "content": "\n\n // Then pop it into a serializeable struct\n\n let line = BomEntry {\n\n quantity: entry.quantity,\n\n refdes: entry.refdes,\n\n pn: details.pn,\n\n mpn: details.mpn,\n\n descr: details.descr,\n\n ver: details.ver,\n\n inventory_qty,\n\n no_stuff: entry.nostuff,\n\n };\n\n\n\n wtr.serialize(line).expect(\"Unable to serialize.\");\n\n wtr.flush().expect(\"Unable to flush\");\n\n }\n\n\n\n println!(\"Inventory list exported to {}\", filename);\n\n}\n\n\n\n// pub fn delete(part_number: &str, version: &i32) {\n\n// // TODO: confirm exists\n\n// // TODO: confirm delete\n\n// // TODO delete query\n\n// }\n", "file_path": "src/tables/bom.rs", "rank": 80, "score": 23465.738698797875 }, { "content": "\n\n // Udate build complete\n\n let update_build = NewUpdateBuild {\n\n quantity: &build.quantity,\n\n complete: &1,\n\n notes: Some(&notes),\n\n part_ver: &build.part_ver,\n\n part_id: &build.part_id,\n\n };\n\n\n\n // 
Update build by id\n\n update_build_by_id(&app.conn, &build.id, &update_build).expect(\"Unable to update build!\");\n\n\n\n // Push this inventory item\n\n create_inventory(&app.conn, &new_inventory).expect(\"Unable to create inventory.\");\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct BuildExport {\n\n pn: String,\n\n mpn: String,\n\n desc: String,\n\n quantity_in_stock: i32,\n\n quantity_needed: i32,\n\n checked: Option<bool>,\n\n}\n\n\n", "file_path": "src/tables/builds.rs", "rank": 81, "score": 23465.689282943662 }, { "content": " table.add_row(row![\n\n \"Change to:\",\n\n new.pn,\n\n new.mpn,\n\n new.descr,\n\n new.mqty,\n\n new.ver\n\n ]);\n\n table.printstd();\n\n\n\n // Return response to question\n\n return prompt.ask_yes_no_question(&question);\n\n }\n\n\n\n // Otherwise return false\n\n false\n\n}\n\n\n", "file_path": "src/tables/bom.rs", "rank": 82, "score": 23464.702937861475 }, { "content": "\n\n let mut found = false;\n\n let mut bom_pn = \"\".to_string();\n\n let mut bom_desc = \"\".to_string();\n\n let mut revision = 1;\n\n\n\n // Parses it to make sure it has a global variable defining the part # for the assembly\n\n for attribute in &eagle.drawing.schematic.attributes.attribute {\n\n // Get the part description\n\n if attribute.name == \"DESC\" {\n\n bom_desc = attribute.value.clone();\n\n println!(\"Desc: {}\", bom_desc);\n\n }\n\n\n\n // Get the part name\n\n if attribute.name == \"PN\" {\n\n found = true;\n\n bom_pn = attribute.value.clone();\n\n println!(\"Part name: {}\", bom_pn);\n\n }\n", "file_path": "src/tables/bom.rs", "rank": 83, "score": 23464.281153884418 }, { "content": "extern crate diesel;\n\n\n\nuse crate::{models::*, *};\n\nuse prettytable::Table;\n\n\n\nuse anyhow::anyhow;\n\n\n\nuse self::diesel::prelude::*;\n\n\n\nuse std::io::{BufReader, BufWriter};\n\nuse std::{fmt::Debug, fs::File};\n\n\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\n\n\n#[derive(Debug, Deserialize)]\n", 
"file_path": "src/tables/inventory.rs", "rank": 84, "score": 23463.819133063913 }, { "content": "extern crate diesel;\n\n\n\nuse chrono::Utc;\n\nuse prettytable::Table;\n\n\n\nuse self::diesel::prelude::*;\n\nuse crate::{models::*, *};\n\n\n\n// Borrowing shortage generation from inventory\n\nuse super::inventory;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse std::{\n\n fs::File,\n\n io::{self, BufWriter},\n\n};\n\n\n", "file_path": "src/tables/builds.rs", "rank": 85, "score": 23463.405337722008 }, { "content": " // Check if show_all_entries\n\n if !show_all_entries && inventory.quantity == 0 {\n\n continue;\n\n }\n\n\n\n // Check if part number exists\n\n let part = find_part_by_id(&app.conn, &inventory.part_id).expect(\"Unable to get part.\");\n\n\n\n table.add_row(row![\n\n part.pn,\n\n part.descr,\n\n inventory.quantity,\n\n inventory.consumed,\n\n inventory.unit_price.unwrap_or(0.0),\n\n inventory.notes.unwrap_or_else(|| \"\".to_string()),\n\n inventory.part_ver\n\n ]);\n\n }\n\n\n\n // Change output depending on how many parts (or lack thereof)\n\n if table.len() == 1 {\n\n println!(\"No inventory to display.\");\n\n } else {\n\n println!(\"Displaying {} parts\", table.len() - 1);\n\n table.printstd();\n\n }\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 86, "score": 23463.21639987959 }, { "content": " // // Sort it out\n\n // let alias = match alias {\n\n // Ok(x) => x,\n\n // Err(_) => {\n\n // println!(\"Unable to find alias {}!\", attribute.value);\n\n // std::process::exit(1);\n\n // }\n\n // };\n\n\n\n // // Clone these bits so the live on\n\n // part.pn = alias.pn.clone();\n\n // part.mpn = alias.mpn.clone();\n\n // part.descr = alias.descr.clone();\n\n // part.ver = alias.ver;\n\n // part.mqty = alias.mqty;\n\n }\n\n }\n\n }\n\n None => (),\n\n };\n", "file_path": "src/tables/bom.rs", "rank": 87, "score": 23463.13971614091 }, { "content": "pub struct Shortage {\n\n pub pid: i32,\n\n pub pn: String,\n\n pub mpn: String,\n\n pub 
desc: String,\n\n pub have: i32,\n\n pub needed: i32,\n\n pub short: i32,\n\n pub quantity: Option<i32>,\n\n pub notes: Option<String>,\n\n pub unit_price: Option<f32>,\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 88, "score": 23462.911037714544 }, { "content": " None => &part.ver,\n\n };\n\n\n\n // Get all the parts related to this BOM\n\n let mut results = parts_parts::dsl::parts_parts\n\n .filter(parts_parts::dsl::bom_part_id.eq(part.id))\n\n .filter(parts_parts::dsl::bom_ver.eq(ver))\n\n .load::<models::PartsPart>(&app.conn)\n\n .expect(\"Error loading parts\");\n\n\n\n // Sort the results by refdes\n\n results.sort_by(|a, b| a.refdes.cmp(&b.refdes));\n\n\n\n println!(\"Displaying {} parts\", results.len());\n\n\n\n println!(\n\n \"Part Number: {} BOM Id: {} Version: {}\",\n\n part.pn, part.id, ver\n\n );\n\n\n", "file_path": "src/tables/bom.rs", "rank": 89, "score": 23462.687841684277 }, { "content": " notes,\n\n part_ver: &record.part_ver,\n\n part_id: &record.part_id,\n\n };\n\n\n\n // Then update the entry as needed\n\n if let Err(e) = update_inventory_by_id(&app.conn, &record.id, &update) {\n\n eprintln!(\"Error updating inventory id: {}. 
Error: {}\", record.id, e);\n\n } else {\n\n println!(\"Updated: {}\", record.mpn);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 90, "score": 23462.51392065421 }, { "content": "\n\n // Get MPN, DigikeyPn from Library exerpts\n\n for item in list {\n\n // Set part attributes from library\n\n let part = get_simplepart_from_library(&item, &eagle, &app.config.library_name);\n\n\n\n if part.mpn.is_empty() {\n\n println!(\"Manufacturer part number must be set for {}\", part.pn);\n\n std::process::exit(1);\n\n }\n\n\n\n // Add to list\n\n simple_part_list.push(part.clone());\n\n\n\n // Find part\n\n let existing = find_part_by_pn(&app.conn, &part.pn);\n\n\n\n // Create update object\n\n let npart = models::NewUpdatePart {\n\n pn: &part.pn,\n", "file_path": "src/tables/bom.rs", "rank": 91, "score": 23462.072355744855 }, { "content": " let mut nostuff = 0;\n\n for var in &part.variants {\n\n if var.name == variant.name {\n\n // Only update the technology if it exists!\n\n technology = match &var.technology {\n\n Some(t) => t.to_string(),\n\n None => technology,\n\n };\n\n\n\n // Set no stuff\n\n if var.populate == Some(\"no\".to_string()) {\n\n nostuff = 1;\n\n }\n\n }\n\n }\n\n\n\n // Concatinate all the elements to form the actual part number\n\n let part_number = format!(\"{}{}{}\", part.deviceset, technology, part.device,);\n\n\n\n // Create temp line item\n", "file_path": "src/tables/bom.rs", "rank": 92, "score": 23461.526573065745 }, { "content": "extern crate diesel;\n\nextern crate quick_xml;\n\nextern crate serde;\n\n\n\nuse crate::schematic::VariantDef;\n\nuse crate::*;\n\nuse prettytable::Table;\n\nuse quick_xml::de::from_reader;\n\n\n\nuse self::diesel::prelude::*;\n\n\n\nuse chrono::Utc;\n\nuse serde::Serialize;\n\nuse std::io::{BufReader, BufWriter};\n\nuse std::{\n\n fs::File,\n\n io::{StdinLock, Stdout},\n\n};\n\n\n\n#[derive(Eq, PartialEq, Debug)]\n", "file_path": "src/tables/bom.rs", "rank": 93, "score": 23461.355753024338 }, { 
"content": " // Unit price\n\n let price = app.prompt.ask_text_entry(\"Enter unit price: \");\n\n let price: f32 = price.trim().parse().expect(\"Invalid price!\");\n\n\n\n // Then any notes.\n\n let notes = app.prompt.ask_text_entry(\"Enter notes: \");\n\n\n\n println!(\"Part number: {}\", part.pn);\n\n println!(\"Ajustment: {}\", adj);\n\n println!(\"Price: ${}\", price);\n\n println!(\"Notes: {}\", notes);\n\n let proceed = app.prompt.ask_yes_no_question(\"Look ok?\");\n\n\n\n // Confirm change (y/n)\n\n if proceed {\n\n // Commits change\n\n let entry = NewUpdateInventoryEntry {\n\n part_id: &part.id,\n\n part_ver: &part.ver,\n\n unit_price: Some(&price),\n\n quantity: &adj,\n\n consumed: &0,\n\n notes: Some(&notes),\n\n };\n\n\n\n create_inventory(&app.conn, &entry).expect(\"Unable to create inventory item.\");\n\n }\n\n}\n\n\n", "file_path": "src/tables/inventory.rs", "rank": 94, "score": 23460.98417636827 }, { "content": " }\n\n\n\n // Warning about blank description\n\n if bom_desc.is_empty() {\n\n println!(\"Warning: Blank BOM description\");\n\n }\n\n\n\n // Error if PN is not found\n\n if !found {\n\n println!(\"Please add PN attribute to schematic!\");\n\n std::process::exit(1);\n\n }\n\n\n\n // Get the variant list\n\n let mut variant: Option<VariantDef> = None;\n\n for v in &eagle.drawing.schematic.variantdefs.variantdef {\n\n // Get the current def\n\n if v.current == Some(\"yes\".to_string()) {\n\n println!(\"Variant: {}\", v.name);\n\n variant = Some(v.clone());\n", "file_path": "src/tables/bom.rs", "rank": 95, "score": 23460.9267803933 }, { "content": " used = entry.quantity;\n\n quantity -= entry.quantity\n\n }\n\n\n\n // Get string from entry.notes\n\n let notes = match entry.notes {\n\n Some(x) => x,\n\n None => \"\".to_string(),\n\n };\n\n\n\n // Create update\n\n let update = NewUpdateInventoryEntry {\n\n quantity: &new_qty,\n\n consumed: &used,\n\n unit_price: entry.unit_price.as_ref(),\n\n notes: Some(&notes),\n\n part_ver: 
&entry.part_ver,\n\n part_id: &entry.part_id,\n\n };\n\n\n", "file_path": "src/tables/builds.rs", "rank": 96, "score": 23460.341019220974 }, { "content": " let item = LineItem {\n\n name: part.name.clone(),\n\n pn: part_number,\n\n quantity: 1,\n\n nostuff,\n\n };\n\n\n\n // TODO: Check if part has attribute (MQTY). This overrides MQTY from the library.\n\n\n\n // Check if list has\n\n let mut found = false;\n\n if let Some(entry) = list\n\n .iter_mut()\n\n .find(|part| part.pn == item.pn && part.nostuff == item.nostuff)\n\n {\n\n found = true;\n\n\n\n // Increase the quantity\n\n entry.name = format!(\"{} {}\", entry.name, item.name);\n\n entry.quantity += 1;\n", "file_path": "src/tables/bom.rs", "rank": 97, "score": 23460.242814608802 }, { "content": " // Get all the parts related to this BOM\n\n let mut results = parts_parts::dsl::parts_parts\n\n .filter(parts_parts::dsl::bom_part_id.eq(part.id))\n\n .filter(parts_parts::dsl::bom_ver.eq(ver))\n\n .load::<models::PartsPart>(&app.conn)\n\n .expect(\"Error loading parts\");\n\n\n\n // Sort the results by refdes\n\n results.sort_by(|a, b| {\n\n let first = a.refdes.chars().next().unwrap();\n\n let second = b.refdes.chars().next().unwrap();\n\n first.cmp(&second)\n\n });\n\n\n\n // Create filename\n\n let filename = format!(\"{}-v{}-{}.csv\", part_number, ver, Utc::now().to_rfc3339());\n\n\n\n // File operations\n\n let file = File::create(&filename).unwrap();\n\n let file = BufWriter::new(file);\n", "file_path": "src/tables/bom.rs", "rank": 98, "score": 23460.124972178044 }, { "content": " // If theres an error exit so the user can fix the problem.\n\n match part {\n\n Err(e) => {\n\n println!(\n\n \"{} was not found! No changes were made. 
Error: {}\",\n\n record.mpn, e\n\n );\n\n std::process::exit(1);\n\n }\n\n _ => {\n\n continue;\n\n }\n\n }\n\n }\n\n\n\n // Re iterate now that we know the parts are all valid\n\n for record in &records {\n\n // We need at least a quantity to add a new record\n\n let quantity = match record.quantity {\n\n Some(q) => q,\n", "file_path": "src/tables/inventory.rs", "rank": 99, "score": 23459.25179706184 } ]
Rust
2018/dec03/rs/src/main.rs
henrywallace/advent-of-code
f50d00b47603117e25c18d4f645fadd451fb6be2
extern crate clap; #[macro_use] extern crate lazy_static; extern crate regex; use std::error::Error; use std::fs::File; use std::io::{BufReader, BufRead}; use clap::{Arg, App}; use std::collections::{HashMap, HashSet}; use std::str::FromStr; use regex::Regex; #[derive(Debug)] struct Claim { id: u32, offset: (u32, u32), size: (u32, u32), } impl FromStr for Claim { type Err = Box<Error>; fn from_str(s: &str) -> Result<Self, Self::Err> { lazy_static! { static ref RE: Regex = Regex::new(r"(?x) \#(?P<id>\d+)\s+@\s+ (?P<x>\d+),(?P<y>\d+):\s+ (?P<w>\d+)x(?P<h>\d+) ").unwrap(); } match RE.captures(s) { Some(caps) => { Ok(Claim{ id: caps["id"].parse()?, offset: (caps["x"].parse()?, caps["y"].parse()?), size: (caps["w"].parse()?, caps["h"].parse()?), }) }, None => Err(Box::<Error>::from(format!("failed to create claim from: {:?}", s))), } } } impl Claim { fn update_fabric(&self, fabric: &mut Fabric) { for i in self.offset.0..(self.offset.0+self.size.0) { for j in self.offset.1..(self.offset.1+self.size.1) { fabric.entry((i, j)) .or_insert(vec![]) .push(self.id); } } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_fabric_one_simple() { let claim: Claim = "#1 @ 1,2: 1x2".parse().unwrap(); let mut actual = Fabric::new(); claim.update_fabric(&mut actual); let expect: Fabric = [ ((1, 2), vec![1]), ((1, 3), vec![1]), ].iter().cloned().collect(); assert_eq!(expect, actual); } #[test] fn test_fabric_two_overlap() { let claim1: Claim = "#1 @ 0,0: 2x1".parse().unwrap(); let claim2: Claim = "#2 @ 1,0: 2x1".parse().unwrap(); let mut actual = Fabric::new(); claim1.update_fabric(&mut actual); claim2.update_fabric(&mut actual); let expect: Fabric = [ ((0, 0), vec![1]), ((1, 0), vec![1, 2]), ((2, 0), vec![2]), ].iter().cloned().collect(); assert_eq!(expect, actual); } } type Fabric = HashMap<(u32, u32), Vec<u32>>; fn part1(input: &str) -> Result<(), Box<Error>> { let mut fabric = Fabric::new(); let f = File::open(input)?; let buf = BufReader::new(f); for line in buf.lines() { let 
claim: Claim = line?.parse()?; claim.update_fabric(&mut fabric); } let mut total = 0; for v in fabric.values() { if v.len() > 1 { total += 1; } } println!("{}", total); Ok(()) } fn part2(input: &str) -> Result<(), Box<Error>> { let mut fabric = Fabric::new(); let f = File::open(input)?; let buf = BufReader::new(f); for line in buf.lines() { let claim: Claim = line?.parse()?; claim.update_fabric(&mut fabric); } let mut alone: HashSet<u32> = HashSet::new(); let mut not_alone: HashSet<u32> = HashSet::new(); for v in fabric.values() { if v.len() == 1 { alone.insert(v[0]); } else { for id in v.iter() { not_alone.insert(*id); } } } println!("{:?}", alone.difference(&not_alone)); Ok(()) } fn main() { let matches = App::new("dec01") .arg(Arg::with_name("input").required(true)) .arg(Arg::with_name("part").long("part").required(true) .takes_value(true) .possible_values(&["1", "2"])) .get_matches(); let input = matches.value_of("input").unwrap(); match matches.value_of("part").unwrap() { "1" => part1(input).unwrap(), "2" => part2(input).unwrap(), _ => unreachable!(), } }
extern crate clap; #[macro_use] extern crate lazy_static; extern crate regex; use std::error::Error; use std::fs::File; use std::io::{BufReader, BufRead}; use clap::{Arg, App}; use std::collections::{HashMap, HashSet}; use std::str::FromStr; use regex::Regex; #[derive(Debug)] struct Claim { id: u32, offset: (u32, u32), size: (u32, u32), } impl FromStr for Claim { type Err = Box<Error>; fn from_str(s: &str) -> Result<Self, Self::Err> { lazy_static! { static ref RE: Regex = Regex::new(r"(?x) \#(?P<id>\d+)\s+@\s+ (?P<x>\d+),(?P<y>\d+):\s+ (?P<w>\d+)x(?P<h>\d+) ").unwrap(); } match RE.captures(s) { Some(caps) => { Ok(Claim{ id: caps["id"].parse()?, offset: (caps["x"].parse()?, caps["y"].parse()?), size: (caps["w"].parse()?, caps["h"].parse()?), }) }, None => Err(Box::<Error>::from(format!("failed to create claim from: {:?}", s))), } } } impl Claim { fn update_fabric(&self, fabric: &mut Fabric) { for i in self.offset.0..(self.offset.0+self.size.0) { for j in self.offset.1..(self.offset.1+self.size.1) { fabric.entry((i, j)) .or_insert(vec![]) .push(self.id); } } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_fabric_one_simple() { let claim: Claim = "#1 @ 1,2: 1x2".parse().unwrap(); let mut actual = Fabric::new(); claim.update_fabric(&mut actual); let expect: Fabric = [ ((1, 2), vec![1]), ((1, 3), vec![1]), ].iter().cloned().collect(); assert_eq!(expect, actual); } #[test] fn test_fabric_two_overlap() { let claim1: Claim = "#1 @ 0,0: 2x1".parse().unwrap(); let claim2: Claim = "#2 @ 1,0: 2x1".parse().unwrap(); let mut actual = Fabric::new(); claim1.update_fabric(&mut actual); claim2.update_fabric(&mut actual); let expect: Fabric = [ ((0, 0), vec![1]), ((1, 0), vec![1, 2]), ((2, 0), vec![2]), ].iter().cloned().collect(); assert_eq!(expect, actual); } } type Fabric = HashMap<(u32, u32), Vec<u32>>; fn part1(input: &str) -> Result<(), Box<Error>> { let mut fabric = Fabric::new(); let f = File::open(input)?; let buf = BufReader::new(f); for line in buf.lines() { let 
claim: Claim = line?.parse()?; claim.update_fabric(&mut fabric); } let mut total = 0; for v in fabric.values() { if v.len() > 1 { total += 1; } } println!("{}", total); Ok(()) } fn part2(input: &str) -> Result<(), Box<Error>> { let mut fabric = Fabric::new(); let f = File::open(input)?; let buf = BufReader::new(f); for line in buf.lines() { let claim: Claim = line?.parse()?; claim.update_fabric(&mut fabric); } let mut alone: HashSet<u32> = HashSet::new(); let mut
} println!("{:?}", alone.difference(&not_alone)); Ok(()) } fn main() { let matches = App::new("dec01") .arg(Arg::with_name("input").required(true)) .arg(Arg::with_name("part").long("part").required(true) .takes_value(true) .possible_values(&["1", "2"])) .get_matches(); let input = matches.value_of("input").unwrap(); match matches.value_of("part").unwrap() { "1" => part1(input).unwrap(), "2" => part2(input).unwrap(), _ => unreachable!(), } }
not_alone: HashSet<u32> = HashSet::new(); for v in fabric.values() { if v.len() == 1 { alone.insert(v[0]); } else { for id in v.iter() { not_alone.insert(*id); } }
function_block-random_span
[ { "content": "type GuardID = u32;\n\n\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 1, "score": 92777.30177134293 }, { "content": "fn part1(input: &str) -> Result<(), Box<Error>> {\n\n let f = File::open(input)?;\n\n let buf = BufReader::new(f);\n\n let mut records = vec![];\n\n for line in buf.lines() {\n\n let rec: Record = line?.parse()?;\n\n records.push(rec);\n\n }\n\n records.sort_by(|rec1, rec2| rec1.timestamp.cmp(&rec2.timestamp));\n\n let totals = total_asleep(records)?;\n\n let mut top: GuardID = 0;\n\n let mut most_sleepy = Duration::zero();\n\n for (id, dur) in totals {\n\n if dur > most_sleepy {\n\n top = id;\n\n most_sleepy = dur;\n\n }\n\n }\n\n println!(\"Guard {} slept the most at {}min\", top, most_sleepy.num_minutes());\n\n Ok(())\n\n}\n\n\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 2, "score": 82053.26065875971 }, { "content": "fn part1(input: &str) -> Result<i32, Box<Error>> {\n\n let f = File::open(input)?;\n\n let buf = BufReader::new(f);\n\n let mut total = 0;\n\n for line in buf.lines() {\n\n total += line?.parse::<i32>()?\n\n }\n\n Ok(total)\n\n}\n\n\n", "file_path": "2018/dec01/rs/src/main.rs", "rank": 6, "score": 79047.12266659638 }, { "content": "fn part1(input: &str) -> Result<i32, Box<Error>> {\n\n let f = File::open(input)?;\n\n let buf = BufReader::new(f);\n\n let (mut total_2, mut total_3) = (0, 0);\n\n for line in buf.lines() {\n\n let (exactly_2, exactly_3) = exactly_23(line?.as_str());\n\n total_2 += exactly_2 as i32;\n\n total_3 += exactly_3 as i32;\n\n }\n\n Ok(total_2 * total_3)\n\n}\n\n\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 7, "score": 79047.12266659638 }, { "content": "fn part2(input: &str) -> Result<String, Box<Error>> {\n\n let mut box_ids = Vec::new();\n\n let f = File::open(input)?;\n\n let buf = BufReader::new(f);\n\n for line in buf.lines() {\n\n box_ids.push(line?);\n\n }\n\n\n\n for i in 0..box_ids.len() {\n\n for j in i+1..box_ids.len() {\n\n if let Some(common) = 
are_correct(&box_ids[i], &box_ids[j]) {\n\n return Ok(common);\n\n }\n\n }\n\n }\n\n\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 8, "score": 79047.12266659638 }, { "content": "fn part2(input: &str) -> Result<i32, Box<Error>> {\n\n let mut freqs = Vec::new();\n\n let f = File::open(input)?;\n\n let buf = BufReader::new(f);\n\n for line in buf.lines() {\n\n freqs.push(line?.parse::<i32>()?);\n\n }\n\n\n\n let mut seen = HashSet::new();\n\n seen.insert(0);\n\n\n\n let mut total = 0;\n\n loop {\n\n for freq in freqs.iter() {\n\n total += freq;\n\n if seen.contains(&total) {\n\n return Ok(total);\n\n }\n\n seen.insert(total);\n\n }\n\n }\n\n}\n\n\n", "file_path": "2018/dec01/rs/src/main.rs", "rank": 9, "score": 79047.12266659638 }, { "content": "fn total_asleep(records: Vec<Record>) -> Result<HashMap<GuardID, Duration>, Box<Error>> {\n\n let mut totals = HashMap::new();\n\n if records.is_empty() {\n\n return Ok(totals)\n\n }\n\n let mut it = records.iter();\n\n let head = it.next().unwrap(); // TODO: handle empty records\n\n let mut curr_id: GuardID;\n\n let mut last_asleep: Option<chrono::NaiveDateTime> = None;\n\n match head.event {\n\n Event::Begin{guard_id} => curr_id = guard_id,\n\n _ => return Err(Box::<Error>::from(format!(\"unexpected first record: {:?}\", head))),\n\n }\n\n\n\n for rec in it {\n\n match rec.event {\n\n Event::Begin{guard_id} => {\n\n if let Some(ts) = last_asleep {\n\n let dur = rec.timestamp.signed_duration_since(ts);\n\n let prev = *totals.entry(curr_id).or_insert(Duration::zero());\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 10, "score": 72356.94299589038 }, { "content": "fn exactly_23(box_id: &str) -> (bool, bool) {\n\n let mut counts = HashMap::new();\n\n for c in box_id.chars() {\n\n *counts.entry(c).or_insert(0) += 1;\n\n }\n\n let (mut exactly_2, mut exactly_3) = (false, false);\n\n for count in counts.values() {\n\n match count {\n\n 2 => exactly_2 = true,\n\n 3 => exactly_3 = 
true,\n\n _ => {},\n\n }\n\n if exactly_2 && exactly_3 {\n\n break\n\n }\n\n }\n\n (exactly_2, exactly_3)\n\n}\n\n\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 11, "score": 70790.32441914076 }, { "content": "fn are_correct(box_id1: &str, box_id2: &str) -> Option<String> {\n\n let mut common = Vec::new();\n\n let mut diff = false;\n\n for (c1, c2) in box_id1.chars().zip(box_id2.chars()) {\n\n if c1 != c2 {\n\n if diff {\n\n return None\n\n }\n\n diff = true;\n\n } else {\n\n common.push(c1.to_string());\n\n }\n\n }\n\n println!(\"{:?}\", common);\n\n Some(common.join(\"\"))\n\n}\n\n\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 12, "score": 59153.29606850951 }, { "content": "#[derive(Debug)]\n\nstruct Record {\n\n timestamp: chrono::NaiveDateTime,\n\n event: Event,\n\n}\n\n\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 13, "score": 39800.93393531142 }, { "content": "fn main() {\n\n let matches = App::new(\"dec01\")\n\n .arg(Arg::with_name(\"input\").required(true))\n\n .arg(Arg::with_name(\"part\").long(\"part\").required(true)\n\n .takes_value(true)\n\n .possible_values(&[\"1\", \"2\"]))\n\n .get_matches();\n\n\n\n let input = matches.value_of(\"input\").unwrap();\n\n match matches.value_of(\"part\").unwrap() {\n\n \"1\" => println!(\"{}\", part1(input).unwrap()),\n\n \"2\" => println!(\"{}\", part2(input).unwrap()),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "2018/dec01/rs/src/main.rs", "rank": 15, "score": 30478.202938502072 }, { "content": "fn main() {\n\n let matches = App::new(\"dec04\")\n\n .arg(Arg::with_name(\"input\").required(true))\n\n .arg(Arg::with_name(\"part\").long(\"part\").required(true)\n\n .takes_value(true)\n\n .possible_values(&[\"1\", \"2\"]))\n\n .get_matches();\n\n\n\n let input = matches.value_of(\"input\").unwrap();\n\n match matches.value_of(\"part\").unwrap() {\n\n \"1\" => part1(input).unwrap(),\n\n // \"2\" => part2(input).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": 
"2018/dec04/rs/src/main.rs", "rank": 16, "score": 30478.202938502072 }, { "content": "fn main() {\n\n let matches = App::new(\"dec01\")\n\n .arg(Arg::with_name(\"input\").required(true))\n\n .arg(Arg::with_name(\"part\").long(\"part\").required(true)\n\n .takes_value(true)\n\n .possible_values(&[\"1\", \"2\"]))\n\n .get_matches();\n\n\n\n let input = matches.value_of(\"input\").unwrap();\n\n match matches.value_of(\"part\").unwrap() {\n\n \"1\" => println!(\"{}\", part1(input).unwrap()),\n\n \"2\" => println!(\"{}\", part2(input).unwrap()),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 17, "score": 30478.202938502072 }, { "content": "extern crate clap;\n\nextern crate chrono;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\nextern crate regex;\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, BufRead};\n\nuse clap::{Arg, App};\n\nuse chrono::{Duration, NaiveDateTime};\n\nuse std::str::FromStr;\n\nuse regex::Regex;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 22, "score": 11.634699972040144 }, { "content": "extern crate clap;\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, BufRead};\n\nuse clap::{Arg, App};\n\nuse std::collections::HashMap;\n\n\n", "file_path": "2018/dec02/rs/src/main.rs", "rank": 23, "score": 8.104900984869873 }, { "content": "extern crate clap;\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, BufRead};\n\nuse clap::{Arg, App};\n\nuse std::collections::HashSet;\n\n\n\n\n", "file_path": "2018/dec01/rs/src/main.rs", "rank": 24, "score": 8.104900984869873 }, { "content": " None => return Err(Box::<Error>::from(format!(\"failed to parse Record from: {:?}\", s))),\n\n Some(caps) => caps,\n\n };\n\n let ts = NaiveDateTime::parse_from_str(&caps[\"timestamp\"], \"%Y-%m-%d %H:%M\")?;\n\n let event: Event =\n\n if let Some(id) = caps.name(\"id\") {\n\n 
Event::Begin{guard_id: id.as_str().parse()?}\n\n } else if let Some(repose) = caps.name(\"repose\") {\n\n match repose.as_str() {\n\n \"falls asleep\" => Event::FallsAsleep,\n\n \"wakes up\" => Event::WakesUp,\n\n _ => unreachable!(\"oh no!\"),\n\n }\n\n } else {\n\n return Err(Box::<Error>::from(format!(\"failed to parse Event from: {:?}\", s)))\n\n };\n\n\n\n Ok(Record{\n\n timestamp: ts,\n\n event: event,\n\n })\n\n }\n\n}\n\n\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 25, "score": 6.260674112181578 }, { "content": " totals.insert(curr_id, prev + dur);\n\n };\n\n curr_id = guard_id;\n\n },\n\n Event::FallsAsleep => last_asleep = Some(rec.timestamp),\n\n Event::WakesUp => {\n\n if let Some(ts) = last_asleep {\n\n let dur = rec.timestamp.signed_duration_since(ts);\n\n let prev = *totals.entry(curr_id).or_insert(Duration::zero());\n\n totals.insert(curr_id, prev + dur);\n\n };\n\n last_asleep = None;\n\n }\n\n }\n\n }\n\n Ok(totals)\n\n}\n\n\n", "file_path": "2018/dec04/rs/src/main.rs", "rank": 26, "score": 5.296420354602602 } ]
Rust
src/post.rs
Lonami/pagong
bb5b24c733c435e8c041e5a91112e3ae5ad8cacc
use crate::config::{ Config, DATE_FMT, META_KEY_CATEGORY, META_KEY_CREATION_DATE, META_KEY_MODIFIED_DATE, META_KEY_TAGS, META_KEY_TEMPLATE, META_KEY_TITLE, META_TAG_SEPARATOR, META_VALUE_SEPARATOR, SOURCE_META_KEY, }; use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag}; use std::collections::HashMap; use std::fs; use std::io; use std::path::{Path, PathBuf}; use std::time::UNIX_EPOCH; use chrono::offset::Local; use chrono::{Date, NaiveDate, NaiveDateTime, TimeZone}; const ZWNBSP: &str = "\u{FEFF}"; #[derive(Debug, Clone)] pub struct Post { pub path: PathBuf, pub markdown: String, pub meta: HashMap<String, String>, pub title: String, pub date: Date<Local>, pub updated: Date<Local>, pub category: String, pub tags: Vec<String>, pub template: Option<PathBuf>, pub uri: String, pub toc: Vec<(String, u8)>, } impl Post { pub fn new(config: &Config, root: &Path, path: PathBuf) -> io::Result<Self> { let mut markdown = fs::read_to_string(&path)?.replace(ZWNBSP, ""); let mut meta = HashMap::new(); if let Some((Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(lang))), start_range)) = Parser::new(&markdown).into_offset_iter().next() { if lang.as_ref() == SOURCE_META_KEY { meta.extend(markdown[start_range.clone()].lines().filter_map(|line| { let mut kv = line.splitn(2, META_VALUE_SEPARATOR); kv.next() .zip(kv.next()) .map(|(k, v)| (k.trim().to_owned(), v.trim().to_owned())) })); markdown.replace_range(start_range, ""); } } let title = meta .get(META_KEY_TITLE) .cloned() .or_else(|| { let mut wait_title = false; Parser::new(&markdown).find_map(|event| { match event { Event::Start(Tag::Heading(1)) => wait_title = true, Event::Text(s) if wait_title => { return Some(s.to_string()); } _ => {} } None }) }) .unwrap_or_else(|| { path.file_name() .unwrap() .to_str() .expect("bad md file name") .to_owned() }); let metadata = fs::metadata(&path)?; let date = meta .get(META_KEY_CREATION_DATE) .and_then(|date| NaiveDate::parse_from_str(date, DATE_FMT).ok()) .or_else(|| { metadata 
.created() .ok() .and_then(|date| date.duration_since(UNIX_EPOCH).ok()) .map(|duration| { NaiveDateTime::from_timestamp( duration.as_secs() as i64, duration.subsec_nanos(), ) .date() }) }) .and_then(|date| Local.from_local_date(&date).latest()) .unwrap_or_else(|| Local::now().date()); let updated = meta .get(META_KEY_MODIFIED_DATE) .and_then(|date| NaiveDate::parse_from_str(date, DATE_FMT).ok()) .or_else(|| { metadata .modified() .ok() .and_then(|date| date.duration_since(UNIX_EPOCH).ok()) .map(|duration| { NaiveDateTime::from_timestamp( duration.as_secs() as i64, duration.subsec_nanos(), ) .date() }) }) .and_then(|date| Local.from_local_date(&date).latest()) .unwrap_or(date); let category = meta.get(META_KEY_CATEGORY).cloned().unwrap_or_else(|| { path.parent() .expect("post file had no parent") .file_name() .expect("post parent had no name") .to_str() .expect("post parent had non-utf8 name") .to_owned() }); let tags = meta .get(META_KEY_TAGS) .map(|tags| { tags.split(META_TAG_SEPARATOR) .map(|s| s.trim().to_owned()) .collect() }) .unwrap_or_else(Vec::new); let template = meta .get(META_KEY_TEMPLATE) .map(|s| crate::utils::get_abs_path(root, &path, s)); let uri = crate::utils::path_to_uri(root, &path.with_extension(&config.dist_ext)); let toc = { let mut toc_depth = None; Parser::new(&markdown) .filter_map(|event| { match event { Event::Start(Tag::Heading(depth)) => toc_depth = Some(depth as u8), Event::Text(s) if toc_depth.is_some() => { return Some((s.to_string(), toc_depth.take().unwrap())); } _ => {} } None }) .collect() }; Ok(Self { path, markdown, meta, title, date, updated, category, tags, template, uri, toc, }) } }
use crate::config::{ Config, DATE_FMT, META_KEY_CATEGORY, META_KEY_CREATION_DATE, META_KEY_MODIFIED_DATE, META_KEY_TAGS, META_KEY_TEMPLATE, META_KEY_TITLE, META_TAG_SEPARATOR, META_VALUE_SEPARATOR, SOURCE_META_KEY, }; use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag}; use std::collections::HashMap; use std::fs; use std::io; use std::path::{Path, PathBuf}; use std::time::UNIX_EPOCH; use chrono::offset::Local; use chrono::{Date, NaiveDate, NaiveDateTime, TimeZone}; const ZWNBSP: &str = "\u{FEFF}"; #[derive(Debug, Clone)] pub struct Post { pub path: PathBuf, pub markdown: String, pub meta: HashMap<String, String>, pub title: String, pub date: Date<Local>, pub updated: Date<Local>, pub category: String, pub tags: Vec<String>, pub template: Option<PathBuf>, pub uri: String, pub toc: Vec<(String, u8)>, } impl Post {
}
pub fn new(config: &Config, root: &Path, path: PathBuf) -> io::Result<Self> { let mut markdown = fs::read_to_string(&path)?.replace(ZWNBSP, ""); let mut meta = HashMap::new(); if let Some((Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(lang))), start_range)) = Parser::new(&markdown).into_offset_iter().next() { if lang.as_ref() == SOURCE_META_KEY { meta.extend(markdown[start_range.clone()].lines().filter_map(|line| { let mut kv = line.splitn(2, META_VALUE_SEPARATOR); kv.next() .zip(kv.next()) .map(|(k, v)| (k.trim().to_owned(), v.trim().to_owned())) })); markdown.replace_range(start_range, ""); } } let title = meta .get(META_KEY_TITLE) .cloned() .or_else(|| { let mut wait_title = false; Parser::new(&markdown).find_map(|event| { match event { Event::Start(Tag::Heading(1)) => wait_title = true, Event::Text(s) if wait_title => { return Some(s.to_string()); } _ => {} } None }) }) .unwrap_or_else(|| { path.file_name() .unwrap() .to_str() .expect("bad md file name") .to_owned() }); let metadata = fs::metadata(&path)?; let date = meta .get(META_KEY_CREATION_DATE) .and_then(|date| NaiveDate::parse_from_str(date, DATE_FMT).ok()) .or_else(|| { metadata .created() .ok() .and_then(|date| date.duration_since(UNIX_EPOCH).ok()) .map(|duration| { NaiveDateTime::from_timestamp( duration.as_secs() as i64, duration.subsec_nanos(), ) .date() }) }) .and_then(|date| Local.from_local_date(&date).latest()) .unwrap_or_else(|| Local::now().date()); let updated = meta .get(META_KEY_MODIFIED_DATE) .and_then(|date| NaiveDate::parse_from_str(date, DATE_FMT).ok()) .or_else(|| { metadata .modified() .ok() .and_then(|date| date.duration_since(UNIX_EPOCH).ok()) .map(|duration| { NaiveDateTime::from_timestamp( duration.as_secs() as i64, duration.subsec_nanos(), ) .date() }) }) .and_then(|date| Local.from_local_date(&date).latest()) .unwrap_or(date); let category = meta.get(META_KEY_CATEGORY).cloned().unwrap_or_else(|| { path.parent() .expect("post file had no parent") .file_name() .expect("post 
parent had no name") .to_str() .expect("post parent had non-utf8 name") .to_owned() }); let tags = meta .get(META_KEY_TAGS) .map(|tags| { tags.split(META_TAG_SEPARATOR) .map(|s| s.trim().to_owned()) .collect() }) .unwrap_or_else(Vec::new); let template = meta .get(META_KEY_TEMPLATE) .map(|s| crate::utils::get_abs_path(root, &path, s)); let uri = crate::utils::path_to_uri(root, &path.with_extension(&config.dist_ext)); let toc = { let mut toc_depth = None; Parser::new(&markdown) .filter_map(|event| { match event { Event::Start(Tag::Heading(depth)) => toc_depth = Some(depth as u8), Event::Text(s) if toc_depth.is_some() => { return Some((s.to_string(), toc_depth.take().unwrap())); } _ => {} } None }) .collect() }; Ok(Self { path, markdown, meta, title, date, updated, category, tags, template, uri, toc, }) }
function_block-full_function
[ { "content": "pub fn get_relative_uri(relative_to: &str, uri: &str) -> String {\n\n let relative_to = relative_to.as_bytes();\n\n let uri = uri.as_bytes();\n\n\n\n let mut count_after = relative_to.len();\n\n let mut last_shared_slash = 0;\n\n for i in 0..relative_to.len().max(uri.len()) {\n\n if relative_to.get(i) != uri.get(i) {\n\n count_after = i;\n\n break;\n\n } else if let Some(b'/') = uri.get(i) {\n\n last_shared_slash = i;\n\n }\n\n }\n\n\n\n let up_count = relative_to\n\n .iter()\n\n .skip(count_after)\n\n .filter(|c| **c == b'/')\n\n .count();\n\n\n\n let mut result = String::new();\n\n (0..up_count).for_each(|_| result.push_str(\"../\"));\n\n uri[last_shared_slash + 1..]\n\n .iter()\n\n .for_each(|c| result.push(*c as _));\n\n result\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 0, "score": 122915.22711122791 }, { "content": "pub fn path_to_uri(root: &Path, path: &Path) -> String {\n\n replace_root(\n\n &root.to_str().unwrap().to_owned(),\n\n &std::path::MAIN_SEPARATOR.to_string(),\n\n &path.to_str().unwrap().to_owned(),\n\n )\n\n .to_str()\n\n .unwrap()\n\n .replace(std::path::MAIN_SEPARATOR, \"/\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 116726.79064968269 }, { "content": "/// Replace's `path`'s `source` root with `destination`. 
Panics if `path` does not start with `source`.\n\n///\n\n/// Rust's path (and `OsString`) manipulation is pretty lacking, so the method falls back to `String`.\n\npub fn replace_root(source: &str, destination: &str, path: &str) -> PathBuf {\n\n assert!(path.starts_with(source));\n\n let rel = &path[(source.len() + 1).min(path.len())..]; // +1 to skip path separator\n\n let mut dir = PathBuf::from(&destination);\n\n dir.push(rel);\n\n dir\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 2, "score": 105447.94205427784 }, { "content": "/// Get the absolute path out of value given the root and the path of the file being processed.\n\npub fn get_abs_path(root: &Path, path: &Path, value: &str) -> PathBuf {\n\n if let Some(absolute) = value.strip_prefix('/') {\n\n let mut p = root.to_path_buf();\n\n p.push(absolute);\n\n p\n\n } else {\n\n let mut p = path.parent().unwrap().to_owned();\n\n p.push(value);\n\n p\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 101820.84345607067 }, { "content": "pub fn fill_atom_feed(feed: &Meta, md_files: &[Post]) -> String {\n\n let parent = feed.path.parent().unwrap();\n\n\n\n let mut entries = Vec::new();\n\n let mut last_updated = None;\n\n\n\n for md in md_files {\n\n if md.path.starts_with(parent) {\n\n if let Some(updated) = last_updated {\n\n last_updated = Some(md.updated.max(updated));\n\n } else {\n\n last_updated = Some(md.updated);\n\n }\n\n\n\n entries.push(atom::Entry {\n\n title: md.title.clone().into(),\n\n id: {\n\n let mut s = feed.link.clone();\n\n s.push_str(&md.uri);\n\n s\n", "file_path": "src/feed.rs", "rank": 4, "score": 101140.94766442249 }, { "content": "/// Parses the next value in the given string. `value` is left at the next value. 
Parsed value is returned.\n\npub fn parse_next_value(string: &mut &str) -> Option<String> {\n\n let bytes = string.as_bytes();\n\n\n\n let mut offset = 0;\n\n while offset < bytes.len() {\n\n if bytes[offset].is_ascii_whitespace() {\n\n offset += 1;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n if offset == bytes.len() {\n\n *string = &string[offset..];\n\n return None;\n\n }\n\n\n\n let (value, end_offset) = if bytes[offset] == b'\"' {\n\n let mut value = Vec::with_capacity(bytes.len() - offset);\n\n let mut escape = false;\n", "file_path": "src/utils.rs", "rank": 5, "score": 88351.56401905691 }, { "content": "pub fn load_atom_feed(path: &Path) -> quick_xml::Result<Meta> {\n\n let mut reader = Reader::from_file(path)?;\n\n let mut buffer = Vec::new();\n\n let mut state = State::Feed;\n\n\n\n let mut title = None;\n\n let mut link = None;\n\n let mut lang = None;\n\n let mut generator = None;\n\n let mut generator_uri = None;\n\n\n\n loop {\n\n buffer.clear();\n\n let event = reader.read_event(&mut buffer)?;\n\n state = match state {\n\n State::Feed => {\n\n let e = match_or_continue!(Start(event) if event.name() == b\"feed\");\n\n for attr in e.attributes() {\n\n let attr = attr?;\n\n if attr.key == b\"xml:lang\" {\n", "file_path": "src/feed.rs", "rank": 6, "score": 87424.80855018292 }, { "content": "pub fn generate_heading_id(heading: &str) -> String {\n\n let lowercase = heading.to_lowercase();\n\n let mut result = String::with_capacity(lowercase.len());\n\n let mut first = true;\n\n for word in lowercase.split_whitespace() {\n\n if first {\n\n first = false;\n\n } else {\n\n result.push('-');\n\n }\n\n result.push_str(word);\n\n }\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n mod parse_value {\n", "file_path": "src/utils.rs", "rank": 7, "score": 84703.154221921 }, { "content": "/// Scan a directory containing a blog made up of markdown files, templates and assets.\n\npub fn scan_dir(config: &Config, root: PathBuf) -> 
io::Result<Scan> {\n\n let mut dirs_to_create = Vec::new();\n\n let mut css_files = Vec::new();\n\n let mut atom_files = Vec::new();\n\n let mut files_to_copy = Vec::new();\n\n let mut md_files = Vec::new();\n\n let mut templates = HashSet::new();\n\n\n\n let mut pending = vec![root.clone()];\n\n while let Some(src) = pending.pop() {\n\n for entry in fs::read_dir(src)? {\n\n let entry = entry?;\n\n\n\n if entry.file_type()?.is_dir() {\n\n pending.push(entry.path());\n\n // Detects all directories that need to be created.\n\n dirs_to_create.push(entry.path());\n\n } else {\n\n let filename = entry.file_name();\n\n let filename = filename.to_str().expect(\"bad filename\");\n", "file_path": "src/blog.rs", "rank": 8, "score": 81645.61028134663 }, { "content": "/// Generate a blog from a previous `Scan`, turning all source files into HTML.\n\npub fn generate_from_scan(config: &Config, scan: Scan, destination: PathBuf) -> io::Result<()> {\n\n if !destination.is_dir() {\n\n fs::create_dir(&destination)?;\n\n }\n\n\n\n let source = scan\n\n .root\n\n .clone()\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad source path\");\n\n\n\n let destination = destination\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad destination path\");\n\n\n\n // Creates all directories that need creating.\n\n for dir in scan.dirs_to_create.iter() {\n\n // Replace dir's prefix (source) with destination.\n", "file_path": "src/blog.rs", "rank": 9, "score": 78473.20915504475 }, { "content": "#[derive(Clone)]\n\nstruct Replacement {\n\n range: Range<usize>,\n\n rule: PreprocessorRule,\n\n}\n\n\n\npub struct HtmlTemplate {\n\n html: String,\n\n replacements: Vec<Replacement>,\n\n}\n\n\n\nimpl MetaKey {\n\n fn new(value: String) -> Self {\n\n if value == META_KEY_TITLE {\n\n Self::Title\n\n } else if value == META_KEY_CREATION_DATE {\n\n Self::CreationDate\n\n } else if value == META_KEY_MODIFIED_DATE {\n\n Self::ModifiedDate\n\n } else if value == META_KEY_CATEGORY {\n\n 
Self::Category\n", "file_path": "src/template.rs", "rank": 10, "score": 64161.34456553216 }, { "content": "pub fn parse_cli_args() -> io::Result<Config> {\n\n let config = App::new(\"pagong\")\n\n .version(\"0.1.1\")\n\n .author(\"expectocode <[email protected]>, Lonami Exo <[email protected]>\")\n\n .about(\"A static site generator for slow connections\")\n\n .arg(Arg::with_name(\"root\")\n\n .value_name(\"SOURCE ROOT\")\n\n .help(\"Sets the root directory where the program should run [default: current directory]\"))\n\n .arg(Arg::with_name(\"template\")\n\n .value_name(\"TEMPLATE\")\n\n .short(\"t\")\n\n .long(\"default-template\")\n\n .help(\"Sets the default HTML template for the source Markdown files [default: basic embedded template]\"))\n\n .arg(Arg::with_name(\"dist_ext\")\n\n .value_name(\"EXT\")\n\n .short(\"e\")\n\n .long(\"generated-extension\")\n\n .help(\"Sets the file extension for the converted Markdown files\")\n\n .default_value(\"html\"))\n\n .arg(Arg::with_name(\"feed_ext\")\n", "file_path": "src/config.rs", "rank": 11, "score": 63227.220658266204 }, { "content": "#[derive(Clone)]\n\nenum MetaKey {\n\n Title,\n\n CreationDate,\n\n ModifiedDate,\n\n Category,\n\n Tags,\n\n Template,\n\n Meta(String),\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 12, "score": 42681.271773494664 }, { "content": "pub trait AdaptorExt<'a>\n\nwhere\n\n Self: Sized + Iterator<Item = md::Event<'a>>,\n\n{\n\n fn hyperlink_headings(self) -> HyperlinkHeadings<'a, Self> {\n\n HyperlinkHeadings {\n\n head: None,\n\n iter: self,\n\n generated_ids: HashSet::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, I> AdaptorExt<'a> for I where I: Iterator<Item = md::Event<'a>> {}\n\n\n\npub struct HyperlinkHeadings<'a, I>\n\nwhere\n\n I: Iterator<Item = md::Event<'a>>,\n\n{\n\n head: Option<md::Event<'a>>,\n", "file_path": "src/adaptor.rs", "rank": 13, "score": 31800.84315487594 }, { "content": "use crate::HtmlTemplate;\n\n\n\nuse clap::{arg_enum, value_t, App, Arg};\n\nuse 
std::env;\n\nuse std::io;\n\nuse std::path::PathBuf;\n\n\n\n// Program defaults.\n\npub const SOURCE_PATH: &str = \"content\";\n\npub const TARGET_PATH: &str = \"dist\";\n\n\n\n// Source file metadata.\n\npub const SOURCE_META_KEY: &str = \"meta\";\n\npub const DATE_FMT: &str = \"%F\";\n\npub const META_KEY_TITLE: &str = \"title\";\n\npub const META_KEY_CREATION_DATE: &str = \"date\";\n\npub const META_KEY_MODIFIED_DATE: &str = \"updated\";\n\npub const META_KEY_CATEGORY: &str = \"category\";\n\npub const META_KEY_TAGS: &str = \"tags\";\n\npub const META_KEY_TEMPLATE: &str = \"template\";\n", "file_path": "src/config.rs", "rank": 14, "score": 24377.156603497428 }, { "content": "pub const META_VALUE_SEPARATOR: &str = \"=\";\n\npub const META_TAG_SEPARATOR: &str = \",\";\n\n\n\n// Template defaults.\n\npub const DEFAULT_HTML_TEMPLATE: &str = std::include_str!(\"../template.html\");\n\npub const TEMPLATE_OPEN_MARKER: &str = \"<!--P/\";\n\npub const TEMPLATE_CLOSE_MARKER: &str = \"/P-->\";\n\npub const INCLUDE_RAW_EXTENSIONS: [&str; 4] = [\"html\", \"htm\", \"xhtml\", \"xht\"];\n\npub const DEFAULT_MINIFY_LEVEL: &str = \"yes\";\n\n\n\n// Blog options.\n\npub const SOURCE_FILE_EXT: &str = \"md\";\n\npub const DIST_FILE_EXT: &str = \"html\";\n\npub const STYLE_FILE_EXT: &str = \"css\";\n\npub const FEED_FILE_EXT: &str = \"atom\";\n\n\n\n// Feed defaults.\n\npub const FEED_CONTENT_TYPE: &str = \"html\";\n\npub const FEED_REL: &str = \"self\";\n\npub const FEED_TYPE: &str = \"application/atom+xml\";\n", "file_path": "src/config.rs", "rank": 15, "score": 24363.163766995076 }, { "content": "\n\narg_enum! 
{\n\n #[derive(PartialEq, Debug)]\n\n #[allow(non_camel_case_types)]\n\n pub enum Minify {\n\n no,\n\n yes,\n\n full\n\n }\n\n}\n\n\n\npub struct Config {\n\n pub root: PathBuf,\n\n pub template: HtmlTemplate,\n\n pub dist_ext: String,\n\n pub feed_ext: String,\n\n pub minify: Minify,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 16, "score": 24362.64554227449 }, { "content": " a context object (in `ctx`), the replacement type string (in `ty`), the options \\\n\n object corresponding to this replacement type (in `options`, if any), and a value \\\n\n (the things you're supposed to use to fill this replacement, in `value`).\"\n\n )\n\n .last(true))\n\n .get_matches();\n\n\n\n let root = match config.value_of(\"root\") {\n\n Some(path) => path.into(),\n\n None => env::current_dir()?,\n\n };\n\n\n\n let template = match config.value_of(\"template\") {\n\n Some(path) => HtmlTemplate::from_file(path)?,\n\n None => HtmlTemplate::from_string(DEFAULT_HTML_TEMPLATE.to_string()),\n\n };\n\n\n\n let dist_ext = match config.value_of(\"dist_ext\") {\n\n Some(ext) => ext.to_string(),\n\n None => DIST_FILE_EXT.to_string(),\n", "file_path": "src/config.rs", "rank": 17, "score": 24362.645371878083 }, { "content": " };\n\n\n\n let feed_ext = match config.value_of(\"feed_ext\") {\n\n Some(ext) => ext.to_string(),\n\n None => FEED_FILE_EXT.to_string(),\n\n };\n\n\n\n let minify = value_t!(config, \"minify\", Minify).unwrap_or_else(|e| e.exit());\n\n\n\n Ok(Config {\n\n root,\n\n template,\n\n dist_ext,\n\n feed_ext,\n\n minify,\n\n })\n\n}\n", "file_path": "src/config.rs", "rank": 18, "score": 24357.996986874372 }, { "content": " .value_name(\"EXT\")\n\n .short(\"a\")\n\n .long(\"feed-extension\")\n\n .help(\"Sets the file extension used for the Atom feed files\")\n\n .default_value(\"atom\"))\n\n .arg(Arg::with_name(\"minify\")\n\n .value_name(\"MIN\")\n\n .short(\"m\")\n\n .long(\"minify\")\n\n .help(\"Configures the minification level (recommended for certain HTML 
elements)\")\n\n .possible_values(&Minify::variants())\n\n .case_insensitive(true)\n\n .default_value(DEFAULT_MINIFY_LEVEL))\n\n .arg(Arg::with_name(\"processor\")\n\n .value_name(\"PROCESSOR\")\n\n .help(\"Configures a program that will be used to additionally process the template replacements.\")\n\n .long_help(\n\n \"Configures the program that will be used to additionall process the template \\\n\n replacements. This program will be executed with any parameters you specify.\\n\\n\\\n\n It will receive one JSON object per line via its standard input, consisting of \\\n", "file_path": "src/config.rs", "rank": 19, "score": 24354.660252877333 }, { "content": "use crate::config::{\n\n INCLUDE_RAW_EXTENSIONS, META_KEY_CATEGORY, META_KEY_CREATION_DATE, META_KEY_MODIFIED_DATE,\n\n META_KEY_TAGS, META_KEY_TEMPLATE, META_KEY_TITLE, TEMPLATE_CLOSE_MARKER, TEMPLATE_OPEN_MARKER,\n\n};\n\nuse crate::{utils, AdaptorExt as _, Post};\n\n\n\nuse pulldown_cmark::{self as md, Parser};\n\nuse std::cmp::Ordering;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::ops::Range;\n\nuse std::path::Path;\n\n\n\nconst RULE_CONTENTS: &str = \"CONTENTS\";\n\nconst RULE_CSS: &str = \"CSS\";\n\nconst RULE_TOC: &str = \"TOC\";\n\nconst RULE_LIST: &str = \"LIST\";\n\nconst RULE_META: &str = \"META\";\n\nconst RULE_INCLUDE: &str = \"INCLUDE\";\n\n\n\n#[derive(Clone)]\n", "file_path": "src/template.rs", "rank": 30, "score": 23552.91185426821 }, { "content": " MetaKey::Tags => a.tags.cmp(&b.tags),\n\n MetaKey::Template => a.template.cmp(&b.template),\n\n MetaKey::Meta(key) => a.meta.get(key).cmp(&b.meta.get(key)),\n\n };\n\n\n\n if asc {\n\n ordering\n\n } else {\n\n ordering.reverse()\n\n }\n\n });\n\n files = sorted_files.as_slice();\n\n }\n\n\n\n let mut res = String::new();\n\n res.push_str(\"<ul>\");\n\n 'files: for file in files {\n\n if file.path.starts_with(&path) {\n\n let rel = utils::get_relative_uri(&md.uri, &file.uri);\n\n if let Some(depth) = max_depth {\n", "file_path": "src/template.rs", 
"rank": 31, "score": 23543.332639486973 }, { "content": " res\n\n }\n\n PreprocessorRule::Listing {\n\n path,\n\n sort_by,\n\n max_depth,\n\n skip,\n\n } => {\n\n let path = utils::get_abs_path(root, &md.path, &path);\n\n\n\n let mut sorted_files;\n\n let mut files = files;\n\n if let Some((key, asc)) = sort_by {\n\n sorted_files = files.to_vec();\n\n sorted_files.sort_by(|a, b| {\n\n let ordering = match &key {\n\n MetaKey::Title => a.title.cmp(&b.title),\n\n MetaKey::CreationDate => a.date.cmp(&b.date),\n\n MetaKey::ModifiedDate => a.updated.cmp(&b.updated),\n\n MetaKey::Category => a.category.cmp(&b.category),\n", "file_path": "src/template.rs", "rank": 32, "score": 23543.232399454304 }, { "content": " } else if value == META_KEY_TAGS {\n\n Self::Tags\n\n } else if value == META_KEY_TEMPLATE {\n\n Self::Template\n\n } else {\n\n Self::Meta(value)\n\n }\n\n }\n\n}\n\n\n\nimpl PreprocessorRule {\n\n fn new(mut string: &str) -> Option<Self> {\n\n let parsing = &mut string;\n\n let rule = utils::parse_next_value(parsing)?;\n\n Some(match rule.as_str() {\n\n RULE_CONTENTS => PreprocessorRule::Contents,\n\n RULE_CSS => PreprocessorRule::Css,\n\n RULE_TOC => {\n\n let depth = match utils::parse_next_value(parsing) {\n\n Some(value) => match value.parse() {\n", "file_path": "src/template.rs", "rank": 33, "score": 23541.7775183893 }, { "content": " eprint!(\n\n \"note: could not understand preprocessor rule {}: {:?}\",\n\n rule, path\n\n );\n\n }\n\n }\n\n\n\n offset = rule_end + TEMPLATE_CLOSE_MARKER.len();\n\n }\n\n Self { html, replacements }\n\n }\n\n\n\n pub fn apply(\n\n &self,\n\n root: &Path,\n\n md: &Post,\n\n files: &[Post],\n\n css_files: &[String],\n\n ) -> io::Result<String> {\n\n let mut html = self.html.clone();\n", "file_path": "src/template.rs", "rank": 34, "score": 23540.636485801395 }, { "content": " PreprocessorRule::Include { path }\n\n }\n\n _ => return None,\n\n })\n\n }\n\n}\n\n\n\nimpl HtmlTemplate {\n\n pub fn from_file<P: AsRef<Path>>(path: P) 
-> io::Result<Self> {\n\n let html = fs::read_to_string(path.as_ref())?;\n\n Ok(Self::new(html, Some(path.as_ref())))\n\n }\n\n\n\n pub fn from_string(html: String) -> Self {\n\n Self::new(html, None)\n\n }\n\n\n\n fn new(html: String, path: Option<&Path>) -> Self {\n\n let mut replacements = Vec::new();\n\n let mut offset = 0;\n", "file_path": "src/template.rs", "rank": 35, "score": 23538.86597097887 }, { "content": " }\n\n PreprocessorRule::Meta { key } => {\n\n md.meta.get(&key).cloned().unwrap_or_else(String::new)\n\n }\n\n PreprocessorRule::Include { path } => {\n\n let path = utils::get_abs_path(root, &md.path, &path);\n\n\n\n match fs::read_to_string(&path) {\n\n Ok(s) => {\n\n if INCLUDE_RAW_EXTENSIONS.contains(\n\n &path\n\n .extension()\n\n .unwrap()\n\n .to_ascii_lowercase()\n\n .to_str()\n\n .unwrap(),\n\n ) {\n\n s\n\n } else {\n\n // Add a fourth to the capacity to leave some room for the escapes.\n", "file_path": "src/template.rs", "rank": 36, "score": 23538.338617169713 }, { "content": " let mut replacements = self.replacements.clone();\n\n replacements.sort_by_key(|r| r.range.start);\n\n\n\n for replacement in replacements.into_iter().rev() {\n\n let value = match replacement.rule {\n\n PreprocessorRule::Contents => {\n\n let mut res = String::new();\n\n pulldown_cmark::html::push_html(\n\n &mut res,\n\n Parser::new_ext(&md.markdown, md::Options::all()).hyperlink_headings(),\n\n );\n\n res\n\n }\n\n PreprocessorRule::Css => {\n\n let mut res = String::new();\n\n for css in css_files {\n\n let parent = &css[..css.rfind('/').unwrap()];\n\n if md.uri.starts_with(parent) {\n\n res.push_str(r#\"<link rel=\"stylesheet\" type=\"text/css\" href=\"\"#);\n\n res.push_str(&utils::get_relative_uri(&md.uri, css));\n", "file_path": "src/template.rs", "rank": 37, "score": 23537.63140290874 }, { "content": " if rel.matches('/').count() >= depth {\n\n continue 'files;\n\n }\n\n }\n\n\n\n for uri in skip.iter() {\n\n if rel.starts_with(uri) {\n\n continue 
'files;\n\n }\n\n }\n\n\n\n res.push_str(\"<li><a href=\\\"\");\n\n res.push_str(&rel);\n\n res.push_str(\"\\\">\");\n\n res.push_str(&file.title);\n\n res.push_str(\"</a></li>\");\n\n }\n\n }\n\n res.push_str(\"</ul>\");\n\n res\n", "file_path": "src/template.rs", "rank": 38, "score": 23535.35652424866 }, { "content": " Some(uri) => skip.push(uri),\n\n None => eprintln!(\"note: skip requires a relative uri\"),\n\n },\n\n _ => eprintln!(\"note: unrecognized list argument: {}\", arg),\n\n }\n\n }\n\n\n\n PreprocessorRule::Listing {\n\n path,\n\n sort_by,\n\n max_depth,\n\n skip,\n\n }\n\n }\n\n RULE_META => {\n\n let key = utils::parse_next_value(parsing)?;\n\n PreprocessorRule::Meta { key }\n\n }\n\n RULE_INCLUDE => {\n\n let path = utils::parse_next_value(parsing)?;\n", "file_path": "src/template.rs", "rank": 39, "score": 23534.948933363754 }, { "content": " Ok(depth) => depth,\n\n Err(_) => {\n\n eprintln!(\"note: could not parse depth as a number: {}\", string);\n\n u8::MAX\n\n }\n\n },\n\n None => u8::MAX,\n\n };\n\n PreprocessorRule::Toc { depth }\n\n }\n\n RULE_LIST => {\n\n let path = utils::parse_next_value(parsing)?;\n\n\n\n let mut sort_by = None;\n\n let mut max_depth = None;\n\n let mut skip = Vec::new();\n\n while let Some(arg) = utils::parse_next_value(parsing) {\n\n match arg.as_ref() {\n\n \"sort\" => {\n\n match (\n", "file_path": "src/template.rs", "rank": 40, "score": 23534.75681426029 }, { "content": " res.push_str(\"\\\">\");\n\n }\n\n }\n\n res\n\n }\n\n PreprocessorRule::Toc { depth: max_depth } => {\n\n let mut res = String::new();\n\n let mut cur_depth = 0;\n\n for (heading, depth) in md.toc.iter() {\n\n let depth = *depth;\n\n if depth > max_depth {\n\n continue;\n\n }\n\n\n\n match cur_depth.cmp(&depth) {\n\n Ordering::Less => {\n\n while cur_depth != depth {\n\n res.push_str(\"<ul>\");\n\n cur_depth += 1;\n\n }\n", "file_path": "src/template.rs", "rank": 41, "score": 23534.15941879426 }, { "content": " while let Some(index) = 
html[offset..].find(TEMPLATE_OPEN_MARKER) {\n\n let rule_start = offset + index + TEMPLATE_OPEN_MARKER.len();\n\n let rule_end = match html[rule_start..].find(TEMPLATE_CLOSE_MARKER) {\n\n Some(i) => rule_start + i,\n\n None => {\n\n eprintln!(\n\n \"note: html template without close marker after byte offset {}: {:?}\",\n\n rule_start, path\n\n );\n\n break;\n\n }\n\n };\n\n\n\n let rule = &html[rule_start..rule_end];\n\n match PreprocessorRule::new(rule) {\n\n Some(rule) => replacements.push(Replacement {\n\n range: (offset + index)..(rule_end + TEMPLATE_CLOSE_MARKER.len()),\n\n rule,\n\n }),\n\n None => {\n", "file_path": "src/template.rs", "rank": 42, "score": 23531.709830005108 }, { "content": " // This is merely a best-effort guess to avoid re-allocating.\n\n let mut escaped = String::with_capacity(s.len() + s.len() / 4);\n\n md::escape::escape_html(&mut escaped, &s).unwrap();\n\n escaped\n\n }\n\n }\n\n Err(_) => {\n\n eprintln!(\"note: failed to include {:?}\", path);\n\n continue;\n\n }\n\n }\n\n }\n\n };\n\n\n\n html.replace_range(replacement.range, &value);\n\n }\n\n\n\n Ok(html)\n\n }\n\n}\n", "file_path": "src/template.rs", "rank": 43, "score": 23531.03234120917 }, { "content": " }\n\n Ordering::Greater => {\n\n while cur_depth != depth {\n\n res.push_str(\"</ul>\");\n\n cur_depth -= 1;\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n res.push_str(\"<li>\");\n\n res.push_str(heading);\n\n res.push_str(\"</li>\");\n\n }\n\n\n\n while cur_depth != 0 {\n\n res.push_str(\"</ul>\");\n\n cur_depth -= 1;\n\n }\n\n\n", "file_path": "src/template.rs", "rank": 44, "score": 23531.02475479361 }, { "content": " utils::parse_next_value(parsing),\n\n utils::parse_next_value(parsing),\n\n ) {\n\n (Some(key), Some(order)) if order == \"asc\" || order == \"desc\" => {\n\n sort_by = Some((MetaKey::new(key), order == \"asc\"));\n\n }\n\n (key, order) => eprintln!(\n\n \"note: sort requires key and asc/desc order, but got: {:?}, {:?}\",\n\n key, order\n\n ),\n\n }\n\n }\n\n \"depth\" 
=> match utils::parse_next_value(parsing) {\n\n Some(depth) => match depth.parse() {\n\n Ok(depth) => max_depth = Some(depth),\n\n Err(e) => eprintln!(\"note: depth was not a number: {}\", e),\n\n },\n\n None => eprintln!(\"note: depth requires a number\"),\n\n },\n\n \"skip\" => match utils::parse_next_value(parsing) {\n", "file_path": "src/template.rs", "rank": 45, "score": 23528.969287347198 }, { "content": "#[derive(Clone)]\n\nenum PreprocessorRule {\n\n Contents,\n\n Css,\n\n Toc {\n\n depth: u8,\n\n },\n\n Listing {\n\n path: String,\n\n /// (meta key, ascending?)\n\n sort_by: Option<(MetaKey, bool)>,\n\n max_depth: Option<usize>,\n\n skip: Vec<String>,\n\n },\n\n Meta {\n\n key: String,\n\n },\n\n Include {\n\n path: String,\n\n },\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 46, "score": 20781.30670872609 }, { "content": "### Metadata\n\n\n\nYour `.md` files may contain the following syntax at the very top:\n\n\n\n````\n\n```meta\n\nkey = value\n\n```\n\n\n\n**Markdown** content follows as usual…\n\n````\n\n\n\nThe code-block with `meta` lang must be the first markdown element in the file. The supported keys are:\n\n\n\n* `title`: Post title (e.g. \"Hello, world!\"). If not specified, the first heading in the document is considered the title. If there is no first heading, the file name is used.\n\n* `date`: Published date, `YYYY-MM-DD` (Year, Month, Day) format (e.g. \"2020-02-20\"). If not specified, the file's creation date will be used. If it cannot be fetched, the current date will be used.\n\n* `updated`: Updated date, `YYYY-MM-DD` format. If not specified, the file's modification date will be used. If it cannot be fetched, `date` will be used.\n\n* `category`: Category where the post belongs to (e.g. \"computing\"). If not specified, the parent folder name will be used (e.g. \"blog\").\n\n* `tags`: Comma-separated list of tags (e.g. \"rust, ssg\"). 
If not specified, an empty list of tags is produced.\n\n* `template`: Path to the HTML file to be used as the template for this file, UNIX-style path, relative wherever the current file is (e.g. \"/_blog.html\" or \"../_template.html\").\n\n\n\nAny other key will be ignored by `pagong`, but may be used for your own needs.\n\n\n\n### CSS\n\n\n\nAny `.css` file will be copied to `dist/`, and any `.md` will load all the `.css` files in the same directory or above.\n\n\n\n```\n\nmyblog/\n\n└── content/\n\n ├── index.md\n\n ├── sitewide.css\n\n └── blog/\n\n    ├── hello-world.md\n\n └── blogwide.css\n\n```\n\n\n\nThe HTML generated for `index.md` will use `sitewide.css`, and the HTML generated for `hello-world.md` will include `sitewide.css` and then `blogwide.css`.\n\n\n", "file_path": "README.md", "rank": 47, "score": 17.69487692508357 }, { "content": "use crate::config::{Config, Minify, SOURCE_FILE_EXT, STYLE_FILE_EXT};\n\nuse crate::{feed, utils, HtmlTemplate, Post};\n\n\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path::PathBuf;\n\n\n\npub struct Scan {\n\n /// Root path of the source directory.\n\n root: PathBuf,\n\n /// Directories to create in the destination.\n\n dirs_to_create: Vec<PathBuf>,\n\n /// Files to copy to the destination without any special treatment.\n\n files_to_copy: Vec<PathBuf>,\n\n /// URIs to the CSS files found.\n\n css_files: Vec<String>,\n\n /// HTML templates found.\n\n html_templates: HashMap<PathBuf, HtmlTemplate>,\n\n /// Markdown files to parse and generate HTML from.\n\n md_files: Vec<Post>,\n\n /// ATOM feeds to fill.\n\n atom_files: Vec<feed::Meta>,\n\n}\n\n\n\n/// Scan a directory containing a blog made up of markdown files, templates and assets.\n", "file_path": "src/blog.rs", "rank": 48, "score": 15.855367153712171 }, { "content": " },\n\n updated: md.updated.and_hms(0, 0, 0).into(),\n\n published: Some(md.date.and_hms(0, 0, 0).into()),\n\n categories: vec![atom::Category {\n\n term: 
md.category.clone(),\n\n ..atom::Category::default()\n\n }],\n\n content: Some(atom::Content {\n\n value: {\n\n let mut html = String::new();\n\n md::html::push_html(&mut html, md::Parser::new(&md.markdown));\n\n let mut escaped = String::new();\n\n md::escape::escape_html(&mut escaped, &html).unwrap();\n\n Some(escaped)\n\n },\n\n content_type: Some(FEED_CONTENT_TYPE.to_string()),\n\n ..atom::Content::default()\n\n }),\n\n ..atom::Entry::default()\n\n });\n", "file_path": "src/feed.rs", "rank": 49, "score": 13.298682541992001 }, { "content": "use crate::config::{FEED_CONTENT_TYPE, FEED_REL, FEED_TYPE};\n\nuse crate::Post;\n\n\n\nuse atom_syndication as atom;\n\nuse pulldown_cmark as md;\n\nuse quick_xml::events::Event;\n\nuse quick_xml::Reader;\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "src/feed.rs", "rank": 50, "score": 12.563341889671952 }, { "content": "mod adaptor;\n\nmod blog;\n\nmod config;\n\nmod feed;\n\nmod post;\n\nmod processor;\n\nmod template;\n\nmod utils;\n\n\n\nuse adaptor::AdaptorExt;\n\nuse post::Post;\n\nuse processor::Processor;\n\nuse template::HtmlTemplate;\n\n\n\nuse std::io;\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 11.229962412483552 }, { "content": " .path\n\n .clone()\n\n .with_extension(&config.dist_ext)\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad md path\");\n\n let dst = utils::replace_root(&source, &destination, &src);\n\n\n\n let template = file\n\n .template\n\n .as_ref()\n\n .and_then(|t| scan.html_templates.get(t))\n\n .unwrap_or(&config.template);\n\n\n\n let html = template.apply(&scan.root, file, &scan.md_files, &scan.css_files)?;\n\n\n\n let html = match config.minify {\n\n Minify::no => html,\n\n Minify::yes | Minify::full => {\n\n let mut html = html.into_bytes();\n", "file_path": "src/blog.rs", "rank": 52, "score": 11.094903906874663 }, { "content": "### HTML\n\n\n\nAny `.html` file will be copied to `dist/` as-is, with the exception files mentioned in the metadata of any of the 
`.md` files. If `hello-world.md` includes `template = /templates/base.html`, then `base.html` won't be copied over as-is, and instead, it will be used as a template. You're encouraged to follow your own convention as to where to place the templates or how they should be named.\n\n\n\nHTML files used as templates offer some very minimal \"pre-processor\" rules, which are HTML comments with a few adornments:\n\n\n\n```html\n\nThis comment will tell pagong to insert the generated HTML in this spot:\n\n<!--P/ CONTENTS /P-->\n\n\n\nThis comment will tell pagong to insert references to any CSS files in this spot:\n\n<!--P/ CSS /P-->\n\n\n\nThis comment will tell pagong to automatically generate a Table of Contents for the current page (based on Markdown headings). You may optionally set the maximum depth:\n\n<!--P/ TOC /P-->\n\n<!--P/ TOC 3 /P-->\n\n\n\nThis comment will tell pagong to automatically generate a list of files in the given path (relative to the current markdown file). You may optionally sort by a meta key in either asc-ending or desc-ending order. You may optionally set a maximum depth. You may optionally skip certain relative URIs. You may combine all of these settings:\n\n<!--P/ LIST path /P-->\n\n<!--P/ LIST path sort updated desc /P-->\n\n<!--P/ LIST path depth 2 /P-->\n\n<!--P/ LIST path skip index.html skip secret.html /P-->\n\n\n\nThis comment will get replaced with whatever was put in the specified metadata key (in this example, the title):\n\n<!--P/ META title /P-->\n\n\n\nThis comment will get replaced with the contents of whatever path is specified (relative to the current markdown file). 
HTML files won't be escaped, but everything else will:\n\n<!--P/ INCLUDE path /P-->\n", "file_path": "README.md", "rank": 53, "score": 10.905491247492787 }, { "content": "\n\n Ok(Meta {\n\n path: path.to_path_buf(),\n\n title,\n\n link,\n\n lang,\n\n generator,\n\n generator_uri,\n\n })\n\n}\n\n\n", "file_path": "src/feed.rs", "rank": 54, "score": 10.68550569940643 }, { "content": " // Marks every file as needing a copy except for MD files.\n\n files_to_copy.push(entry.path());\n\n } else {\n\n // Parses all MD files.\n\n let md = Post::new(config, &root, entry.path())?;\n\n if let Some(template) = md.template.as_ref() {\n\n templates.insert(template.clone());\n\n }\n\n md_files.push(md);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Removes the HTML templates from the files that need copying.\n\n files_to_copy.retain(|path| !templates.contains(path));\n\n\n\n // Parse templates.\n\n let html_templates = templates\n\n .into_iter()\n", "file_path": "src/blog.rs", "rank": 55, "score": 10.638363266905952 }, { "content": " }\n\n }\n\n\n\n let mut self_link = feed.link.trim_end_matches('/').to_owned();\n\n self_link.push('/');\n\n self_link.push_str(&feed.path.file_name().unwrap().to_str().unwrap());\n\n\n\n if let Some(lang) = feed.lang.as_ref() {\n\n eprintln!(\n\n \"note: feed lang '{}' is currently ignored: see gh/atom/issues/54\",\n\n lang\n\n );\n\n }\n\n\n\n atom::Feed {\n\n title: feed.title.clone().into(),\n\n id: feed.link.clone(),\n\n updated: last_updated\n\n .map(|d| d.and_hms(0, 0, 0).into())\n\n .unwrap_or_else(|| chrono::offset::Local::now().into()),\n", "file_path": "src/feed.rs", "rank": 56, "score": 10.366381790801809 }, { "content": " lang = Some(attr.unescape_and_decode_value(&reader)?);\n\n }\n\n }\n\n State::Info\n\n }\n\n State::Info => match event {\n\n Event::Start(e) if e.name() == b\"title\" => State::Title,\n\n Event::Start(e) if e.name() == b\"generator\" => {\n\n for attr in e.attributes() {\n\n let attr = attr?;\n\n if attr.key == b\"uri\" {\n\n 
generator_uri = Some(attr.unescape_and_decode_value(&reader)?);\n\n }\n\n }\n\n State::Generator\n\n }\n\n Event::Start(e) | Event::Empty(e) if e.name() == b\"link\" => {\n\n for attr in e.attributes() {\n\n let attr = attr?;\n\n if attr.key == b\"href\" {\n", "file_path": "src/feed.rs", "rank": 57, "score": 7.854500514267361 }, { "content": " iter: I,\n\n generated_ids: HashSet<String>,\n\n}\n\n\n\nimpl<'a, I> Iterator for HyperlinkHeadings<'a, I>\n\nwhere\n\n I: Iterator<Item = md::Event<'a>>,\n\n{\n\n type Item = md::Event<'a>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.head.take() {\n\n Some(item) => return Some(item),\n\n None => {}\n\n }\n\n\n\n match self.iter.next() {\n\n Some(md::Event::Start(md::Tag::Heading(level))) => match self.iter.next() {\n\n Some(md::Event::Text(text)) => {\n\n let mut id = utils::generate_heading_id(&text);\n", "file_path": "src/adaptor.rs", "rank": 58, "score": 7.741320516680199 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\n/// Parses the next value in the given string. `value` is left at the next value. 
Parsed value is returned.\n", "file_path": "src/utils.rs", "rank": 59, "score": 7.423828766783812 }, { "content": " let dir = dir\n\n .clone()\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad dir path\");\n\n let dir = utils::replace_root(&source, &destination, &dir);\n\n if !dir.is_dir() {\n\n fs::create_dir(dir)?;\n\n }\n\n }\n\n\n\n // Copies all files that need copying.\n\n for file in scan.files_to_copy.iter() {\n\n let src = file\n\n .clone()\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad file path\");\n\n let dst = utils::replace_root(&source, &destination, &src);\n\n if !dst.is_file() {\n", "file_path": "src/blog.rs", "rank": 60, "score": 7.048848993522436 }, { "content": " Some(t) => t,\n\n None => {\n\n eprintln!(\n\n \"note: atom feed lacks title tag, treating as invalid: {:?}\",\n\n path\n\n );\n\n return Err(quick_xml::Error::TextNotFound);\n\n }\n\n };\n\n\n\n let link = match link {\n\n Some(t) => t,\n\n None => {\n\n eprintln!(\n\n \"note: atom feed lacks link tag, treating as invalid: {:?}\",\n\n path\n\n );\n\n return Err(quick_xml::Error::TextNotFound);\n\n }\n\n };\n", "file_path": "src/feed.rs", "rank": 61, "score": 6.674734155347497 }, { "content": " entries,\n\n generator: feed.generator.clone().map(|value| atom::Generator {\n\n value,\n\n uri: feed.generator_uri.clone(),\n\n ..atom::Generator::default()\n\n }),\n\n links: vec![\n\n atom::Link {\n\n href: feed.link.clone(),\n\n ..atom::Link::default()\n\n },\n\n atom::Link {\n\n href: self_link,\n\n rel: FEED_REL.into(),\n\n mime_type: Some(FEED_TYPE.to_owned()),\n\n ..atom::Link::default()\n\n },\n\n ],\n\n ..atom::Feed::default()\n\n }\n\n .to_string()\n\n}\n", "file_path": "src/feed.rs", "rank": 62, "score": 6.638327988210438 }, { "content": " link = Some(attr.unescape_and_decode_value(&reader)?);\n\n }\n\n }\n\n continue;\n\n }\n\n Event::Eof => break,\n\n _ => continue,\n\n },\n\n State::Title => {\n\n title = 
Some(match_or_continue!(Text(event)).unescape_and_decode(&reader)?);\n\n State::Info\n\n }\n\n State::Generator => {\n\n generator = Some(match_or_continue!(Text(event)).unescape_and_decode(&reader)?);\n\n State::Info\n\n }\n\n };\n\n }\n\n\n\n let title = match title {\n", "file_path": "src/feed.rs", "rank": 63, "score": 6.102182423917691 }, { "content": " if self.generated_ids.contains(&id) {\n\n let original_id = id.clone();\n\n let mut i = 1;\n\n while self.generated_ids.contains(&id) {\n\n i += 1;\n\n id = format!(\"{}{}\", original_id, i);\n\n }\n\n }\n\n\n\n let heading = Some(md::Event::Html(\n\n format!(\"<h{} id=\\\"{}\\\">\", level, id).into(),\n\n ));\n\n self.head = Some(md::Event::Text(text));\n\n self.generated_ids.insert(id);\n\n heading\n\n }\n\n Some(item) => {\n\n self.head = Some(item);\n\n Some(md::Event::Start(md::Tag::Heading(level)))\n\n }\n\n None => None,\n\n },\n\n item => item,\n\n }\n\n }\n\n}\n", "file_path": "src/adaptor.rs", "rank": 64, "score": 6.06906753478637 }, { "content": " .filter_map(|path| match HtmlTemplate::from_file(&path) {\n\n Ok(template) => Some((path, template)),\n\n Err(_) => {\n\n eprintln!(\"note: failed to parse html template: {:?}\", path);\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n Ok(Scan {\n\n root,\n\n dirs_to_create,\n\n files_to_copy,\n\n css_files,\n\n html_templates,\n\n md_files,\n\n atom_files,\n\n })\n\n}\n\n\n", "file_path": "src/blog.rs", "rank": 65, "score": 5.9446099921458195 }, { "content": " fs::copy(src, dst)?;\n\n }\n\n }\n\n\n\n // Generate all feeds.\n\n for atom in scan.atom_files.iter() {\n\n let src = atom\n\n .path\n\n .clone()\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"bad file path\");\n\n\n\n let dst = utils::replace_root(&source, &destination, &src);\n\n fs::write(dst, feed::fill_atom_feed(atom, &scan.md_files))?;\n\n }\n\n\n\n // Converts every MD file to HTML and places it in the destination.\n\n for file in scan.md_files.iter() {\n\n let src = file\n", "file_path": 
"src/blog.rs", "rank": 66, "score": 5.64264620242637 }, { "content": " match hyperbuild::hyperbuild(\n\n &mut html,\n\n &hyperbuild::Cfg {\n\n minify_js: matches!(config.minify, Minify::full),\n\n },\n\n ) {\n\n Ok(n) => html.truncate(n),\n\n Err((e, n)) => eprintln!(\n\n \"note: could not minify: pos={}, cause={:?}, file={:?}\",\n\n n, e, file.path\n\n ),\n\n };\n\n String::from_utf8(html).unwrap()\n\n }\n\n };\n\n\n\n fs::write(dst, html)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/blog.rs", "rank": 67, "score": 5.353208511673275 }, { "content": "```\n\n\n\nWhen replacing the \"pre-processor\" rules, the code will look exactly for the strings `<!--P/` and `/P-->`, so make sure to not introduce spaces in-between. If any of the values to the pre-processor rules contain spaces, surround them in double-quotes (`\"`). The only escape sequences allowed inside double-quotes are `\\\"` in order to escape a quote, and `\\\\` in order to escape the backslash character.\n\n\n\nA default [`template.html`] file is embedded withing `pagong` itself. It will be used when no other template file is specified, in order to generate valid HTML5 (your HTML needs a body, after all).\n\n\n\n[`template.html`]: https://github.com/Lonami/pagong/blob/master/template.html\n\n\n", "file_path": "README.md", "rank": 68, "score": 5.340073944345123 }, { "content": "### Feed\n\n\n\nAny `.atom` file will be copied to `dist/`, but its root `feed` tag will be filled with `entry` tags automatically. 
Here's a basic `.atom` file which would do the trick (and you're free to remove the `generator` tag):\n\n\n\n```xml\n\n<feed xml:lang=\"en\">\n\n <title>Example's Blog</title>\n\n <link href=\"https://example.com/blog/\"/>\n\n <generator uri=\"https://github.com/expectocode/pagong\">pagong with atom_syndication</generator>\n\n</feed>\n\n```\n\n\n\n### Media\n\n\n\nAny other file will be copied over without any processing done to it, with the same path and name as it existed in the `content/` directory.\n\n\n\n## Contributing\n\n\n\nThe number of features this project offers is intentionally small. Issues and pull requests regarding bugs or possible enhancements are welcome. New features or substantial changes must first be discussed in the issues section. Pull requests of new features without previous discussion will be rejected, but you are welcome to maintain your own fork.\n\n\n\n## License\n\n\n\nPagong is licensed under either of Apache License, Version 2.0 or MIT license at your option.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this crate by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 69, "score": 5.220844509144052 }, { "content": " let ext_idx = filename\n\n .rfind('.')\n\n .map(|i| i + 1)\n\n .unwrap_or_else(|| filename.len());\n\n let ext = &filename[ext_idx..];\n\n\n\n if ext.eq_ignore_ascii_case(STYLE_FILE_EXT) {\n\n // Detects all CSS files.\n\n css_files.push(utils::path_to_uri(&root, &entry.path()));\n\n }\n\n\n\n if ext.eq_ignore_ascii_case(&config.feed_ext) {\n\n match feed::load_atom_feed(&entry.path()) {\n\n Ok(atom) => atom_files.push(atom),\n\n Err(e) => {\n\n eprintln!(\"note: failed to load atom feed: {}: {:?}\", e, entry.path());\n\n files_to_copy.push(entry.path());\n\n }\n\n }\n\n } else if !ext.eq_ignore_ascii_case(SOURCE_FILE_EXT) {\n", "file_path": "src/blog.rs", 
"rank": 70, "score": 5.16959042332995 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn simple() {\n\n let mut string = \"simple\";\n\n assert_eq!(parse_next_value(&mut string), Some(\"simple\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn quoted() {\n\n let mut string = \"\\\"quoted\\\"\";\n\n assert_eq!(parse_next_value(&mut string), Some(\"quoted\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn good_escape() {\n\n let mut string = \"\\\"good\\\\\\\" \\\\\\\"escape\\\"\";\n\n assert_eq!(\n\n parse_next_value(&mut string),\n\n Some(\"good\\\" \\\"escape\".to_owned())\n", "file_path": "src/utils.rs", "rank": 71, "score": 4.704587272063625 }, { "content": " Event::Eof => break,\n\n _ => continue,\n\n }\n\n };\n\n ( $event_ty:ident ( $event:ident ) if $guard:expr ) => {\n\n match $event {\n\n Event::$event_ty($event) if $guard => $event,\n\n Event::Eof => break,\n\n _ => continue,\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/feed.rs", "rank": 72, "score": 3.8138244044925425 }, { "content": "use crate::utils;\n\n\n\nuse pulldown_cmark as md;\n\nuse std::collections::HashSet;\n\n\n", "file_path": "src/adaptor.rs", "rank": 73, "score": 3.4235170251476337 }, { "content": "# Pagong ![pagong's logo](logo.svg)\n\n\n\n> *Boring-simple Static-Site-Generator*\n\n\n\nYou want a website but writing HTML by hand is awful. I get it. But that's no problem! Write markdown at your leisure, run `pagong` and get your nice HTML lightning fast, ready to be uploaded to your hosting service!\n\n\n\n## Getting started\n\n\n\n### Installation\n\n\n\nInstall `pagong` by running the following command on a terminal:\n\n\n\n```sh\n\ncargo install --git https://github.com/expectocode/pagong\n\n```\n\n\n\nThen use it in your blog's root folder:\n\n\n\n```sh\n\npagong\n\n```\n\n\n\nIt's that simple!\n\n\n\n### Blog structure\n\n\n\nFor `pagong` to do anything useful, you need to have some entries for your blog. These should be written in markdown and saved in the `content/` directory as `.md` files. 
For example:\n\n\n\n```\n\nmyblog/\n\n└── content/\n\n └── hello-world.md\n\n```\n\n\n\nRunning `pagong` while inside `myblog` will create the following `dist/` folder, and the tree of your blog now looks like this:\n\n\n\n```\n\nmyblog/\n\n├── content/\n\n│   └── hello-world.md\n\n└── dist/\n\n └── hello-world/\n\n    └── index.html\n\n```\n\n\n\nNow you can move the contents of `dist/` to wherever you host your site and enjoy it.\n\n\n\n## Customization\n\n\n", "file_path": "README.md", "rank": 74, "score": 3.3014632595546094 }, { "content": " string\n\n );\n\n }\n\n if !closed {\n\n eprintln!(\n\n \"note: reached end of string without closing it: {:?}\",\n\n string\n\n );\n\n }\n\n (value, index)\n\n } else {\n\n let end_offset = match bytes[offset..].iter().position(|b| b.is_ascii_whitespace()) {\n\n Some(i) => offset + i,\n\n None => bytes.len(),\n\n };\n\n (bytes[offset..end_offset].to_vec(), end_offset)\n\n };\n\n\n\n *string = &string[end_offset..];\n\n String::from_utf8(value).ok()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 75, "score": 3.1407975149340093 }, { "content": " let mut string = \" simple \\t\\\"quoted\\\" \\n \\\"\\\\\\\"escapes\\\\\\\\\\\" \\n\\t \\r simple\";\n\n let string = &mut string;\n\n let mut values = Vec::new();\n\n while let Some(value) = parse_next_value(string) {\n\n values.push(value);\n\n }\n\n\n\n assert_eq!(values, vec![\"simple\", \"quoted\", \"\\\"escapes\\\\\", \"simple\"]);\n\n }\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 76, "score": 2.992363029822768 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn bad_escape() {\n\n let mut string = \"\\\"bad\\\\_escape\\\"\";\n\n assert_eq!(parse_next_value(&mut string), Some(\"bad_escape\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn unterminated() {\n\n let mut string = \"\\\"unterminated\";\n\n assert_eq!(\n\n parse_next_value(&mut string),\n\n Some(\"unterminated\".to_owned())\n\n );\n\n }\n\n\n\n #[test]\n\n fn multiple() {\n", "file_path": "src/utils.rs", "rank": 
77, "score": 2.9358961512796378 }, { "content": " let mut index = offset + 1;\n\n let mut closed = false;\n\n while index < bytes.len() {\n\n if escape {\n\n value.push(bytes[index]);\n\n escape = false;\n\n } else if bytes[index] == b'\\\\' {\n\n escape = true;\n\n } else if bytes[index] == b'\"' {\n\n closed = true;\n\n index += 1;\n\n break;\n\n } else {\n\n value.push(bytes[index]);\n\n }\n\n index += 1;\n\n }\n\n if escape {\n\n eprintln!(\n\n \"note: reached end of string with escape sequence open: {:?}\",\n", "file_path": "src/utils.rs", "rank": 78, "score": 1.670989369701466 } ]
Rust
src/args/arg_builder/flag.rs
Nemo157/clap-rs
01994467d5808903907012c26953ebde72594137
use std::convert::From; use std::fmt::{Display, Formatter, Result}; use std::rc::Rc; use std::result::Result as StdResult; use vec_map::VecMap; use Arg; use args::{AnyArg, DispOrder}; use args::settings::{ArgFlags, ArgSettings}; #[derive(Debug)] #[doc(hidden)] pub struct FlagBuilder<'n, 'e> { pub name: &'n str, pub long: Option<&'e str>, pub aliases: Option<Vec<(&'e str, bool)>>, pub help: Option<&'e str>, pub blacklist: Option<Vec<&'e str>>, pub requires: Option<Vec<&'e str>>, pub short: Option<char>, pub overrides: Option<Vec<&'e str>>, pub settings: ArgFlags, pub disp_ord: usize, } impl<'n, 'e> Default for FlagBuilder<'n, 'e> { fn default() -> Self { FlagBuilder { name: "", long: None, aliases: None, help: None, blacklist: None, requires: None, short: None, overrides: None, settings: ArgFlags::new(), disp_ord: 999, } } } impl<'n, 'e> FlagBuilder<'n, 'e> { pub fn new(name: &'n str) -> Self { FlagBuilder { name: name, ..Default::default() } } } impl<'a, 'b, 'z> From<&'z Arg<'a, 'b>> for FlagBuilder<'a, 'b> { fn from(a: &'z Arg<'a, 'b>) -> Self { assert!(a.validator.is_none(), format!("The argument '{}' has a validator set, yet was parsed as a flag. 
Ensure \ .takes_value(true) or .index(u64) is set.", a.name)); assert!(a.possible_vals.is_none(), format!("The argument '{}' cannot have a specific value set because it doesn't \ have takes_value(true) set", a.name)); assert!(!a.is_set(ArgSettings::Required), format!("The argument '{}' cannot be required because it's a flag, perhaps you \ forgot takes_value(true)?", a.name)); FlagBuilder { name: a.name, short: a.short, long: a.long, aliases: a.aliases.clone(), help: a.help, blacklist: a.blacklist.clone(), overrides: a.overrides.clone(), requires: a.requires.clone(), settings: a.settings, disp_ord: a.disp_ord, } } } impl<'n, 'e> Display for FlagBuilder<'n, 'e> { fn fmt(&self, f: &mut Formatter) -> Result { if let Some(l) = self.long { try!(write!(f, "--{}", l)); } else { try!(write!(f, "-{}", self.short.unwrap())); } Ok(()) } } impl<'n, 'e> Clone for FlagBuilder<'n, 'e> { fn clone(&self) -> Self { FlagBuilder { name: self.name, short: self.short, long: self.long, aliases: self.aliases.clone(), help: self.help, blacklist: self.blacklist.clone(), overrides: self.overrides.clone(), requires: self.requires.clone(), settings: self.settings, disp_ord: self.disp_ord, } } } impl<'n, 'e> AnyArg<'n, 'e> for FlagBuilder<'n, 'e> { fn name(&self) -> &'n str { self.name } fn overrides(&self) -> Option<&[&'e str]> { self.overrides.as_ref().map(|o| &o[..]) } fn requires(&self) -> Option<&[&'e str]> { self.requires.as_ref().map(|o| &o[..]) } fn blacklist(&self) -> Option<&[&'e str]> { self.blacklist.as_ref().map(|o| &o[..]) } fn required_unless(&self) -> Option<&[&'e str]> { None } fn is_set(&self, s: ArgSettings) -> bool { self.settings.is_set(s) } fn has_switch(&self) -> bool { true } fn takes_value(&self) -> bool { false } fn set(&mut self, s: ArgSettings) { self.settings.set(s) } fn max_vals(&self) -> Option<u64> { None } fn val_names(&self) -> Option<&VecMap<&'e str>> { None } fn num_vals(&self) -> Option<u64> { None } fn possible_vals(&self) -> Option<&[&'e str]> { None } fn 
validator(&self) -> Option<&Rc<Fn(String) -> StdResult<(), String>>> { None } fn min_vals(&self) -> Option<u64> { None } fn short(&self) -> Option<char> { self.short } fn long(&self) -> Option<&'e str> { self.long } fn val_delim(&self) -> Option<char> { None } fn help(&self) -> Option<&'e str> { self.help } fn default_val(&self) -> Option<&'n str> { None } fn longest_filter(&self) -> bool { self.long.is_some() } fn aliases(&self) -> Option<Vec<&'e str>> { if let Some(ref aliases) = self.aliases { let vis_aliases: Vec<_> = aliases.iter() .filter_map(|&(n, v)| if v { Some(n) } else { None }) .collect(); if vis_aliases.is_empty() { None } else { Some(vis_aliases) } } else { None } } } impl<'n, 'e> DispOrder for FlagBuilder<'n, 'e> { fn disp_ord(&self) -> usize { self.disp_ord } } #[cfg(test)] mod test { use args::settings::ArgSettings; use super::FlagBuilder; #[test] fn flagbuilder_display() { let mut f = FlagBuilder::new("flg"); f.settings.set(ArgSettings::Multiple); f.long = Some("flag"); assert_eq!(&*format!("{}", f), "--flag"); let mut f2 = FlagBuilder::new("flg"); f2.short = Some('f'); assert_eq!(&*format!("{}", f2), "-f"); } #[test] fn flagbuilder_display_single_alias() { let mut f = FlagBuilder::new("flg"); f.long = Some("flag"); f.aliases = Some(vec![("als", true)]); assert_eq!(&*format!("{}", f), "--flag"); } #[test] fn flagbuilder_display_multiple_aliases() { let mut f = FlagBuilder::new("flg"); f.short = Some('f'); f.aliases = Some(vec![ ("alias_not_visible", false), ("f2", true), ("f3", true), ("f4", true) ]); assert_eq!(&*format!("{}", f), "-f"); } }
use std::convert::From; use std::fmt::{Display, Formatter, Result}; use std::rc::Rc; use std::result::Result as StdResult; use vec_map::VecMap; use Arg; use args::{AnyArg, DispOrder}; use args::settings::{ArgFlags, ArgSettings}; #[derive(Debug)] #[doc(hidden)] pub struct FlagBuilder<'n, 'e> { pub name: &'n str, pub long: Option<&'e str>, pub aliases: Option<Vec<(&'e str, bool)>>, pub help: Option<&'e str>, pub blacklist: Option<Vec<&'e str>>, pub requires: Option<Vec<&'e str>>, pub short: Option<char>, pub overrides: Option<Vec<&'e str>>, pub settings: ArgFlags, pub disp_ord: usize, } impl<'n, 'e> Default for FlagBuilder<'n, 'e> { fn default() -> Self { FlagBuilder { name: "", long: None, aliases: None, help: None, blacklist: None, requires: None, short: None, overrides: None, settings: ArgFlags::new(), disp_ord: 999, } } } impl<'n, 'e> FlagBuilder<'n, 'e> { pub fn new(name: &'n str) -> Self { FlagBuilder { name: name, ..Default::default() } } } impl<'a, 'b, 'z> From<&'z Arg<'a, 'b>> for FlagBuilder<'a, 'b> { fn from(a: &'z Arg<'a, 'b>) -> Self { assert!(a.validator.is_none(), format!("The argument '{}' has a validator set, yet was parsed as a flag. 
Ensure \ .takes_value(true) or .index(u64) is set.", a.name)); assert!(a.possible_vals.is_none(), format!("The argument '{}' cannot have a specific value set because it doesn't \ have takes_value(true) set", a.name)); assert!(!a.is_set(ArgSettings::Required), format!("The argument '{}' cannot be required because it's a flag, perhaps you \ forgot takes_value(true)?", a.name)); FlagBuilder { name: a.name, short: a.short, long: a.long, aliases: a.aliases.clone(), help: a.help, blacklist: a.blacklist.clone(), overrides: a.overrides.clone(), requires: a.requires.clone(), settings: a.settings, disp_ord: a.disp_ord, } } } impl<'n, 'e> Display for FlagBuilder<'n, 'e> { fn fmt(&self, f: &mut Formatter) -> Result { if let Some(l) = self.long { try!(write!(f, "--{}", l)); } else { try!(write!(f, "-{}", self.short.unwrap())); } Ok(()) } } impl<'n, 'e> Clone for FlagBuilder<'n, 'e> { fn clone(&self) -> Self { FlagBuilder { name: self.name, short: self.short, long: self.long, aliases: self.aliases.clone(), help: self.help, blacklist: self.blacklist.clone(), overrides: self.overrides.clone(), requires: self.requires.clone(), settings: self.settings, disp_ord: self.disp_ord, } } } impl<'n, 'e> AnyArg<'n, 'e> for FlagBuilder<'n, 'e> { fn name(&self) -> &'n str { self.name } fn overrides(&self) -> Option<&[&'e str]> { self.overrides.as_ref().map(|o| &o[..]) } fn requires(&self) -> Option<&[&'e str]> { self.requires.as_ref().map(|o| &o[..]) } fn blacklist(&self) -> Option<&[&'e str]> { self.blacklist.as_ref().map(|o| &o[..]) } fn required_unless(&self) -> Option<&[&'e str]> { None } fn is_set(&self, s: ArgSettings) -> bool { self.settings.is_set(s) } fn has_switch(&self) -> bool { true } fn takes_value(&self) -> bool { false } fn set(&mut self, s: ArgSettings) { self.settings.set(s) } fn max_vals(&self) -> Option<u64> { None } fn val_names(&self) -> Option<&VecMap<&'e str>> { None } fn num_vals(&self) -> Option<u64> { None } fn possible_vals(&self) -> Option<&[&'e str]> { None } fn 
validator(&self) -> Option<&Rc<Fn(String) -> StdResult<(), String>>> { None } fn min_vals(&self) -> Option<u64> { None } fn short(&self) -> Option<char> { self.short } fn long(&self) -> Option<&'e str> { self.long } fn val_delim(&self) -> Option<char> { None } fn help(&self) -> Option<&'e str> { self.help } fn default_val(&self) -> Option<&'n str> { None } fn longest_filter(&self) -> bool { self.long.is_some() } fn aliases(&self) -> Option<Vec<&'e str>> { if let Some(ref aliases) = self.aliases { let vis_aliases: Vec<_> = aliases.iter() .filter_map(|&(n, v)| if v { Some(n) } else { None }) .collect(); if vis_aliases.is_empty() { None } else { Some(vis_aliases) } } else { None } } } impl<'n, 'e> DispOrder for FlagBuilder<'n, 'e> { fn disp_ord(&self) -> usize { self.disp_ord } } #[cfg(test)] mod test { use args::settings::ArgSettings; use super::FlagBuilder; #[test] fn flagbuilder_display() { let mut f = FlagBuilder::new("flg"); f.settings.set(ArgSettings::Multiple); f.long = Some("flag"); assert_eq!(&*format!("{}", f), "--flag"); let mut f2 = FlagBuilder::new("flg"); f2.short = Some('f'); assert_eq!(&*format!("{}", f2), "-f"); } #[test] fn flagbuilder_display_single_alias() { let mut f = FlagBuilder::new("flg"); f.long = Some("flag"); f.aliases = Some(vec![("als", true)]); assert_eq!(&*format!("{}", f), "--flag"); } #[test] fn flagbuilder_display_multiple_aliases() { let mut f = FlagBuilder::new("flg"); f.short = Some('f'); f.aliases =
; assert_eq!(&*format!("{}", f), "-f"); } }
Some(vec![ ("alias_not_visible", false), ("f2", true), ("f3", true), ("f4", true) ])
call_expression
[ { "content": "fn compare_app_str(l: &App, right: &str) -> bool {\n\n let left = build_new_help(&l);\n\n // Strip out any mismatching \\r character on windows that might sneak in on either side\n\n let b = left.trim().replace(\"\\r\", \"\") == right.replace(\"\\r\", \"\");\n\n if !b {\n\n println!(\"\");\n\n println!(\"--> left\");\n\n println!(\"{}\", left);\n\n println!(\"--> right\");\n\n println!(\"{}\", right);\n\n println!(\"--\")\n\n }\n\n b\n\n}\n\n\n", "file_path": "tests/template_help.rs", "rank": 0, "score": 225667.66274003312 }, { "content": "#[cfg(feature = \"suggestions\")]\n\n#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\n\npub fn did_you_mean<'a, T, I>(v: &str, possible_values: I) -> Option<&'a str>\n\n where T: AsRef<str> + 'a,\n\n I: IntoIterator<Item = &'a T>\n\n{\n\n\n\n let mut candidate: Option<(f64, &str)> = None;\n\n for pv in possible_values {\n\n let confidence = strsim::jaro_winkler(v, pv.as_ref());\n\n if confidence > 0.8 &&\n\n (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) {\n\n candidate = Some((confidence, pv.as_ref()));\n\n }\n\n }\n\n match candidate {\n\n None => None,\n\n Some((_, candidate)) => Some(candidate),\n\n }\n\n}\n\n\n", "file_path": "src/suggestions.rs", "rank": 1, "score": 225389.22572974567 }, { "content": "fn wrap_help(help: &mut String, longest_w: usize, avail_chars: usize) {\n\n debugln!(\"fn=wrap_help;longest_w={},avail_chars={}\",\n\n longest_w,\n\n avail_chars);\n\n debug!(\"Enough space to wrap...\");\n\n if longest_w < avail_chars {\n\n sdebugln!(\"Yes\");\n\n let mut prev_space = 0;\n\n let mut j = 0;\n\n for (idx, g) in (&*help.clone()).grapheme_indices(true) {\n\n debugln!(\"iter;idx={},g={}\", idx, g);\n\n if g == \"\\n\" {\n\n debugln!(\"Newline found...\");\n\n debugln!(\"Still space...{:?}\", str_width(&help[j..idx]) < avail_chars);\n\n if str_width(&help[j..idx]) < avail_chars {\n\n j = idx;\n\n continue;\n\n }\n\n } else if g != \" \" {\n\n if idx != help.len() - 
1 || str_width(&help[j..idx]) < avail_chars {\n", "file_path": "src/app/help.rs", "rank": 2, "score": 220686.30207422114 }, { "content": "#[test]\n\nfn arg_required_else_help() {\n\n let result = App::new(\"arg_required\")\n\n .setting(AppSettings::ArgRequiredElseHelp)\n\n .arg(Arg::with_name(\"test\")\n\n .index(1))\n\n .get_matches_from_safe(vec![\"\"]);\n\n assert!(result.is_err());\n\n let err = result.err().unwrap();\n\n assert_eq!(err.kind, ErrorKind::MissingArgumentOrSubcommand);\n\n}\n\n\n", "file_path": "tests/app_settings.rs", "rank": 3, "score": 214428.6019089144 }, { "content": "#[test]\n\nfn flag_using_long() {\n\n let m = App::new(\"flag\")\n\n .args(&[\n\n Arg::from_usage(\"--flag 'some flag'\"),\n\n Arg::from_usage(\"--color 'some other flag'\")\n\n ])\n\n .get_matches_from(vec![\"\", \"--flag\", \"--color\"]);\n\n assert!(m.is_present(\"flag\"));\n\n assert!(m.is_present(\"color\"));\n\n}\n\n\n", "file_path": "tests/flags.rs", "rank": 4, "score": 209721.07604461216 }, { "content": "#[test]\n\nfn flag_using_short() {\n\n let m = App::new(\"flag\")\n\n .args(&[\n\n Arg::from_usage(\"-f, --flag 'some flag'\"),\n\n Arg::from_usage(\"-c, --color 'some other flag'\")\n\n ])\n\n .get_matches_from(vec![\"\", \"-f\", \"-c\"]);\n\n assert!(m.is_present(\"flag\"));\n\n assert!(m.is_present(\"color\"));\n\n}\n\n\n", "file_path": "tests/flags.rs", "rank": 5, "score": 209689.1593763642 }, { "content": "#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\n\npub fn did_you_mean_suffix<'z, T, I>(arg: &str,\n\n values: I,\n\n style: DidYouMeanMessageStyle)\n\n -> (String, Option<&'z str>)\n\n where T: AsRef<str> + 'z,\n\n I: IntoIterator<Item = &'z T>\n\n{\n\n match did_you_mean(arg, values) {\n\n Some(candidate) => {\n\n let mut suffix = \"\\n\\tDid you mean \".to_owned();\n\n match style {\n\n DidYouMeanMessageStyle::LongFlag => {\n\n suffix.push_str(&Format::Good(\"--\").to_string())\n\n }\n\n DidYouMeanMessageStyle::EnumValue => suffix.push('\\''),\n\n 
}\n\n suffix.push_str(&Format::Good(candidate).to_string()[..]);\n\n if let DidYouMeanMessageStyle::EnumValue = style {\n\n suffix.push('\\'');\n\n }\n", "file_path": "src/suggestions.rs", "rank": 6, "score": 205972.36241865816 }, { "content": "fn is_png(val: String) -> Result<(), String> {\n\n // val is the argument value passed in by the user\n\n // val has type of String.\n\n if val.ends_with(\".png\") {\n\n Ok(())\n\n } else {\n\n // clap automatically adds \"error: \" to the beginning\n\n // of the message.\n\n Err(String::from(\"the file format must be png.\"))\n\n }\n\n // Of course, you can do more complicated validation as\n\n // well, but for the simplicity, this example only checks\n\n // if the value passed in ends with \".png\" or not.\n\n}\n", "file_path": "examples/15_custom_validator.rs", "rank": 7, "score": 202268.43707755534 }, { "content": "#[test]\n\nfn short_flag_name_missing() {\n\n let a = Arg::from_usage(\"-f 'some flag'\");\n\n assert_eq!(a.name, \"f\");\n\n assert_eq!(a.short.unwrap(), 'f');\n\n assert!(a.long.is_none());\n\n assert_eq!(a.help.unwrap(), \"some flag\");\n\n assert!(!a.is_set(ArgSettings::Multiple));\n\n assert!(a.val_names.is_none());\n\n assert!(a.num_vals.is_none());\n\n\n\n}\n", "file_path": "tests/flags.rs", "rank": 8, "score": 201709.19771166838 }, { "content": "// Gets all subcommands including child subcommands in the form of 'name' where the name\n\n// is a single word (i.e. \"install\") of the path to said subcommand (i.e. 
\n\n// \"rustup toolchain install\")\n\n//\n\n// Also note, aliases are treated as their own subcommands but duplicates of whatever they're\n\n// aliasing.\n\npub fn all_subcommand_names(p: &Parser) -> Vec<String> {\n\n let mut subcmds: Vec<_> = subcommands_of(p).iter().map(|&(ref n, _)| n.clone()).collect();\n\n for sc_v in p.subcommands.iter().map(|s| all_subcommand_names(&s.p)) {\n\n subcmds.extend(sc_v);\n\n }\n\n subcmds.sort();\n\n subcmds.dedup();\n\n subcmds\n\n}\n\n\n", "file_path": "src/completions/mod.rs", "rank": 9, "score": 201306.38934525393 }, { "content": "pub fn get_all_subcommand_paths(p: &Parser, first: bool) -> Vec<String> {\n\n let mut subcmds = vec![];\n\n if !p.has_subcommands() {\n\n if !first {\n\n let name = &*p.meta.name;\n\n let path = p.meta.bin_name.as_ref().unwrap().clone().replace(\" \", \"_\");\n\n let mut ret = vec![path.clone()];\n\n if let Some(ref aliases) = p.meta.aliases {\n\n for &(n, _) in aliases {\n\n ret.push(path.replace(name, n));\n\n }\n\n }\n\n return ret;\n\n }\n\n return vec![];\n\n }\n\n for sc in &p.subcommands {\n\n let name = &*sc.p.meta.name;\n\n let path = sc.p.meta.bin_name.as_ref().unwrap().clone().replace(\" \", \"_\");\n\n subcmds.push(path.clone());\n", "file_path": "src/completions/mod.rs", "rank": 10, "score": 194367.58449614508 }, { "content": "#[test]\n\nfn multiple_values_of_option_long() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .long(\"option\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"--option\", \"val1\",\n\n \"--option\", \"val2\",\n\n \"--option\", \"val3\",\n\n ]);\n\n\n\n assert!(m.is_ok());\n\n let m = m.unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert_eq!(m.occurrences_of(\"option\"), 3);\n\n assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), [\"val1\", \"val2\", \"val3\"]);\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 
11, "score": 194078.7113998817 }, { "content": "#[test]\n\nfn multiple_values_of_option_short() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"-o\", \"val1\",\n\n \"-o\", \"val2\",\n\n \"-o\", \"val3\",\n\n ]);\n\n\n\n assert!(m.is_ok());\n\n let m = m.unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert_eq!(m.occurrences_of(\"option\"), 3);\n\n assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), [\"val1\", \"val2\", \"val3\"]);\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 12, "score": 194049.55315226602 }, { "content": "#[cfg(not(feature = \"suggestions\"))]\n\npub fn did_you_mean<'a, T, I>(_: &str, _: I) -> Option<&'a str>\n\n where T: AsRef<str> + 'a,\n\n I: IntoIterator<Item = &'a T>\n\n{\n\n None\n\n}\n\n\n\n/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase\n", "file_path": "src/suggestions.rs", "rank": 13, "score": 192754.02639245166 }, { "content": "#[test]\n\nfn single_alias_of_option() {\n\n let a = App::new(\"single_alias\")\n\n .arg(Arg::with_name(\"alias\")\n\n .long(\"alias\")\n\n .takes_value(true)\n\n .help(\"single alias\")\n\n .alias(\"new-opt\"))\n\n .get_matches_from_safe(vec![\n\n \"\", \"--new-opt\", \"cool\"\n\n ]);\n\n assert!(a.is_ok());\n\n let a = a.unwrap();\n\n assert!(a.is_present(\"alias\"));\n\n assert_eq!(a.value_of(\"alias\").unwrap(), \"cool\");\n\n}\n\n\n", "file_path": "tests/arg_aliases.rs", "rank": 14, "score": 188623.88658234902 }, { "content": "#[test]\n\nfn alias_on_a_subcommand_option() {\n\n let m = App::new(\"test\")\n\n .subcommand(SubCommand::with_name(\"some\")\n\n .arg(Arg::with_name(\"test\")\n\n .short(\"t\")\n\n .long(\"test\")\n\n .takes_value(true)\n\n .alias(\"opt\")\n\n .help(\"testing testing\")))\n\n .arg(Arg::with_name(\"other\")\n\n .long(\"other\")\n\n 
.aliases(&vec![\"o1\", \"o2\", \"o3\"]))\n\n .get_matches_from(vec![\n\n \"test\", \"some\", \"--opt\", \"awesome\"\n\n ]);\n\n\n\n assert!(m.subcommand_matches(\"some\").is_some());\n\n let sub_m = m.subcommand_matches(\"some\").unwrap();\n\n assert!(sub_m.is_present(\"test\"));\n\n assert_eq!(sub_m.value_of(\"test\").unwrap(), \"awesome\");\n\n}\n\n\n", "file_path": "tests/arg_aliases.rs", "rank": 15, "score": 188623.88658234902 }, { "content": "#[test]\n\nfn single_alias_of_flag() {\n\n let a = App::new(\"test\")\n\n .arg(Arg::with_name(\"flag\")\n\n .long(\"flag\")\n\n .alias(\"alias\"))\n\n .get_matches_from_safe(vec![\"\", \"--alias\"]);\n\n assert!(a.is_ok());\n\n let a = a.unwrap();\n\n assert!(a.is_present(\"flag\"));\n\n}\n\n\n", "file_path": "tests/arg_aliases.rs", "rank": 16, "score": 188608.38668898528 }, { "content": "// Gets all subcommands including child subcommands in the form of ('name', 'bin_name') where the name\n\n// is a single word (i.e. \"install\") of the path and full bin_name of said subcommand (i.e. \n\n// \"rustup toolchain install\")\n\n//\n\n// Also note, aliases are treated as their own subcommands but duplicates of whatever they're\n\n// aliasing.\n\npub fn all_subcommands(p: &Parser) -> Vec<(String, String)> {\n\n let mut subcmds: Vec<_> = subcommands_of(p);\n\n for sc_v in p.subcommands.iter().map(|s| all_subcommands(&s.p)) {\n\n subcmds.extend(sc_v);\n\n }\n\n subcmds\n\n}\n\n\n", "file_path": "src/completions/mod.rs", "rank": 17, "score": 188350.6002551124 }, { "content": "// Gets all subcommands exlcuding child subcommands in the form of (name, bin_name) where the name\n\n// is a single word (i.e. \"install\") and the bin_name is a space deliniated list of the path to said\n\n// subcommand (i.e. 
\"rustup toolchain install\")\n\n//\n\n// Also note, aliases are treated as their own subcommands but duplicates of whatever they're\n\n// aliasing.\n\npub fn subcommands_of(p: &Parser) -> Vec<(String, String)> {\n\n debugln!(\"fn=subcommands_of;name={};bin_name={}\", p.meta.name, p.meta.bin_name.as_ref().unwrap());\n\n let mut subcmds = vec![];\n\n\n\n debug!(\"Has subcommands...\");\n\n if !p.has_subcommands() {\n\n sdebugln!(\"No\");\n\n let mut ret = vec![(p.meta.name.clone(), p.meta.bin_name.as_ref().unwrap().clone())];\n\n debugln!(\"Looking for aliases...\");\n\n if let Some(ref aliases) = p.meta.aliases {\n\n for &(n, _) in aliases {\n\n debugln!(\"Found alias...{}\", n);\n\n let mut als_bin_name: Vec<_> = p.meta.bin_name.as_ref().unwrap().split(' ').collect();\n\n als_bin_name.push(n);\n\n let old = als_bin_name.len() - 2;\n\n als_bin_name.swap_remove(old);\n\n ret.push((n.to_owned(), als_bin_name.join(\" \")));\n\n }\n\n }\n\n return ret;\n", "file_path": "src/completions/mod.rs", "rank": 18, "score": 188350.55894933324 }, { "content": "#[test]\n\nfn sc_long_flag_short_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag -o some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 19, "score": 187044.760797574 }, { "content": "#[test]\n\nfn sc_short_flag_long_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f --option some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 20, "score": 187044.760797574 }, { "content": "#[test]\n\nfn sc_long_flag_x2_short_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --flag -o some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 21, "score": 180774.17890773877 }, { "content": "#[test]\n\nfn sc_short_flag_x2_long_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -f --option some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 22, "score": 180774.17890773877 }, { "content": 
"#[test]\n\nfn sc_short_flag_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f --option=some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 23, "score": 180774.17890773877 }, { "content": "fn str_width(s: &str) -> usize {\n\n UnicodeWidthStr::width(s)\n\n}\n\n\n\nconst TAB: &'static str = \" \";\n\n\n", "file_path": "src/app/help.rs", "rank": 24, "score": 179778.78482139646 }, { "content": "#[test]\n\nfn option_required() {\n\n let result = App::new(\"option_required\")\n\n .arg(Arg::from_usage(\"-f [flag] 'some flag'\")\n\n .requires(\"color\"))\n\n .arg(Arg::from_usage(\"-c [color] 'third flag'\"))\n\n .get_matches_from_safe(vec![\"\", \"-f\", \"val\"]);\n\n assert!(result.is_err());\n\n let err = result.err().unwrap();\n\n assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 25, "score": 179732.65728513902 }, { "content": "#[test]\n\nfn option_required_2() {\n\n let m = App::new(\"option_required\")\n\n .arg(Arg::from_usage(\"-f [flag] 'some flag'\")\n\n .requires(\"c\"))\n\n .arg(Arg::from_usage(\"-c [color] 'third flag'\"))\n\n .get_matches_from(vec![\"\", \"-f\", \"val\", \"-c\", \"other_val\"]);\n\n assert!(m.is_present(\"c\"));\n\n assert_eq!(m.value_of(\"c\").unwrap(), \"other_val\");\n\n assert!(m.is_present(\"f\"));\n\n assert_eq!(m.value_of(\"f\").unwrap(), \"val\");\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 26, "score": 179732.65728513902 }, { "content": "#[test]\n\nfn flag_required() {\n\n let result = App::new(\"flag_required\")\n\n .arg(Arg::from_usage(\"-f, --flag 'some flag'\")\n\n .requires(\"color\"))\n\n .arg(Arg::from_usage(\"-c, --color 'third flag'\"))\n\n .get_matches_from_safe(vec![\"\", \"-f\"]);\n\n assert!(result.is_err());\n\n let err = result.err().unwrap();\n\n assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 27, "score": 179715.6182734397 }, { "content": 
"#[test]\n\nfn flag_required_2() {\n\n let m = App::new(\"flag_required\")\n\n .arg(Arg::from_usage(\"-f, --flag 'some flag'\")\n\n .requires(\"color\"))\n\n .arg(Arg::from_usage(\"-c, --color 'third flag'\"))\n\n .get_matches_from(vec![\"\", \"-f\", \"-c\"]);\n\n assert!(m.is_present(\"color\"));\n\n assert!(m.is_present(\"flag\"));\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 28, "score": 179715.6182734397 }, { "content": "#[test]\n\nfn help_long() {\n\n let m = App::new(\"test\")\n\n .author(\"Kevin K.\")\n\n .about(\"tests stuff\")\n\n .version(\"1.3\")\n\n .get_matches_from_safe(vec![\"myprog\", \"--help\"]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::HelpDisplayed);\n\n}\n\n\n", "file_path": "tests/help.rs", "rank": 29, "score": 179364.86713370943 }, { "content": "#[test]\n\nfn help_short() {\n\n let m = App::new(\"test\")\n\n .author(\"Kevin K.\")\n\n .about(\"tests stuff\")\n\n .version(\"1.3\")\n\n .get_matches_from_safe(vec![\"myprog\", \"-h\"]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::HelpDisplayed);\n\n}\n\n\n", "file_path": "tests/help.rs", "rank": 30, "score": 179331.3658245817 }, { "content": "#[cfg(not(feature = \"color\"))]\n\npub fn is_a_tty(_: bool) -> bool {\n\n debugln!(\"fn=is_a_tty;\");\n\n false\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct Colorizer {\n\n pub use_stderr: bool,\n\n pub when: ColorWhen,\n\n}\n\n\n\nmacro_rules! 
color {\n\n ($_self:ident, $c:ident, $m:expr) => {\n\n match $_self.when {\n\n ColorWhen::Auto => if is_a_tty($_self.use_stderr) {\n\n Format::$c($m)\n\n } else {\n\n Format::None($m)\n\n },\n\n ColorWhen::Always => Format::$c($m),\n", "file_path": "src/fmt.rs", "rank": 31, "score": 178293.77874524574 }, { "content": "#[test]\n\nfn option_short_min_more_single_occur() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"arg\")\n\n .required(true))\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .min_values(3))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"pos\",\n\n \"-o\", \"val1\",\n\n \"val2\",\n\n \"val3\",\n\n \"val4\",\n\n ]);\n\n\n\n let m = m.map_err(|e| println!(\"failed to unwrap err with error kind {:#?}\", e)).unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert!(m.is_present(\"arg\"));\n\n assert_eq!(m.occurrences_of(\"option\"), 1);\n\n assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), [\"val1\", \"val2\", \"val3\", \"val4\"]);\n\n assert_eq!(m.value_of(\"arg\"), Some(\"pos\"));\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 32, "score": 175110.65630477044 }, { "content": "#[test]\n\nfn option_short_min_more_mult_occurs() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"arg\")\n\n .required(true))\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .min_values(3))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"pos\",\n\n \"-o\", \"val1\",\n\n \"-o\", \"val2\",\n\n \"-o\", \"val3\",\n\n \"-o\", \"val4\",\n\n ]);\n\n\n\n let m = m.map_err(|e| println!(\"failed to unwrap err with error kind {:?}\", e.kind)).unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert!(m.is_present(\"arg\"));\n\n assert_eq!(m.occurrences_of(\"option\"), 4);\n\n 
assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), [\"val1\", \"val2\", \"val3\", \"val4\"]);\n\n assert_eq!(m.value_of(\"arg\"), Some(\"pos\"));\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 33, "score": 175110.65630477044 }, { "content": "#[test]\n\nfn sc_short_flag_x2_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -f --option=some\", SCF2OP);\n\n}\n", "file_path": "tests/tests.rs", "rank": 34, "score": 174956.55775616743 }, { "content": "#[test]\n\nfn sc_short_flag_x2_comb_long_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -ff --option some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 35, "score": 174956.55775616743 }, { "content": "#[test]\n\nfn sc_long_flag_x2_short_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --flag -o=some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 36, "score": 174956.55775616743 }, { "content": "#[cfg(feature = \"color\")]\n\npub fn is_a_tty(stderr: bool) -> bool {\n\n debugln!(\"fn=is_a_tty;\");\n\n debugln!(\"Use stderr...{:?}\", stderr);\n\n let fd = if stderr { STDERR } else { STDOUT };\n\n unsafe { libc::isatty(fd) != 0 }\n\n}\n\n\n", "file_path": "src/fmt.rs", "rank": 37, "score": 173916.52317207126 }, { "content": "#[test]\n\nfn default_values_default() {\n\n let r = App::new(\"df\")\n\n .arg( Arg::from_usage(\"[arg] 'some opt'\")\n\n .default_value(\"default\"))\n\n .get_matches_from_safe(vec![\"\"]);\n\n assert!(r.is_ok());\n\n let m = r.unwrap();\n\n assert!(m.is_present(\"arg\"));\n\n assert_eq!(m.value_of(\"arg\").unwrap(), \"default\");\n\n}\n\n\n", "file_path": "tests/positionals.rs", "rank": 38, "score": 172855.25001348025 }, { "content": "#[test]\n\nfn default_values_default() {\n\n let r = App::new(\"df\")\n\n .arg( Arg::from_usage(\"-o [opt] 'some opt'\")\n\n .default_value(\"default\"))\n\n .get_matches_from_safe(vec![\"\"]);\n\n assert!(r.is_ok());\n\n let m = 
r.unwrap();\n\n assert!(m.is_present(\"o\"));\n\n assert_eq!(m.value_of(\"o\").unwrap(), \"default\");\n\n}\n\n\n", "file_path": "tests/opts.rs", "rank": 39, "score": 172855.25001348025 }, { "content": "#[test]\n\nfn flag_using_mixed() {\n\n let m = App::new(\"flag\")\n\n .args(&[\n\n Arg::from_usage(\"-f, --flag 'some flag'\"),\n\n Arg::from_usage(\"-c, --color 'some other flag'\")\n\n ])\n\n .get_matches_from(vec![\"\", \"-f\", \"--color\"]);\n\n assert!(m.is_present(\"flag\"));\n\n assert!(m.is_present(\"color\"));\n\n\n\n let m = App::new(\"flag\")\n\n .args(&[\n\n Arg::from_usage(\"-f, --flag 'some flag'\"),\n\n Arg::from_usage(\"-c, --color 'some other flag'\")\n\n ])\n\n .get_matches_from(vec![\"\", \"--flag\", \"-c\"]);\n\n assert!(m.is_present(\"flag\"));\n\n assert!(m.is_present(\"color\"));\n\n}\n\n\n", "file_path": "tests/flags.rs", "rank": 40, "score": 172852.14175879222 }, { "content": "#[test]\n\nfn short_flag_misspel() {\n\n let a = Arg::from_usage(\"-f1, --flag 'some flag'\");\n\n assert_eq!(a.name, \"flag\");\n\n assert_eq!(a.short.unwrap(), 'f');\n\n assert_eq!(a.long.unwrap(), \"flag\");\n\n assert_eq!(a.help.unwrap(), \"some flag\");\n\n assert!(!a.is_set(ArgSettings::Multiple));\n\n assert!(a.val_names.is_none());\n\n assert!(a.num_vals.is_none());\n\n}\n\n\n", "file_path": "tests/flags.rs", "rank": 41, "score": 172706.0777610832 }, { "content": "#[test]\n\nfn subcommand_long_help() {\n\n let m = test::complex_app()\n\n .get_matches_from_safe(vec![\"clap-test\", \"subcmd\", \"--help\"]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::HelpDisplayed);\n\n}\n\n\n", "file_path": "tests/help.rs", "rank": 42, "score": 172458.59631021565 }, { "content": "#[test]\n\nfn subcommand_short_help() {\n\n let m = test::complex_app()\n\n .get_matches_from_safe(vec![\"clap-test\", \"subcmd\", \"-h\"]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::HelpDisplayed);\n\n}\n\n\n", "file_path": 
"tests/help.rs", "rank": 43, "score": 172426.6796419677 }, { "content": "#[test]\n\nfn sc_long_flag_long_opt() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --option some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 44, "score": 172313.01657645294 }, { "content": "#[test]\n\nfn long_flag_long_opt_pos() {\n\n test::check_complex_output(\"clap-test value --flag --option some\", FOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 45, "score": 172313.01657645294 }, { "content": "#[test]\n\nfn short_flag_short_opt_pos() {\n\n test::check_complex_output(\"clap-test value -f -o some\", FOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 46, "score": 172273.34724153933 }, { "content": "#[test]\n\nfn arg_require_group_3() {\n\n let m = App::new(\"arg_require_group\")\n\n .arg(Arg::from_usage(\"-f, --flag 'some flag'\")\n\n .requires(\"gr\"))\n\n .group(ArgGroup::with_name(\"gr\")\n\n .arg(\"some\")\n\n .arg(\"other\"))\n\n .arg(Arg::from_usage(\"--some 'some arg'\"))\n\n .arg(Arg::from_usage(\"--other 'other arg'\"))\n\n .get_matches_from(vec![\"\", \"-f\", \"--other\"]);\n\n assert!(!m.is_present(\"some\"));\n\n assert!(m.is_present(\"other\"));\n\n assert!(m.is_present(\"flag\"));\n\n}\n\n\n\n// REQUIRED_UNLESS\n\n\n", "file_path": "tests/require.rs", "rank": 47, "score": 171698.64955251175 }, { "content": "#[test]\n\nfn arg_require_group() {\n\n let result = App::new(\"arg_require_group\")\n\n .arg(Arg::from_usage(\"-f, --flag 'some flag'\")\n\n .requires(\"gr\"))\n\n .group(ArgGroup::with_name(\"gr\")\n\n .arg(\"some\")\n\n .arg(\"other\"))\n\n .arg(Arg::from_usage(\"--some 'some arg'\"))\n\n .arg(Arg::from_usage(\"--other 'other arg'\"))\n\n .get_matches_from_safe(vec![\"\", \"-f\"]);\n\n assert!(result.is_err());\n\n let err = result.err().unwrap();\n\n assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 48, "score": 171698.64955251175 }, { "content": 
"#[test]\n\nfn arg_require_group_2() {\n\n let m = App::new(\"arg_require_group\")\n\n .arg(Arg::from_usage(\"-f, --flag 'some flag'\")\n\n .requires(\"gr\"))\n\n .group(ArgGroup::with_name(\"gr\")\n\n .arg(\"some\")\n\n .arg(\"other\"))\n\n .arg(Arg::from_usage(\"--some 'some arg'\"))\n\n .arg(Arg::from_usage(\"--other 'other arg'\"))\n\n .get_matches_from(vec![\"\", \"-f\", \"--some\"]);\n\n assert!(m.is_present(\"some\"));\n\n assert!(!m.is_present(\"other\"));\n\n assert!(m.is_present(\"flag\"));\n\n}\n\n\n", "file_path": "tests/require.rs", "rank": 49, "score": 171698.64955251175 }, { "content": "#[test]\n\nfn sc_short_flag_x2_comb_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -ff --option=some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 50, "score": 169543.86600093317 }, { "content": "#[test]\n\nfn alias_help() {\n\n let m = App::new(\"myprog\")\n\n .subcommand(SubCommand::with_name(\"test\")\n\n .alias(\"do-stuff\"))\n\n .get_matches_from_safe(vec![\"myprog\", \"help\", \"do-stuff\"]);\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::HelpDisplayed);\n\n}\n\n\n", "file_path": "tests/subcommands.rs", "rank": 51, "score": 168058.03448942862 }, { "content": "// These are just convenient traits to make the code easier to read.\n\ntrait ArgWithDisplay<'b, 'c>: AnyArg<'b, 'c> + Display {}\n\nimpl<'b, 'c, T> ArgWithDisplay<'b, 'c> for T where T: AnyArg<'b, 'c> + Display {}\n\n\n", "file_path": "src/app/help.rs", "rank": 52, "score": 167843.42134149245 }, { "content": "fn build_new_help(app: &App) -> String {\n\n let mut buf = Cursor::new(Vec::with_capacity(50));\n\n app.write_help(&mut buf).unwrap();\n\n let content = buf.into_inner();\n\n String::from_utf8(content).unwrap()\n\n}\n\n\n", "file_path": "tests/template_help.rs", "rank": 53, "score": 167806.91742283112 }, { "content": "#[test]\n\nfn long_flag_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test value --flag 
--option=some\", FOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 54, "score": 167014.10880976706 }, { "content": "#[test]\n\nfn sc_short_flag_short_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -o some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 55, "score": 166975.57226439536 }, { "content": "#[test]\n\nfn default_values_user_value() {\n\n let r = App::new(\"df\")\n\n .arg( Arg::from_usage(\"-o [opt] 'some opt'\")\n\n .default_value(\"default\"))\n\n .get_matches_from_safe(vec![\"\", \"-o\", \"value\"]);\n\n assert!(r.is_ok());\n\n let m = r.unwrap();\n\n assert!(m.is_present(\"o\"));\n\n assert_eq!(m.value_of(\"o\").unwrap(), \"value\");\n\n}\n\n\n", "file_path": "tests/opts.rs", "rank": 56, "score": 166420.71188825008 }, { "content": "#[test]\n\nfn default_values_user_value() {\n\n let r = App::new(\"df\")\n\n .arg( Arg::from_usage(\"[arg] 'some arg'\")\n\n .default_value(\"default\"))\n\n .get_matches_from_safe(vec![\"\", \"value\"]);\n\n assert!(r.is_ok());\n\n let m = r.unwrap();\n\n assert!(m.is_present(\"arg\"));\n\n assert_eq!(m.value_of(\"arg\").unwrap(), \"value\");\n\n}\n\n\n", "file_path": "tests/positionals.rs", "rank": 57, "score": 166420.71188825008 }, { "content": "#[test]\n\nfn possible_values_of_option() {\n\n let m = App::new(\"possible_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"-o\")\n\n .long(\"--option\")\n\n .takes_value(true)\n\n .possible_value(\"test123\"))\n\n .get_matches_from_safe(vec![\"myprog\", \"--option\", \"test123\"]);\n\n\n\n assert!(m.is_ok());\n\n let m = m.unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert_eq!(m.value_of(\"option\"), Some(\"test123\"));\n\n}\n\n\n", "file_path": "tests/possible_values.rs", "rank": 58, "score": 166219.72694520664 }, { "content": "#[test]\n\n#[should_panic]\n\nfn unique_arg_names() {\n\n App::new(\"some\").args(&[\n\n Arg::with_name(\"arg\").short(\"a\"),\n\n Arg::with_name(\"arg\").short(\"b\")\n\n 
]);\n\n}\n\n\n", "file_path": "tests/unique_args.rs", "rank": 59, "score": 165125.132926404 }, { "content": "#[test]\n\n#[should_panic]\n\nfn unique_arg_longs() {\n\n App::new(\"some\").args(&[\n\n Arg::with_name(\"arg1\").long(\"long\"),\n\n Arg::with_name(\"arg2\").long(\"long\")\n\n ]);\n\n}\n", "file_path": "tests/unique_args.rs", "rank": 60, "score": 164990.02782014047 }, { "content": "#[test]\n\n#[should_panic]\n\nfn unique_arg_shorts() {\n\n App::new(\"some\").args(&[\n\n Arg::with_name(\"arg1\").short(\"a\"),\n\n Arg::with_name(\"arg2\").short(\"a\")\n\n ]);\n\n}\n\n\n", "file_path": "tests/unique_args.rs", "rank": 61, "score": 164959.55265361158 }, { "content": "#[test]\n\nfn sc_long_flag_x2_long_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --flag --option some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 62, "score": 162074.9878391332 }, { "content": "#[test]\n\nfn sc_long_flag_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --option=some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 63, "score": 162074.9878391332 }, { "content": "#[test]\n\nfn sc_short_flag_x2_short_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -f -o some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 64, "score": 162037.5211840223 }, { "content": "#[test]\n\nfn sc_short_flag_short_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -o=some\", SCFOP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 65, "score": 162037.5211840223 }, { "content": "#[test]\n\nfn short_flag_x2_comb_short_opt_pos() {\n\n test::check_complex_output(\"clap-test value -ff -o some\", F2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 66, "score": 162037.5211840223 }, { "content": "#[test]\n\nfn opts_using_short() {\n\n let r = App::new(\"opts\")\n\n .args(&[\n\n Arg::from_usage(\"-f [flag] 'some flag'\"),\n\n Arg::from_usage(\"-c [color] 
'some other flag'\")\n\n ])\n\n .get_matches_from_safe(vec![\"\", \"-f\", \"some\", \"-c\", \"other\"]);\n\n assert!(r.is_ok());\n\n let m = r.unwrap();\n\n assert!(m.is_present(\"f\"));\n\n assert_eq!(m.value_of(\"f\").unwrap(), \"some\");\n\n assert!(m.is_present(\"c\"));\n\n assert_eq!(m.value_of(\"c\").unwrap(), \"other\");\n\n}\n\n\n", "file_path": "tests/opts.rs", "rank": 67, "score": 161150.97992429638 }, { "content": "#[test]\n\nfn no_bin_name() {\n\n let result = App::new(\"arg_required\")\n\n .setting(AppSettings::NoBinaryName)\n\n .arg(Arg::with_name(\"test\")\n\n .required(true)\n\n .index(1))\n\n .get_matches_from_safe(vec![\"testing\"]);\n\n assert!(result.is_ok());\n\n let matches = result.unwrap();\n\n assert_eq!(matches.value_of(\"test\").unwrap(), \"testing\");\n\n}\n\n\n", "file_path": "tests/app_settings.rs", "rank": 68, "score": 161081.3832982084 }, { "content": "#[bench]\n\nfn build_app(b: &mut Bencher) {\n\n b.iter(|| App::new(\"claptests\"));\n\n}\n\n\n", "file_path": "benches/01_default.rs", "rank": 69, "score": 160656.24999083983 }, { "content": "#[bench]\n\nfn parse_clean(b: &mut Bencher) {\n\n b.iter(|| App::new(\"claptests\").get_matches_from(vec![\"\"]));\n\n}\n", "file_path": "benches/01_default.rs", "rank": 70, "score": 160656.24999083983 }, { "content": "#[test]\n\nfn unified_help() {\n\n let mut app = App::new(\"test\")\n\n .author(\"Kevin K.\")\n\n .about(\"tests stuff\")\n\n .version(\"1.3\")\n\n .setting(AppSettings::UnifiedHelpMessage)\n\n .args_from_usage(\"-f, --flag 'some flag'\n\n [arg1] 'some pos arg'\n\n --option [opt] 'some option'\");\n\n // We call a get_matches method to cause --help and --version to be built\n\n let _ = app.get_matches_from_safe_borrow(vec![\"\"]);\n\n\n\n // Now we check the output of print_help()\n\n let mut help = vec![];\n\n app.write_help(&mut help).expect(\"failed to print help\");\n\n assert_eq!(&*String::from_utf8_lossy(&*help), &*String::from(\"test 1.3\\n\\\n\nKevin K.\n\ntests 
stuff\n\n\n\nUSAGE:\n", "file_path": "tests/app_settings.rs", "rank": 71, "score": 160615.73122827313 }, { "content": "#[test]\n\nfn issue_702_multiple_values() {\n\n let app = App::new(\"myapp\")\n\n .version(\"1.0\")\n\n .author(\"foo\")\n\n .about(\"bar\")\n\n .arg(Arg::with_name(\"arg1\")\n\n .help(\"some option\"))\n\n .arg(Arg::with_name(\"arg2\")\n\n .multiple(true)\n\n .help(\"some option\"))\n\n .arg(Arg::with_name(\"some\")\n\n .help(\"some option\")\n\n .short(\"s\")\n\n .long(\"some\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"other\")\n\n .help(\"some other option\")\n\n .short(\"o\")\n\n .long(\"other\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"label\")\n\n .help(\"a label\")\n\n .short(\"l\")\n\n .long(\"label\")\n\n .multiple(true)\n\n .takes_value(true));\n\n test::check_err_output(app, \"myapp --help\", ISSUE_702, false);\n\n}", "file_path": "tests/help.rs", "rank": 72, "score": 160562.44973250286 }, { "content": "#[bench]\n\nfn parse_option(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"-o\", \"option1\"]));\n\n}\n\n\n", "file_path": "benches/02_simple.rs", "rank": 73, "score": 160455.2650477964 }, { "content": "#[bench]\n\nfn parse_option(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"-o\", \"option1\"]));\n\n}\n\n\n", "file_path": "benches/03_complex.rs", "rank": 74, "score": 160455.2650477964 }, { "content": "#[bench]\n\nfn parse_flag(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"-f\"]));\n\n}\n\n\n", "file_path": "benches/02_simple.rs", "rank": 75, "score": 160439.76515443265 }, { "content": "#[bench]\n\nfn parse_flag(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"-f\"]));\n\n}\n\n\n", "file_path": "benches/03_complex.rs", "rank": 76, "score": 160439.76515443265 }, { "content": "#[test]\n\nfn possible_values_of_option_multiple() {\n\n let m = App::new(\"possible_values\")\n\n 
.arg(Arg::with_name(\"option\")\n\n .short(\"-o\")\n\n .long(\"--option\")\n\n .takes_value(true)\n\n .possible_value(\"test123\")\n\n .possible_value(\"test321\")\n\n .multiple(true))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"--option\", \"test123\",\n\n \"--option\", \"test321\",\n\n ]);\n\n\n\n assert!(m.is_ok());\n\n let m = m.unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), vec![\"test123\", \"test321\"]);\n\n}\n\n\n", "file_path": "tests/possible_values.rs", "rank": 77, "score": 160396.20084215514 }, { "content": "#[test]\n\nfn multiple_values_of_option_exact_more() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .number_of_values(3))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"-o\", \"val1\",\n\n \"-o\", \"val2\",\n\n \"-o\", \"val3\",\n\n \"-o\", \"val4\",\n\n ]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::WrongNumberOfValues);\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 78, "score": 160396.20084215514 }, { "content": "#[test]\n\nfn multiple_values_of_option_mixed() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .long(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"-o\", \"val1\",\n\n \"--option\", \"val2\",\n\n \"--option\", \"val3\",\n\n \"-o\", \"val4\",\n\n ]);\n\n\n\n assert!(m.is_ok());\n\n let m = m.unwrap();\n\n\n\n assert!(m.is_present(\"option\"));\n\n assert_eq!(m.occurrences_of(\"option\"), 4);\n\n assert_eq!(m.values_of(\"option\").unwrap().collect::<Vec<_>>(), [\"val1\", \"val2\", \"val3\", \"val4\"]);\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 79, "score": 160396.20084215514 }, { "content": "#[test]\n\nfn 
multiple_values_of_option_max_more() {\n\n let m = App::new(\"multiple_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"o\")\n\n .help(\"multiple options\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .max_values(3))\n\n .get_matches_from_safe(vec![\n\n \"\",\n\n \"-o\", \"val1\",\n\n \"-o\", \"val2\",\n\n \"-o\", \"val3\",\n\n \"-o\", \"val4\",\n\n ]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::TooManyValues);\n\n}\n\n\n", "file_path": "tests/multiple_values.rs", "rank": 80, "score": 160396.20084215514 }, { "content": "#[test]\n\nfn possible_values_of_option_fail() {\n\n let m = App::new(\"possible_values\")\n\n .arg(Arg::with_name(\"option\")\n\n .short(\"-o\")\n\n .long(\"--option\")\n\n .takes_value(true)\n\n .possible_value(\"test123\"))\n\n .get_matches_from_safe(vec![\"myprog\", \"--option\", \"notest\"]);\n\n\n\n assert!(m.is_err());\n\n assert_eq!(m.unwrap_err().kind, ErrorKind::InvalidValue);\n\n}\n\n\n", "file_path": "tests/possible_values.rs", "rank": 81, "score": 160396.20084215514 }, { "content": "#[bench]\n\nfn example4(b: &mut Bencher) {\n\n let app = app_example4();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 82, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example5(b: &mut Bencher) {\n\n let app = app_example5();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 83, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example3(b: &mut Bencher) {\n\n let app = app_example3();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 84, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example8(b: &mut Bencher) {\n\n let app = app_example8();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 85, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example2(b: &mut Bencher) {\n\n let app = 
app_example2();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 86, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example7(b: &mut Bencher) {\n\n let app = app_example7();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 87, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example10(b: &mut Bencher) {\n\n let app = app_example10();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 88, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example6(b: &mut Bencher) {\n\n let app = app_example6();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 89, "score": 160237.74207231446 }, { "content": "#[bench]\n\nfn example1(b: &mut Bencher) {\n\n let app = app_example1();\n\n b.iter(|| build_help(&app));\n\n}\n\n\n", "file_path": "benches/04_new_help.rs", "rank": 90, "score": 160237.74207231446 }, { "content": "#[doc(hidden)]\n\npub trait AnyArg<'n, 'e> {\n\n fn name(&self) -> &'n str;\n\n fn overrides(&self) -> Option<&[&'e str]>;\n\n fn aliases(&self) -> Option<Vec<&'e str>>;\n\n fn requires(&self) -> Option<&[&'e str]>;\n\n fn blacklist(&self) -> Option<&[&'e str]>;\n\n fn required_unless(&self) -> Option<&[&'e str]>;\n\n fn is_set(&self, ArgSettings) -> bool;\n\n fn set(&mut self, ArgSettings);\n\n fn has_switch(&self) -> bool;\n\n fn max_vals(&self) -> Option<u64>;\n\n fn min_vals(&self) -> Option<u64>;\n\n fn num_vals(&self) -> Option<u64>;\n\n fn possible_vals(&self) -> Option<&[&'e str]>;\n\n fn validator(&self) -> Option<&Rc<Fn(String) -> Result<(), String>>>;\n\n fn short(&self) -> Option<char>;\n\n fn long(&self) -> Option<&'e str>;\n\n fn val_delim(&self) -> Option<char>;\n\n fn takes_value(&self) -> bool;\n\n fn val_names(&self) -> Option<&VecMap<&'e str>>;\n\n fn help(&self) -> Option<&'e str>;\n\n fn default_val(&self) -> Option<&'n str>;\n\n fn 
longest_filter(&self) -> bool;\n\n}\n\n\n", "file_path": "src/args/any_arg.rs", "rank": 91, "score": 158192.13384660776 }, { "content": "#[test]\n\nfn sc_long_flag_x2_long_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value --flag --flag --option=some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 92, "score": 157459.176404319 }, { "content": "#[test]\n\nfn sc_short_flag_x2_comb_short_opt_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -ff -o some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 93, "score": 157422.7218374905 }, { "content": "#[test]\n\nfn sc_short_flag_x2_short_opt_eq_pos() {\n\n test::check_complex_output(\"clap-test subcmd value -f -f -o=some\", SCF2OP);\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 94, "score": 157422.7218374905 }, { "content": "#[inline]\n\nfn name_end(b: u8) -> bool {\n\n b != b']' && b != b'>'\n\n}\n\n\n", "file_path": "src/usage_parser.rs", "rank": 95, "score": 156551.73413202696 }, { "content": "#[inline]\n\nfn long_end(b: u8) -> bool {\n\n b != b'\\'' && b != b'.' 
&& b != b'<' && b != b'[' && b != b'=' && b != b' '\n\n}\n\n\n", "file_path": "src/usage_parser.rs", "rank": 96, "score": 156422.46730295703 }, { "content": "#[inline]\n\nfn help_start(b: u8) -> bool {\n\n b != b'\\''\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use args::Arg;\n\n use args::ArgSettings;\n\n\n\n #[test]\n\n fn create_flag_usage() {\n\n let a = Arg::from_usage(\"[flag] -f 'some help info'\");\n\n assert_eq!(a.name, \"flag\");\n\n assert_eq!(a.short.unwrap(), 'f');\n\n assert!(a.long.is_none());\n\n assert_eq!(a.help.unwrap(), \"some help info\");\n\n assert!(!a.is_set(ArgSettings::Multiple));\n\n assert!(a.val_names.is_none());\n\n assert!(a.num_vals.is_none());\n\n\n", "file_path": "src/usage_parser.rs", "rank": 97, "score": 156126.32637165528 }, { "content": "#[bench]\n\nfn parse_sc_option(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"subcmd\", \"-o\", \"option1\"]));\n\n}\n\n\n", "file_path": "benches/03_complex.rs", "rank": 98, "score": 155956.9963828889 }, { "content": "#[bench]\n\nfn parse_sc_flag(b: &mut Bencher) {\n\n b.iter(|| create_app!().get_matches_from(vec![\"myprog\", \"subcmd\", \"-f\"]));\n\n}\n\n\n", "file_path": "benches/03_complex.rs", "rank": 99, "score": 155942.16628414975 } ]
Rust
src/rest_api/routes/consortium.rs
arsulegai/splinter-admin-ops-daemon
8511d0ce2505eb9bfe4bfa973a31bbf87b1f4bf4
use std::collections::HashMap; use actix_web::{client::Client, error, http::StatusCode, web, Error, HttpResponse}; use futures::{Future, IntoFuture}; use openssl::hash::{hash, MessageDigest}; use protobuf::Message; use splinter::admin::messages::{ AuthorizationType, CreateCircuit, DurabilityType, PersistenceType, RouteType, SplinterNode, SplinterService, }; use splinter::node_registry::Node; use splinter::protos::admin::{ CircuitManagementPayload, CircuitManagementPayload_Action as Action, CircuitManagementPayload_Header as Header, }; use uuid::Uuid; use crate::application_metadata::ApplicationMetadata; use crate::rest_api::{ConsortiumData, RestApiResponseError}; use super::{ get_response_paging_info, validate_limit, ErrorResponse, SuccessResponse, DEFAULT_LIMIT, DEFAULT_OFFSET, }; use db_models::models::Consortium; #[derive(Debug, Serialize, Deserialize)] pub struct CreateConsortiumForm { alias: String, members: Vec<String>, } pub fn propose_consortium( create_consortium: web::Json<CreateConsortiumForm>, node_info: web::Data<Node>, client: web::Data<Client>, splinterd_url: web::Data<String>, consortium_data: web::Data<ConsortiumData>, ) -> impl Future<Item = HttpResponse, Error = Error> { fetch_node_information(&create_consortium.members, &splinterd_url, client).then(move |resp| { let nodes = match resp { Ok(nodes) => nodes, Err(err) => match err { RestApiResponseError::BadRequest(message) => { return HttpResponse::BadRequest() .json(ErrorResponse::bad_request(&message.to_string())) .into_future(); } _ => { debug!("Failed to fetch node information: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }, }; let mut members = nodes .iter() .map(|node| SplinterNode { node_id: node.identity.to_string(), endpoint: node .metadata .get("endpoint") .unwrap_or(&"".to_string()) .to_string(), }) .collect::<Vec<SplinterNode>>(); members.push(SplinterNode { node_id: node_info.identity.to_string(), endpoint: node_info 
.metadata .get("endpoint") .unwrap_or(&"".to_string()) .to_string(), }); let partial_circuit_id = members.iter().fold(String::new(), |mut acc, member| { acc.push_str(&format!("::{}", member.node_id)); acc }); let scabbard_admin_keys = vec![consortium_data.get_ref().public_key.clone()]; let mut scabbard_args = vec![]; scabbard_args.push(( "admin_keys".into(), match serde_json::to_string(&scabbard_admin_keys) { Ok(s) => s, Err(err) => { debug!("Failed to serialize scabbard admin keys: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }, )); let mut roster = vec![]; for node in members.iter() { let peer_services = match serde_json::to_string( &members .iter() .filter_map(|other_node| { if other_node.node_id != node.node_id { Some(format!("consortium_{}", other_node.node_id)) } else { None } }) .collect::<Vec<_>>(), ) { Ok(s) => s, Err(err) => { debug!("Failed to serialize peer services: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; let mut service_args = scabbard_args.clone(); service_args.push(("peer_services".into(), peer_services)); roster.push(SplinterService { service_id: format!("consortium_{}", node.node_id), service_type: "scabbard".to_string(), allowed_nodes: vec![node.node_id.to_string()], arguments: service_args, }); } let application_metadata = match ApplicationMetadata::new(&create_consortium.alias, &scabbard_admin_keys) .to_bytes() { Ok(bytes) => bytes, Err(err) => { debug!("Failed to serialize application metadata: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; let create_request = CreateCircuit { circuit_id: format!( "consortium{}::{}", partial_circuit_id, Uuid::new_v4().to_string() ), roster, members, authorization_type: AuthorizationType::Trust, persistence: PersistenceType::Any, durability: DurabilityType::NoDurability, routes: RouteType::Any, 
circuit_management_type: "consortium".to_string(), application_metadata, }; let payload_bytes = match make_payload(create_request, node_info.identity.to_string()) { Ok(bytes) => bytes, Err(err) => { debug!("Failed to make circuit management payload: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; HttpResponse::Ok() .json(SuccessResponse::new(json!({ "payload_bytes": payload_bytes }))) .into_future() }) } fn fetch_node_information( node_ids: &[String], splinterd_url: &str, client: web::Data<Client>, ) -> Box<dyn Future<Item = Vec<Node>, Error = RestApiResponseError>> { let node_ids = node_ids.to_owned(); Box::new( client .get(&format!("{}/nodes?limit={}", splinterd_url, std::i64::MAX)) .send() .map_err(|err| { RestApiResponseError::InternalError(format!("Failed to send request {}", err)) }) .and_then(move |mut resp| { let body = resp.body().wait().map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to receive response body {}", err )) })?; match resp.status() { StatusCode::OK => { let list_reponse: SuccessResponse<Vec<Node>> = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; let nodes = node_ids.into_iter().try_fold(vec![], |mut acc, node_id| { if let Some(node) = list_reponse .data .iter() .find(|node| node.identity == node_id) { acc.push(node.clone()); Ok(acc) } else { Err(RestApiResponseError::BadRequest(format!( "Could not find node with id {}", node_id ))) } })?; Ok(nodes) } StatusCode::BAD_REQUEST => { let message: String = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; Err(RestApiResponseError::BadRequest(message)) } _ => { let message: String = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; 
Err(RestApiResponseError::InternalError(message)) } } }), ) } fn make_payload( create_request: CreateCircuit, local_node: String, ) -> Result<Vec<u8>, RestApiResponseError> { let circuit_proto = create_request.into_proto()?; let circuit_bytes = circuit_proto.write_to_bytes()?; let hashed_bytes = hash(MessageDigest::sha512(), &circuit_bytes)?; let mut header = Header::new(); header.set_action(Action::CIRCUIT_CREATE_REQUEST); header.set_payload_sha512(hashed_bytes.to_vec()); header.set_requester_node_id(local_node); let header_bytes = header.write_to_bytes()?; let mut circuit_management_payload = CircuitManagementPayload::new(); circuit_management_payload.set_header(header_bytes); circuit_management_payload.set_circuit_create_request(circuit_proto); let payload_bytes = circuit_management_payload.write_to_bytes()?; Ok(payload_bytes) }
use std::collections::HashMap; use actix_web::{client::Client, error, http::StatusCode, web, Error, HttpResponse}; use futures::{Future, IntoFuture}; use openssl::hash::{hash, MessageDigest}; use protobuf::Message; use splinter::admin::messages::{ AuthorizationType, CreateCircuit, DurabilityType, PersistenceType, RouteType, SplinterNode, SplinterService, }; use splinter::node_registry::Node; use splinter::protos::admin::{ CircuitManagementPayload, CircuitManagementPayload_Action as Action, CircuitManagementPayload_Header as Header, }; use uuid::Uuid; use crate::application_metadata::ApplicationMetadata; use crate::rest_api::{ConsortiumData, RestApiResponseError}; use super::{ get_response_paging_info, validate_limit, ErrorResponse, SuccessResponse, DEFAULT_LIMIT, DEFAULT_OFFSET, }; use db_models::models::Consortium; #[derive(Debug, Serialize, Deserialize)] pub struct CreateConsortiumForm { alias: String, members: Vec<String>, } pub fn propose_consortium( create_consortium: web::Json<CreateConsortiumForm>, node_info: web::Data<Node>, client: web::Data<Client>, splinterd_url: web::Data<String>, consortium_data: web::Data<ConsortiumData>, ) -> impl Future<Item = HttpResponse, Error = Error> { fetch_node_information(&create_consortium.members, &splinterd_url, client).then(move |resp| { let nodes = match resp { Ok(nodes) => nodes, Err(err) => match err { RestApiResponseError::BadRequest(message) => { return HttpResponse::BadRequest() .json(ErrorResponse::bad_request(&message.to_string())) .into_future(); } _ => { debug!("Failed to fetch node information: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }, }; let mut members = nodes .iter() .map(|node| SplinterNode { node_id: node.identity.to_string(), endpoint: node .metadata .get("endpoint") .unwrap_or(&"".to_string()) .to_string(), }) .collect::<Vec<SplinterNode>>(); members.push(SplinterNode { node_id: node_info.identity.to_string(), endpoint: node_info 
.metadata .get("endpoint") .unwrap_or(&"".to_string()) .to_string(), }); let partial_circuit_id = members.iter().fold(String::new(), |mut acc, member| { acc.push_str(&format!("::{}", member.node_id)); acc }); let scabbard_admin_keys = vec![consortium_data.get_ref().public_key.clone()]; let mut scabbard_args = vec![]; scabbard_args.push(( "admin_keys".into(), match serde_json::to_string(&scabbard_admin_keys) { Ok(s) => s, Err(err) => { debug!("Failed to serialize scabbard admin keys: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }, )); let mut roster = vec![]; for node in members.iter() { let peer_services = match serde_json::to_string( &members .iter() .filter_map(|other_node| { if other_node.node_id != node.node_id { Some(format!("consortium_{}", other_node.node_id)) } else { None } }) .collect::<Vec<_>>(), ) { Ok(s) => s, Err(err) => { debug!("Failed to serialize peer services: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; let mut service_args = scabbard_args.clone(); service_args.push(("peer_services".into(), peer_services)); roster.push(SplinterService { service_id: format!("consortium_{}", node.node_id), service_type: "scabbard".to_string(), allowed_nodes: vec![node.node_id.to_string()], arguments: service_args, }); } let application_metadata = match ApplicationMetadata::new(&create_consortium.alias, &scabbard_admin_keys) .to_bytes() { Ok(bytes) => bytes, Err(err) => { debug!("Failed to serialize application metadata: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; let create_request = CreateCircuit { circuit_id: format!( "consortium{}::{}", partial_circuit_
fn fetch_node_information( node_ids: &[String], splinterd_url: &str, client: web::Data<Client>, ) -> Box<dyn Future<Item = Vec<Node>, Error = RestApiResponseError>> { let node_ids = node_ids.to_owned(); Box::new( client .get(&format!("{}/nodes?limit={}", splinterd_url, std::i64::MAX)) .send() .map_err(|err| { RestApiResponseError::InternalError(format!("Failed to send request {}", err)) }) .and_then(move |mut resp| { let body = resp.body().wait().map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to receive response body {}", err )) })?; match resp.status() { StatusCode::OK => { let list_reponse: SuccessResponse<Vec<Node>> = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; let nodes = node_ids.into_iter().try_fold(vec![], |mut acc, node_id| { if let Some(node) = list_reponse .data .iter() .find(|node| node.identity == node_id) { acc.push(node.clone()); Ok(acc) } else { Err(RestApiResponseError::BadRequest(format!( "Could not find node with id {}", node_id ))) } })?; Ok(nodes) } StatusCode::BAD_REQUEST => { let message: String = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; Err(RestApiResponseError::BadRequest(message)) } _ => { let message: String = serde_json::from_slice(&body).map_err(|err| { RestApiResponseError::InternalError(format!( "Failed to parse response body {}", err )) })?; Err(RestApiResponseError::InternalError(message)) } } }), ) } fn make_payload( create_request: CreateCircuit, local_node: String, ) -> Result<Vec<u8>, RestApiResponseError> { let circuit_proto = create_request.into_proto()?; let circuit_bytes = circuit_proto.write_to_bytes()?; let hashed_bytes = hash(MessageDigest::sha512(), &circuit_bytes)?; let mut header = Header::new(); header.set_action(Action::CIRCUIT_CREATE_REQUEST); header.set_payload_sha512(hashed_bytes.to_vec()); 
header.set_requester_node_id(local_node); let header_bytes = header.write_to_bytes()?; let mut circuit_management_payload = CircuitManagementPayload::new(); circuit_management_payload.set_header(header_bytes); circuit_management_payload.set_circuit_create_request(circuit_proto); let payload_bytes = circuit_management_payload.write_to_bytes()?; Ok(payload_bytes) }
id, Uuid::new_v4().to_string() ), roster, members, authorization_type: AuthorizationType::Trust, persistence: PersistenceType::Any, durability: DurabilityType::NoDurability, routes: RouteType::Any, circuit_management_type: "consortium".to_string(), application_metadata, }; let payload_bytes = match make_payload(create_request, node_info.identity.to_string()) { Ok(bytes) => bytes, Err(err) => { debug!("Failed to make circuit management payload: {}", err); return HttpResponse::InternalServerError() .json(ErrorResponse::internal_error()) .into_future(); } }; HttpResponse::Ok() .json(SuccessResponse::new(json!({ "payload_bytes": payload_bytes }))) .into_future() }) }
function_block-function_prefixed
[ { "content": "pub fn fetch_node(\n\n identity: web::Path<String>,\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n client\n\n .get(&format!(\"{}/nodes/{}\", splinterd_url.get_ref(), identity))\n\n .send()\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let body = resp.body().wait()?;\n\n match resp.status() {\n\n StatusCode::OK => {\n\n let node: Node = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::Ok().json(SuccessResponse::new(node)))\n\n }\n\n StatusCode::NOT_FOUND => {\n\n let message: String = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::NotFound().json(ErrorResponse::not_found(&message)))\n\n }\n", "file_path": "src/rest_api/routes/node.rs", "rank": 1, "score": 116735.6839462439 }, { "content": "pub fn fetch_key_info(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n public_key: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n let public_key = public_key.into_inner();\n\n client\n\n .get(format!(\"{}/keys/{}\", splinterd_url.get_ref(), public_key))\n\n .send()\n\n .map_err(Error::from)\n\n .and_then(move |mut resp| {\n\n let body = resp.body().wait()?;\n\n match resp.status() {\n\n StatusCode::OK => Ok(HttpResponse::Ok()\n\n .content_type(\"application/json\")\n\n .body(Body::Bytes(body))),\n\n StatusCode::NOT_FOUND => Ok(HttpResponse::NotFound().json(\n\n ErrorResponse::not_found(&format!(\n\n \"Could not find user information of key {}\",\n\n public_key\n", "file_path": "src/rest_api/routes/key.rs", "rank": 2, "score": 114248.65504583757 }, { "content": "pub fn get_node(splinterd_url: &str) -> Result<Node, GetNodeError> {\n\n let mut runtime = Runtime::new()\n\n .map_err(|err| GetNodeError(format!(\"Failed to get set up runtime: {}\", err)))?;\n\n let client = HyperClient::new();\n\n let splinterd_url = splinterd_url.to_owned();\n\n let uri = format!(\"{}/status\", splinterd_url)\n\n 
.parse::<Uri>()\n\n .map_err(|err| GetNodeError(format!(\"Failed to get set up request: {}\", err)))?;\n\n\n\n runtime.block_on(\n\n client\n\n .get(uri)\n\n .map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node metadata: {}\",\n\n err\n\n ))\n\n })\n\n .and_then(|resp| {\n\n if resp.status() != StatusCode::OK {\n", "file_path": "src/config.rs", "rank": 3, "score": 113252.40044849445 }, { "content": "// format for logs\n\npub fn log_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"[{}] T[{:?}] {} [{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.3f\"),\n\n thread::current().name().unwrap_or(\"<unnamed>\"),\n\n record.level(),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n style(level, &record.args()),\n\n )\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 97436.77863084772 }, { "content": "pub fn list_nodes(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n query: web::Query<HashMap<String, String>>,\n\n) -> impl Future<Item = HttpResponse, Error = Error> {\n\n let mut request_url = format!(\"{}/nodes\", splinterd_url.get_ref());\n\n\n\n let offset = query\n\n .get(\"offset\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_OFFSET.to_string());\n\n let limit = query\n\n .get(\"limit\")\n\n .map(ToOwned::to_owned)\n\n .unwrap_or_else(|| DEFAULT_LIMIT.to_string());\n\n\n\n request_url = format!(\"{}?offset={}&limit={}\", request_url, offset, limit);\n\n\n\n if let Some(filter) = query.get(\"filter\") {\n\n request_url = format!(\n", "file_path": "src/rest_api/routes/node.rs", "rank": 6, "score": 95636.87987293844 }, { "content": "fn parse_link(response_bytes: &[u8]) -> Result<String, RestApiResponseError> {\n\n let mut response_value: HashMap<String, String> = serde_json::from_slice(&response_bytes)\n\n .map_err(|err| {\n\n 
RestApiResponseError::InternalError(format!(\n\n \"Failed to parse batches_ids from splinterd response {}\",\n\n err\n\n ))\n\n })?;\n\n\n\n if let Some(link) = response_value.remove(\"link\") {\n\n Ok(link)\n\n } else {\n\n Err(RestApiResponseError::InternalError(\n\n \"The splinter daemon did not return a link for batch status\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 7, "score": 89537.30189317797 }, { "content": "fn handle_error(err: Box<dyn ActixError::ResponseError>) -> ActixError::Error {\n\n let message = err.to_string();\n\n ActixError::InternalError::from_response(\n\n err,\n\n HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)),\n\n )\n\n .into()\n\n}\n", "file_path": "src/rest_api/mod.rs", "rank": 8, "score": 86938.62828096864 }, { "content": "pub fn submit_scabbard_payload(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n circuit_id: web::Path<String>,\n\n node_info: web::Data<Node>,\n\n signed_payload: web::Bytes,\n\n query: web::Query<HashMap<String, String>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let service_id = format!(\"consortium_{}\", node_info.identity);\n\n let wait = query\n\n .get(\"wait\")\n\n .map(|val| match val.as_ref() {\n\n \"false\" => 0,\n\n _ => val.parse().unwrap_or(DEFAULT_WAIT),\n\n })\n\n .unwrap_or_else(|| DEFAULT_WAIT);\n\n\n\n Box::new(\n\n client\n\n .post(format!(\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 9, "score": 85990.4955919654 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct ApiConsortiumMember {\n\n node_id: String,\n\n endpoint: String,\n\n}\n\n\n\nimpl ApiConsortiumMember {\n\n fn from(db_circuit_member: ConsortiumMember) -> Self {\n\n ApiConsortiumMember {\n\n node_id: db_circuit_member.node_id.to_string(),\n\n endpoint: db_circuit_member.endpoint.to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 10, "score": 81002.50853405076 
}, { "content": "pub fn get_response_paging_info(\n\n limit: usize,\n\n offset: usize,\n\n link: &str,\n\n query_count: usize,\n\n) -> Paging {\n\n let limit = validate_limit(limit);\n\n let offset = offset as i64;\n\n let query_count = query_count as i64;\n\n\n\n let base_link = format!(\"{}limit={}&\", link, limit);\n\n\n\n let current_link = format!(\"{}offset={}\", base_link, offset);\n\n\n\n let first_link = format!(\"{}offset=0\", base_link);\n\n\n\n let previous_offset = if offset > limit { offset - limit } else { 0 };\n\n let previous_link = format!(\"{}offset={}\", base_link, previous_offset);\n\n\n\n let last_offset = if query_count > 0 {\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 11, "score": 79741.94270857229 }, { "content": "\n\n#[derive(Debug)]\n\npub enum ApplicationMetadataError {\n\n SerializationError(SerdeError),\n\n DeserializationError(SerdeError),\n\n}\n\n\n\nimpl Error for ApplicationMetadataError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n ApplicationMetadataError::SerializationError(err) => Some(err),\n\n ApplicationMetadataError::DeserializationError(err) => Some(err),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ApplicationMetadataError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n ApplicationMetadataError::SerializationError(e) => {\n\n write!(f, \"Failed to serialize ApplicationMetadata: {}\", e)\n\n }\n\n ApplicationMetadataError::DeserializationError(e) => {\n\n write!(f, \"Failed to deserialize ApplicationMetadata: {}\", e)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/application_metadata/error.rs", "rank": 12, "score": 75086.84213803372 }, { "content": "/*\n\n * Copyright 2019 Cargill Incorporated\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n 
*\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\nuse serde_json::error::Error as SerdeError;\n\nuse std::error::Error;\n\nuse std::fmt;\n", "file_path": "src/application_metadata/error.rs", "rank": 13, "score": 75067.251360545 }, { "content": "fn run() -> Result<(), AdminOpDaemonError> {\n\n let matches = clap_app!(myapp =>\n\n (name: APP_NAME)\n\n (version: VERSION)\n\n (author: \"Walmart Inc.\")\n\n (about: \"Daemon Package for PO Administration\")\n\n (@arg verbose: -v +multiple \"Log verbosely\")\n\n (@arg bind: -b --bind +takes_value \"connection endpoint for Administration Service rest API\")\n\n (@arg splinterd_url: --(\"splinterd-url\") +takes_value \"connection endpoint to SplinterD rest API\")\n\n )\n\n .get_matches();\n\n\n\n let log_level = match matches.occurrences_of(\"verbose\") {\n\n 0 => log::LevelFilter::Warn,\n\n 1 => log::LevelFilter::Info,\n\n 2 => log::LevelFilter::Debug,\n\n _ => log::LevelFilter::Trace,\n\n };\n\n\n\n let mut log_spec_builder = LogSpecBuilder::new();\n", "file_path": "src/main.rs", "rank": 14, "score": 72607.44171728446 }, { "content": "pub fn run(\n\n bind_url: &str,\n\n splinterd_url: &str,\n\n node: Node,\n\n public_key: String,\n\n) -> Result<\n\n (\n\n RestApiShutdownHandle,\n\n thread::JoinHandle<Result<(), RestApiServerError>>,\n\n ),\n\n RestApiServerError,\n\n> {\n\n let bind_url = bind_url.to_owned();\n\n let splinterd_url = splinterd_url.to_owned();\n\n let consortium_data = ConsortiumData { public_key };\n\n let (tx, rx) = mpsc::channel();\n\n let join_handle = thread::Builder::new()\n\n .name(\"AdminDaemonService\".into())\n\n 
.spawn(move || {\n\n let sys = actix::System::new(\"DaemonService-REST-API\");\n", "file_path": "src/rest_api/mod.rs", "rank": 15, "score": 71058.02459082831 }, { "content": "pub fn proposal_vote(\n\n vote: web::Json<CircuitProposalVote>,\n\n node_info: web::Data<Node>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n let node_identity = node_info.identity.to_string();\n\n Box::new(\n\n // TODO: If proposal exists\n\n web::block(move || Ok(())).then(|res| match res {\n\n Ok(()) => match make_payload(vote.into_inner(), node_identity) {\n\n Ok(bytes) => Ok(HttpResponse::Ok()\n\n .json(SuccessResponse::new(json!({ \"payload_bytes\": bytes })))),\n\n Err(err) => {\n\n debug!(\"Failed to prepare circuit management payload {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n Err(err) => match err {\n\n error::BlockingError::Error(err) => {\n\n match err {\n\n RestApiResponseError::NotFound(err) => Ok(HttpResponse::NotFound()\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 16, "score": 67306.46043663377 }, { "content": "pub fn submit_signed_payload(\n\n client: web::Data<Client>,\n\n splinterd_url: web::Data<String>,\n\n signed_payload: web::Bytes,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {\n\n Box::new(\n\n client\n\n .post(format!(\"{}/admin/submit\", *splinterd_url))\n\n .send_body(Body::Bytes(signed_payload))\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let status = resp.status();\n\n let body = resp.body().wait()?;\n\n\n\n match status {\n\n StatusCode::ACCEPTED => Ok(HttpResponse::Accepted().json(\n\n SuccessResponse::new(\"The payload was submitted successfully\"),\n\n )),\n\n StatusCode::BAD_REQUEST => {\n\n let body_value: serde_json::Value = serde_json::from_slice(&body)?;\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 17, "score": 65656.90998185951 }, { "content": "pub fn validate_limit(limit: usize) -> i64 {\n\n if limit > MAX_LIMIT 
{\n\n DEFAULT_LIMIT as i64\n\n } else {\n\n limit as i64\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_LINK: &str = \"/api/test?\";\n\n\n\n #[test]\n\n fn test_default_paging_response() {\n\n // Create paging response from default limit, default offset, a total of 1000\n\n let test_paging_response =\n\n get_response_paging_info(DEFAULT_LIMIT, DEFAULT_OFFSET, TEST_LINK, 1000);\n\n let generated_paging_response =\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 18, "score": 65545.79281238293 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct ApiConsortiumProposal {\n\n proposal_id: String,\n\n circuit_id: String,\n\n circuit_hash: String,\n\n members: Vec<ApiConsortiumMember>,\n\n requester: String,\n\n requester_node_id: String,\n\n created_time: u64,\n\n updated_time: u64,\n\n}\n\n\n\nimpl ApiConsortiumProposal {\n\n fn from(db_proposal: ConsortiumProposal, db_members: Vec<ConsortiumMember>) -> Self {\n\n ApiConsortiumProposal {\n\n proposal_id: db_proposal.id.to_string(),\n\n circuit_id: db_proposal.circuit_id.to_string(),\n\n circuit_hash: db_proposal.circuit_hash.to_string(),\n\n members: db_members\n\n .into_iter()\n\n .map(ApiConsortiumMember::from)\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 19, "score": 59817.2283399086 }, { "content": "fn process_failed_baches(invalid_batches: &[&BatchInfo]) -> String {\n\n if invalid_batches.is_empty() {\n\n \"\".to_string()\n\n } else if invalid_batches.len() == 1 {\n\n if let BatchStatus::Invalid(invalid_transactions) = &invalid_batches[0].status {\n\n if invalid_transactions.len() <= 1 {\n\n \"A transaction failed. Please try again. If it continues to fail contact your administrator for help.\".to_string()\n\n } else {\n\n \"Several transactions failed. Please try again. If it continues to fail contact your administrator for help.\".to_string()\n\n }\n\n } else {\n\n \"\".to_string()\n\n }\n\n } else {\n\n \"Several transactions failed. Please try again. 
If it continues to fail please contact your administrator.\".to_string()\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 21, "score": 55693.188244253826 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ApplicationMetadata {\n\n alias: String,\n\n scabbard_admin_keys: Vec<String>,\n\n}\n\n\n\nimpl ApplicationMetadata {\n\n pub fn new(alias: &str, scabbard_admin_keys: &[String]) -> ApplicationMetadata {\n\n ApplicationMetadata {\n\n alias: alias.to_string(),\n\n scabbard_admin_keys: scabbard_admin_keys.to_vec(),\n\n }\n\n }\n\n\n\n pub fn from_bytes(bytes: &[u8]) -> Result<ApplicationMetadata, ApplicationMetadataError> {\n\n serde_json::from_slice(bytes).map_err(ApplicationMetadataError::DeserializationError)\n\n }\n\n\n\n pub fn to_bytes(&self) -> Result<Vec<u8>, ApplicationMetadataError> {\n", "file_path": "src/application_metadata/mod.rs", "rank": 22, "score": 50532.933112610306 }, { "content": " serde_json::to_vec(self).map_err(ApplicationMetadataError::SerializationError)\n\n }\n\n\n\n pub fn alias(&self) -> &str {\n\n &self.alias\n\n }\n\n\n\n pub fn scabbard_admin_keys(&self) -> &[String] {\n\n &self.scabbard_admin_keys\n\n }\n\n}\n", "file_path": "src/application_metadata/mod.rs", "rank": 23, "score": 50525.30601469038 }, { "content": "/*\n\n * Copyright 2019 Cargill Incorporated\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * 
-----------------------------------------------------------------------------\n\n */\n\n\n\nmod error;\n\n\n\npub use error::ApplicationMetadataError;\n", "file_path": "src/application_metadata/mod.rs", "rank": 24, "score": 50502.77230967584 }, { "content": "fn main() {\n\n if let Err(e) = run() {\n\n error!(\"{}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 25, "score": 40010.616028094686 }, { "content": "fn make_payload(\n\n vote: CircuitProposalVote,\n\n local_node: String,\n\n) -> Result<Vec<u8>, RestApiResponseError> {\n\n let vote_proto = vote.into_proto();\n\n let vote_bytes = vote_proto.write_to_bytes()?;\n\n let hashed_bytes = hash(MessageDigest::sha512(), &vote_bytes)?;\n\n\n\n let mut header = Header::new();\n\n header.set_action(Action::CIRCUIT_PROPOSAL_VOTE);\n\n header.set_payload_sha512(hashed_bytes.to_vec());\n\n header.set_requester_node_id(local_node);\n\n let header_bytes = header.write_to_bytes()?;\n\n\n\n let mut circuit_management_payload = CircuitManagementPayload::new();\n\n circuit_management_payload.set_header(header_bytes);\n\n circuit_management_payload.set_circuit_proposal_vote(vote_proto);\n\n let payload_bytes = circuit_management_payload.write_to_bytes()?;\n\n Ok(payload_bytes)\n\n}\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 26, "score": 35517.951954147764 }, { "content": "fn check_batch_status(\n\n client: web::Data<Client>,\n\n splinterd_url: &str,\n\n link: &str,\n\n start_time: Instant,\n\n wait: u64,\n\n) -> Box<dyn Future<Item = Vec<BatchInfo>, Error = RestApiResponseError>> {\n\n let splinterd_url = splinterd_url.to_owned();\n\n let link = link.to_owned();\n\n debug!(\"Checking batch status {}\", link);\n\n Box::new(\n\n client\n\n .get(format!(\"{}{}\", splinterd_url, link))\n\n .send()\n\n .map_err(|err| {\n\n RestApiResponseError::InternalError(format!(\"Failed to send request {}\", err))\n\n })\n\n .and_then(move |mut resp| {\n\n let body = match resp.body().wait() 
{\n\n Ok(b) => b,\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 27, "score": 34630.011152025065 }, { "content": "impl From<ConfigurationError> for AdminOpDaemonError {\n\n fn from(err: ConfigurationError) -> Self {\n\n AdminOpDaemonError::ConfigurationError(Box::new(err))\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct GetNodeError(pub String);\n\n\n\nimpl Error for GetNodeError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n None\n\n }\n\n}\n\n\n\nimpl fmt::Display for GetNodeError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl From<GetNodeError> for AdminOpDaemonError {\n\n fn from(err: GetNodeError) -> Self {\n\n AdminOpDaemonError::GetNodeError(err)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 28, "score": 27338.31192682862 }, { "content": "use crate::rest_api::RestApiServerError;\n\n\n\n#[derive(Debug)]\n\npub enum AdminOpDaemonError {\n\n LoggingInitializationError(flexi_logger::FlexiLoggerError),\n\n ConfigurationError(Box<ConfigurationError>),\n\n RestApiError(RestApiServerError),\n\n KeyGenError(KeyGenError),\n\n GetNodeError(GetNodeError),\n\n}\n\n\n\nimpl Error for AdminOpDaemonError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n AdminOpDaemonError::LoggingInitializationError(err) => Some(err),\n\n AdminOpDaemonError::ConfigurationError(err) => Some(err),\n\n AdminOpDaemonError::RestApiError(err) => Some(err),\n\n AdminOpDaemonError::KeyGenError(err) => Some(err),\n\n AdminOpDaemonError::GetNodeError(err) => Some(err),\n\n }\n", "file_path": "src/error.rs", "rank": 29, "score": 27336.017159956147 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for AdminOpDaemonError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n AdminOpDaemonError::LoggingInitializationError(e) => {\n\n write!(f, \"Logging initialization error: {}\", e)\n\n }\n\n AdminOpDaemonError::ConfigurationError(e) => 
write!(f, \"Coniguration error: {}\", e),\n\n AdminOpDaemonError::RestApiError(e) => write!(f, \"Rest API error: {}\", e),\n\n AdminOpDaemonError::KeyGenError(e) => write!(\n\n f,\n\n \"an error occurred while generating a new key pair: {}\",\n\n e\n\n ),\n\n AdminOpDaemonError::GetNodeError(e) => write!(\n\n f,\n\n \"an error occurred while getting splinterd node information: {}\",\n\n e\n", "file_path": "src/error.rs", "rank": 30, "score": 27333.71303861816 }, { "content": " ),\n\n }\n\n }\n\n}\n\n\n\nimpl From<flexi_logger::FlexiLoggerError> for AdminOpDaemonError {\n\n fn from(err: flexi_logger::FlexiLoggerError) -> AdminOpDaemonError {\n\n AdminOpDaemonError::LoggingInitializationError(err)\n\n }\n\n}\n\n\n\nimpl From<RestApiServerError> for AdminOpDaemonError {\n\n fn from(err: RestApiServerError) -> AdminOpDaemonError {\n\n AdminOpDaemonError::RestApiError(err)\n\n }\n\n}\n\n\n\nimpl From<KeyGenError> for AdminOpDaemonError {\n\n fn from(err: KeyGenError) -> AdminOpDaemonError {\n\n AdminOpDaemonError::KeyGenError(err)\n", "file_path": "src/error.rs", "rank": 31, "score": 27332.009682336608 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum ConfigurationError {\n\n MissingValue(String),\n\n}\n\n\n\nimpl Error for ConfigurationError {}\n\n\n\nimpl fmt::Display for ConfigurationError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n ConfigurationError::MissingValue(config_field_name) => {\n\n write!(f, \"Missing configuration for {}\", config_field_name)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 32, "score": 27330.30597559135 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n// Copyright 2019 Walmart Inc.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless 
required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\nuse sawtooth_sdk::signing::Error as KeyGenError;\n\n\n", "file_path": "src/error.rs", "rank": 33, "score": 27325.302478734673 }, { "content": "use splinter::admin::error::MarshallingError;\n\n\n\n#[derive(Debug)]\n\npub enum RestApiServerError {\n\n StdError(std::io::Error),\n\n StartUpError(String),\n\n}\n\n\n\nimpl Error for RestApiServerError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n RestApiServerError::StdError(err) => Some(err),\n\n RestApiServerError::StartUpError(_) => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for RestApiServerError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n", "file_path": "src/rest_api/error.rs", "rank": 34, "score": 24587.6924731941 }, { "content": " }\n\n}\n\n\n\nimpl From<ProtobufError> for RestApiResponseError {\n\n fn from(err: ProtobufError) -> Self {\n\n RestApiResponseError::InternalError(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<MarshallingError> for RestApiResponseError {\n\n fn from(err: MarshallingError) -> Self {\n\n RestApiResponseError::InternalError(err.to_string())\n\n }\n\n}\n", "file_path": "src/rest_api/error.rs", "rank": 35, "score": 24579.659663574603 }, { "content": " RestApiServerError::StdError(e) => write!(f, \"Std Error: {}\", e),\n\n RestApiServerError::StartUpError(e) => write!(f, \"Start-up Error: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for RestApiServerError {\n\n fn from(err: std::io::Error) -> RestApiServerError {\n\n RestApiServerError::StdError(err)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum RestApiResponseError {\n\n 
DatabaseError(String),\n\n InternalError(String),\n\n Unauthorized,\n\n BadRequest(String),\n\n NotFound(String),\n\n}\n", "file_path": "src/rest_api/error.rs", "rank": 36, "score": 24579.549232603094 }, { "content": " RestApiResponseError::NotFound(e) => write!(f, \"Not Found: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<diesel::result::Error> for RestApiResponseError {\n\n fn from(err: diesel::result::Error) -> Self {\n\n RestApiResponseError::DatabaseError(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<BcryptError> for RestApiResponseError {\n\n fn from(err: BcryptError) -> Self {\n\n RestApiResponseError::InternalError(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<openssl::error::ErrorStack> for RestApiResponseError {\n\n fn from(err: openssl::error::ErrorStack) -> Self {\n\n RestApiResponseError::InternalError(err.to_string())\n", "file_path": "src/rest_api/error.rs", "rank": 37, "score": 24579.47859798315 }, { "content": "\n\nimpl Error for RestApiResponseError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n RestApiResponseError::DatabaseError(_) => None,\n\n RestApiResponseError::InternalError(_) => None,\n\n RestApiResponseError::Unauthorized => None,\n\n RestApiResponseError::BadRequest(_) => None,\n\n RestApiResponseError::NotFound(_) => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for RestApiResponseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n RestApiResponseError::DatabaseError(e) => write!(f, \"Database error: {}\", e),\n\n RestApiResponseError::InternalError(e) => write!(f, \"Internal error occurred: {}\", e),\n\n RestApiResponseError::Unauthorized => write!(f, \"Unauthorized\"),\n\n RestApiResponseError::BadRequest(e) => write!(f, \"Bad Request: {}\", e),\n", "file_path": "src/rest_api/error.rs", "rank": 38, "score": 24579.209858004004 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you 
may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse bcrypt::BcryptError;\n\nuse diesel;\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\nuse protobuf::error::ProtobufError;\n", "file_path": "src/rest_api/error.rs", "rank": 39, "score": 24575.28194846768 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse actix_web::{client::Client, dev::Body, http::StatusCode, web, Error, HttpResponse};\n\nuse futures::Future;\n\n\n\nuse super::ErrorResponse;\n\n\n", "file_path": "src/rest_api/routes/key.rs", "rank": 40, "score": 24143.51359013684 }, { "content": " )),\n\n )),\n\n StatusCode::BAD_REQUEST => {\n\n let body_value: serde_json::Value = serde_json::from_slice(&body)?;\n\n let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Request was malformed.\"),\n\n None => \"Request malformed.\",\n\n };\n\n Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))\n\n 
}\n\n _ => {\n\n debug!(\n\n \"Internal Server Error. Splinterd responded with error {}\",\n\n resp.status(),\n\n );\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n }\n\n })\n\n}\n", "file_path": "src/rest_api/routes/key.rs", "rank": 41, "score": 24141.27585624774 }, { "content": " test, web, App,\n\n };\n\n\n\n static SPLINTERD_URL: &str = \"http://splinterd-node:8085\";\n\n\n\n #[test]\n\n /// Tests a GET /nodes/{identity} request returns the expected node.\n\n fn test_fetch_node_ok() {\n\n let mut app = test::init_service(\n\n App::new()\n\n .data(Client::new())\n\n .data(SPLINTERD_URL.to_string())\n\n .service(web::resource(\"/nodes/{identity}\").route(web::get().to_async(fetch_node))),\n\n );\n\n\n\n let req = test::TestRequest::get()\n\n .uri(&format!(\"/nodes/{}\", get_node_1().identity))\n\n .to_request();\n\n\n\n let resp = test::call_service(&mut app, req);\n", "file_path": "src/rest_api/routes/node.rs", "rank": 53, "score": 23463.21438734652 }, { "content": " assert_eq!(resp.status(), StatusCode::OK);\n\n let nodes: SuccessResponse<Vec<Node>> =\n\n serde_json::from_slice(&test::read_body(resp)).unwrap();\n\n assert_eq!(nodes.data, vec![get_node_1(), get_node_2()]);\n\n assert_eq!(\n\n nodes.paging,\n\n Some(create_test_paging_response(0, 100, 0, 0, 0, 2, \"/nodes?\"))\n\n )\n\n }\n\n\n\n #[test]\n\n /// Tests a GET /nodes request with filters returns the expected node.\n\n fn test_list_node_with_filters_ok() {\n\n let mut app = test::init_service(\n\n App::new()\n\n .data(Client::new())\n\n .data(SPLINTERD_URL.to_string())\n\n .service(web::resource(\"/nodes\").route(web::get().to_async(list_nodes))),\n\n );\n\n\n", "file_path": "src/rest_api/routes/node.rs", "rank": 54, "score": 23462.262505315513 }, { "content": "\n\n #[test]\n\n /// Tests a GET /nodes request with invalid filter returns BadRequest response.\n\n fn test_list_node_with_filters_bad_request() {\n\n let mut app = test::init_service(\n\n 
App::new()\n\n .data(Client::new())\n\n .data(SPLINTERD_URL.to_string())\n\n .service(web::resource(\"/nodes\").route(web::get().to_async(list_nodes))),\n\n );\n\n\n\n let filter = utf8_percent_encode(\"{\\\"company\\\":[\\\"*\\\",\\\"Bitwise IO\\\"]}\", QUERY_ENCODE_SET)\n\n .to_string();\n\n\n\n let req = test::TestRequest::get()\n\n .uri(&format!(\"/nodes?filter={}\", filter))\n\n .header(header::CONTENT_TYPE, \"application/json\")\n\n .to_request();\n\n\n\n let resp = test::call_service(&mut app, req);\n", "file_path": "src/rest_api/routes/node.rs", "rank": 55, "score": 23462.17918793151 }, { "content": "\n\n let resp = test::call_service(&mut app, req);\n\n\n\n assert_eq!(resp.status(), StatusCode::NOT_FOUND);\n\n }\n\n\n\n #[test]\n\n /// Tests a GET /nodes request with no filters returns the expected nodes.\n\n fn test_list_node_ok() {\n\n let mut app = test::init_service(\n\n App::new()\n\n .data(Client::new())\n\n .data(SPLINTERD_URL.to_string())\n\n .service(web::resource(\"/nodes\").route(web::get().to_async(list_nodes))),\n\n );\n\n\n\n let req = test::TestRequest::get().uri(\"/nodes\").to_request();\n\n\n\n let resp = test::call_service(&mut app, req);\n\n\n", "file_path": "src/rest_api/routes/node.rs", "rank": 56, "score": 23462.159454941062 }, { "content": " \"{}&filter={}\",\n\n request_url,\n\n utf8_percent_encode(filter, QUERY_ENCODE_SET).to_string()\n\n );\n\n }\n\n\n\n client\n\n .get(&request_url)\n\n .send()\n\n .map_err(Error::from)\n\n .and_then(|mut resp| {\n\n let body = resp.body().wait()?;\n\n match resp.status() {\n\n StatusCode::OK => {\n\n let list_reponse: SuccessResponse<Vec<Node>> = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::Ok().json(list_reponse))\n\n }\n\n StatusCode::BAD_REQUEST => {\n\n let message: String = serde_json::from_slice(&body)?;\n\n Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))\n", "file_path": "src/rest_api/routes/node.rs", "rank": 57, "score": 23462.06647515353 }, { "content": 
" }\n\n _ => {\n\n let message: String = serde_json::from_slice(&body)?;\n\n debug!(\n\n \"Internal Server Error. Splinterd responded with error {} message {}\",\n\n resp.status(),\n\n message\n\n );\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n }\n\n })\n\n}\n\n\n\n#[cfg(all(feature = \"test-node-endpoint\", test))]\n\nmod test {\n\n use super::*;\n\n use crate::rest_api::routes::Paging;\n\n use actix_web::{\n\n http::{header, StatusCode},\n", "file_path": "src/rest_api/routes/node.rs", "rank": 58, "score": 23461.514251992834 }, { "content": "\n\n assert_eq!(resp.status(), StatusCode::OK);\n\n let response: SuccessResponse<Node> =\n\n serde_json::from_slice(&test::read_body(resp)).unwrap();\n\n assert_eq!(response.data, get_node_1())\n\n }\n\n\n\n #[test]\n\n /// Tests a GET /nodes/{identity} request returns NotFound when an invalid identity is passed\n\n fn test_fetch_node_not_found() {\n\n let mut app = test::init_service(\n\n App::new()\n\n .data(Client::new())\n\n .data(SPLINTERD_URL.to_string())\n\n .service(web::resource(\"/nodes/{identity}\").route(web::get().to_async(fetch_node))),\n\n );\n\n\n\n let req = test::TestRequest::get()\n\n .uri(\"/nodes/Node-not-valid\")\n\n .to_request();\n", "file_path": "src/rest_api/routes/node.rs", "rank": 59, "score": 23461.48138749946 }, { "content": " let filter = utf8_percent_encode(\"{\\\"company\\\":[\\\"=\\\",\\\"Bitwise IO\\\"]}\", QUERY_ENCODE_SET)\n\n .to_string();\n\n\n\n let req = test::TestRequest::get()\n\n .uri(&format!(\"/nodes?filter={}\", filter))\n\n .header(header::CONTENT_TYPE, \"application/json\")\n\n .to_request();\n\n\n\n let resp = test::call_service(&mut app, req);\n\n\n\n assert_eq!(resp.status(), StatusCode::OK);\n\n let nodes: SuccessResponse<Vec<Node>> =\n\n serde_json::from_slice(&test::read_body(resp)).unwrap();\n\n assert_eq!(nodes.data, vec![get_node_1()]);\n\n let link = format!(\"/nodes?filter={}&\", filter);\n\n assert_eq!(\n\n 
nodes.paging,\n\n Some(create_test_paging_response(0, 100, 0, 0, 0, 1, &link))\n\n )\n\n }\n", "file_path": "src/rest_api/routes/node.rs", "rank": 60, "score": 23460.859954685977 }, { "content": "\n\n assert_eq!(resp.status(), StatusCode::BAD_REQUEST);\n\n }\n\n\n\n fn get_node_1() -> Node {\n\n let mut metadata = HashMap::new();\n\n metadata.insert(\"url\".to_string(), \"127.0.0.1:8080\".to_string());\n\n metadata.insert(\"company\".to_string(), \"Bitwise IO\".to_string());\n\n Node {\n\n identity: \"Node-123\".to_string(),\n\n metadata,\n\n }\n\n }\n\n\n\n fn get_node_2() -> Node {\n\n let mut metadata = HashMap::new();\n\n metadata.insert(\"url\".to_string(), \"127.0.0.1:8082\".to_string());\n\n metadata.insert(\"company\".to_string(), \"Cargill\".to_string());\n\n Node {\n\n identity: \"Node-456\".to_string(),\n", "file_path": "src/rest_api/routes/node.rs", "rank": 61, "score": 23458.15988851254 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse actix_web::{client::Client, http::StatusCode, web, Error, HttpResponse};\n\nuse futures::Future;\n\nuse percent_encoding::utf8_percent_encode;\n\nuse splinter::node_registry::Node;\n\nuse std::collections::HashMap;\n\n\n\nuse super::{ErrorResponse, SuccessResponse, DEFAULT_LIMIT, DEFAULT_OFFSET, QUERY_ENCODE_SET};\n\n\n", "file_path": "src/rest_api/routes/node.rs", "rank": 62, "score": 23456.721205681257 }, { "content": " 
_ => {\n\n let message: String = serde_json::from_slice(&body)?;\n\n debug!(\n\n \"Internal Server Error. Splinterd responded with error {} message {}\",\n\n resp.status(),\n\n message\n\n );\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/rest_api/routes/node.rs", "rank": 63, "score": 23452.983223956417 }, { "content": " metadata,\n\n }\n\n }\n\n\n\n fn create_test_paging_response(\n\n offset: usize,\n\n limit: usize,\n\n next_offset: usize,\n\n previous_offset: usize,\n\n last_offset: usize,\n\n total: usize,\n\n link: &str,\n\n ) -> Paging {\n\n let base_link = format!(\"{}limit={}&\", link, limit);\n\n let current_link = format!(\"{}offset={}\", base_link, offset);\n\n let first_link = format!(\"{}offset=0\", base_link);\n\n let next_link = format!(\"{}offset={}\", base_link, next_offset);\n\n let previous_link = format!(\"{}offset={}\", base_link, previous_offset);\n\n let last_link = format!(\"{}offset={}\", base_link, last_offset);\n\n\n", "file_path": "src/rest_api/routes/node.rs", "rank": 64, "score": 23444.93728534839 }, { "content": " Paging {\n\n current: current_link,\n\n offset,\n\n limit,\n\n total,\n\n first: first_link,\n\n prev: previous_link,\n\n next: next_link,\n\n last: last_link,\n\n }\n\n }\n\n}\n", "file_path": "src/rest_api/routes/node.rs", "rank": 65, "score": 23443.332794504808 }, { "content": " Future, Stream,\n\n};\n\nuse hyper::{Client as HyperClient, StatusCode, Uri};\n\nuse serde_json::Value;\n\nuse splinter::node_registry::Node;\n\nuse tokio::runtime::Runtime;\n\n\n\nuse crate::error::{ConfigurationError, GetNodeError};\n\n\n\n#[derive(Debug)]\n\npub struct ConsortiumConfig {\n\n rest_api_endpoint: String,\n\n splinterd_url: String,\n\n}\n\n\n\nimpl ConsortiumConfig {\n\n pub fn rest_api_endpoint(&self) -> &str {\n\n &self.rest_api_endpoint\n\n }\n\n\n", "file_path": "src/config.rs", "rank": 66, "score": 23.152600076901336 }, { "content": "use 
actix_web::{\n\n client::Client, error as ActixError, web, App, FromRequest, HttpResponse, HttpServer, Result,\n\n};\n\nuse futures::future::Future;\n\nuse splinter::node_registry::Node;\n\n\n\npub use error::{RestApiResponseError, RestApiServerError};\n\nuse routes::ErrorResponse;\n\n\n\n#[derive(Clone)]\n\npub struct ConsortiumData {\n\n pub public_key: String,\n\n}\n\n\n\npub struct RestApiShutdownHandle {\n\n do_shutdown: Box<dyn Fn() -> Result<(), RestApiServerError> + Send>,\n\n}\n\n\n\nimpl RestApiShutdownHandle {\n\n pub fn shutdown(&self) -> Result<(), RestApiServerError> {\n\n (*self.do_shutdown)()\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/mod.rs", "rank": 67, "score": 22.93397470779039 }, { "content": "use protobuf::Message;\n\nuse splinter::admin::messages::CircuitProposalVote;\n\nuse splinter::node_registry::Node;\n\nuse splinter::protos::admin::{\n\n CircuitManagementPayload, CircuitManagementPayload_Action as Action,\n\n CircuitManagementPayload_Header as Header,\n\n};\n\n\n\nuse super::{\n\n get_response_paging_info, validate_limit, ErrorResponse, SuccessResponse, DEFAULT_LIMIT,\n\n DEFAULT_OFFSET,\n\n};\n\nuse crate::rest_api::RestApiResponseError;\n\nuse db_models::models::{ConsortiumProposal, ConsortiumMember};\n\n\n\n#[derive(Debug, Serialize)]\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 68, "score": 21.853950963837782 }, { "content": " \"{}/scabbard/{}/{}/batches\",\n\n *splinterd_url, &circuit_id, &service_id\n\n ))\n\n .send_body(Body::Bytes(signed_payload))\n\n .map_err(|err| {\n\n RestApiResponseError::InternalError(format!(\"Failed to send request {}\", err))\n\n })\n\n .and_then(|mut resp| {\n\n let status = resp.status();\n\n let body = resp.body().wait().map_err(|err| {\n\n RestApiResponseError::InternalError(format!(\n\n \"Failed to receive response body {}\",\n\n err\n\n ))\n\n })?;\n\n\n\n match status {\n\n StatusCode::ACCEPTED => {\n\n let link = match parse_link(&body) {\n\n Ok(value) => value,\n", "file_path": 
"src/rest_api/routes/submit.rs", "rank": 69, "score": 18.869953460084094 }, { "content": " .add(b'!')\n\n .add(b'{')\n\n .add(b'}')\n\n .add(b'[')\n\n .add(b']')\n\n .add(b':')\n\n .add(b',');\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ErrorResponse<T: Serialize> {\n\n code: String,\n\n message: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n data: Option<T>,\n\n}\n\n\n\nimpl ErrorResponse<String> {\n\n pub fn internal_error() -> ErrorResponse<String> {\n\n ErrorResponse {\n\n code: \"500\".to_string(),\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 70, "score": 17.986872849824874 }, { "content": " err\n\n ))\n\n })?;\n\n\n\n let node_id = match node_status.get(\"node_id\") {\n\n Some(node_id_val) => node_id_val.as_str().unwrap_or(\"\").to_string(),\n\n None => \"\".to_string(),\n\n };\n\n\n\n Ok(node_id)\n\n })\n\n .and_then(move |node_id| {\n\n let uri = match format!(\"{}/nodes/{}\", splinterd_url, node_id).parse::<Uri>() {\n\n Ok(uri) => uri,\n\n Err(err) => return\n\n Either::A(\n\n future::err(GetNodeError(format!(\n\n \"Failed to get set up request : {}\",\n\n err\n\n ))))\n", "file_path": "src/config.rs", "rank": 71, "score": 17.329032329021214 }, { "content": " pub fn with_cli_args(&mut self, matches: &clap::ArgMatches<'_>) -> Self {\n\n Self {\n\n rest_api_endpoint: matches\n\n .value_of(\"bind\")\n\n .map(ToOwned::to_owned)\n\n .or_else(|| self.rest_api_endpoint.take()),\n\n\n\n splinterd_url: matches\n\n .value_of(\"splinterd_url\")\n\n .map(ToOwned::to_owned)\n\n .or_else(|| self.splinterd_url.take()),\n\n }\n\n }\n\n\n\n pub fn build(mut self) -> Result<ConsortiumConfig, ConfigurationError> {\n\n Ok(ConsortiumConfig {\n\n rest_api_endpoint: self\n\n .rest_api_endpoint\n\n .take()\n\n .ok_or_else(|| ConfigurationError::MissingValue(\"rest_api_endpoint\".to_owned()))?,\n\n splinterd_url: self\n\n .splinterd_url\n\n .take()\n\n .ok_or_else(|| 
ConfigurationError::MissingValue(\"splinterd_url\".to_owned()))?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 72, "score": 17.188926782760173 }, { "content": "pub use consortium::*;\n\npub use key::*;\n\npub use node::*;\n\npub use proposal::*;\n\npub use submit::*;\n\n\n\nuse percent_encoding::{AsciiSet, CONTROLS};\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub const DEFAULT_LIMIT: usize = 100;\n\npub const DEFAULT_OFFSET: usize = 0;\n\nconst MAX_LIMIT: usize = 1000;\n\n\n\nconst QUERY_ENCODE_SET: &AsciiSet = &CONTROLS\n\n .add(b' ')\n\n .add(b'\"')\n\n .add(b'<')\n\n .add(b'>')\n\n .add(b'`')\n\n .add(b'=')\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 73, "score": 16.54915941281266 }, { "content": " return Err(GetNodeError(format!(\n\n \"Failed to get splinter node metadata. Splinterd responded with status {}\",\n\n resp.status()\n\n )));\n\n }\n\n let body = resp\n\n .into_body()\n\n .concat2()\n\n .wait()\n\n .map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node metadata: {}\",\n\n err\n\n ))\n\n })?\n\n .to_vec();\n\n\n\n let node_status: Value = serde_json::from_slice(&body).map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node metadata: {}\",\n", "file_path": "src/config.rs", "rank": 74, "score": 16.4920058730486 }, { "content": " err\n\n ))\n\n })?\n\n .to_vec();\n\n\n\n match status {\n\n StatusCode::OK => {\n\n let node: Node = serde_json::from_slice(&body).map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node: {}\",\n\n err\n\n ))\n\n })?;\n\n\n\n Ok(node)\n\n }\n\n _ => Err(GetNodeError(format!(\n\n \"Failed to get splinter node data. 
Splinterd responded with status {}\",\n\n status\n\n ))),\n\n }\n\n }))\n\n }),\n\n )\n\n}\n", "file_path": "src/config.rs", "rank": 75, "score": 15.736628562580128 }, { "content": " };\n\n\n\n Either::B(client\n\n .get(uri)\n\n .map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node: {}\",\n\n err\n\n ))\n\n })\n\n .then(|resp| {\n\n let response = resp?;\n\n let status = response.status();\n\n let body = response\n\n .into_body()\n\n .concat2()\n\n .wait()\n\n .map_err(|err| {\n\n GetNodeError(format!(\n\n \"Failed to get splinter node metadata: {}\",\n", "file_path": "src/config.rs", "rank": 76, "score": 15.644729877505304 }, { "content": " _ => {\n\n let body_value: serde_json::Value = serde_json::from_slice(&body).map_err(|err| {\n\n RestApiResponseError::InternalError(format!(\n\n \"Failed to parse response body {}\",\n\n err\n\n ))\n\n })?;\n\n let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Unknown cause\"),\n\n None => \"Unknown cause\",\n\n };\n\n debug!(\n\n \"Internal Server Error. 
Gameroom service responded with an error {} with message {}\",\n\n resp.status(),\n\n message\n\n );\n\n Err(RestApiResponseError::InternalError(message.to_string()))\n\n }\n\n }\n\n }).then(move |resp| match resp {\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 77, "score": 15.572843955746501 }, { "content": "\n\n pub fn unauthorized(message: &str) -> ErrorResponse<String> {\n\n ErrorResponse {\n\n code: \"401\".to_string(),\n\n message: message.to_string(),\n\n data: None,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Serialize> ErrorResponse<T> {\n\n pub fn bad_request_with_data(message: &str, data: T) -> ErrorResponse<T> {\n\n ErrorResponse {\n\n code: \"400\".to_string(),\n\n message: message.to_string(),\n\n data: Some(data),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 78, "score": 14.802566030979872 }, { "content": "\n\n let addr = HttpServer::new(move || {\n\n App::new()\n\n .data(Client::new())\n\n .data(splinterd_url.to_owned())\n\n .data(node.clone())\n\n .data(consortium_data.clone())\n\n .data(\n\n // change path extractor configuration\n\n web::Path::<String>::configure(|cfg| {\n\n // <- create custom error response\n\n cfg.error_handler(|err, _| handle_error(Box::new(err)))\n\n }),\n\n )\n\n .data(\n\n // change json extractor configuration\n\n web::Json::<String>::configure(|cfg| {\n\n // <- create custom error response\n\n cfg.error_handler(|err, _| handle_error(Box::new(err)))\n\n }),\n", "file_path": "src/rest_api/mod.rs", "rank": 79, "score": 14.801173984901034 }, { "content": " )\n\n .service(\n\n web::scope(\"/circuits\")\n\n .service(\n\n web::scope(\"/{circuit_id}\")\n\n .service(web::resource(\"/batches\").route(\n\n web::post().to_async(routes::submit_scabbard_payload),\n\n )),\n\n ),\n\n )\n\n .service(\n\n web::scope(\"/keys\").service(\n\n web::resource(\"/{public_key}\")\n\n .route(web::get().to_async(routes::fetch_key_info)),\n\n ),\n\n )\n\n })\n\n .bind(bind_url)?\n\n .disable_signals()\n\n 
.system_exit()\n", "file_path": "src/rest_api/mod.rs", "rank": 80, "score": 14.386411107770398 }, { "content": " future,\n\n future::{Either, IntoFuture},\n\n Future,\n\n};\n\nuse splinter::node_registry::Node;\n\nuse splinter::service::scabbard::{BatchInfo, BatchStatus};\n\n\n\nuse super::{ErrorResponse, SuccessResponse};\n\n\n\nuse crate::rest_api::RestApiResponseError;\n\n\n\nconst DEFAULT_WAIT: u64 = 30; // default wait time in seconds for batch to be commited\n\n\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 81, "score": 14.239687918788256 }, { "content": " pub fn splinterd_url(&self) -> &str {\n\n &self.splinterd_url\n\n }\n\n}\n\n\n\npub struct ConsortiumConfigBuilder {\n\n rest_api_endpoint: Option<String>,\n\n splinterd_url: Option<String>,\n\n}\n\n\n\nimpl Default for ConsortiumConfigBuilder {\n\n fn default() -> Self {\n\n Self {\n\n rest_api_endpoint: Some(\"127.0.0.1:8000\".to_owned()),\n\n splinterd_url: Some(\"http://127.0.0.1:8080\".to_owned()),\n\n }\n\n }\n\n}\n\n\n\nimpl ConsortiumConfigBuilder {\n", "file_path": "src/config.rs", "rank": 82, "score": 14.167484191851777 }, { "content": " )\n\n .service(\n\n web::resource(\"/nodes/{identity}\")\n\n .route(web::get().to_async(routes::fetch_node)),\n\n )\n\n .service(web::resource(\"/nodes\").route(web::get().to_async(routes::list_nodes)))\n\n .service(\n\n web::resource(\"/circuits/propose\")\n\n .route(web::post().to_async(routes::propose_consortium)),\n\n )\n\n .service(\n\n web::scope(\"/proposals\")\n\n .service(\n\n web::resource(\"/{proposal_id}/vote\")\n\n .route(web::post().to_async(routes::proposal_vote)),\n\n )\n\n )\n\n .service(\n\n web::resource(\"/submit\")\n\n .route(web::post().to_async(routes::submit_signed_payload)),\n", "file_path": "src/rest_api/mod.rs", "rank": 83, "score": 13.466087107077069 }, { "content": " Err(err) => {\n\n return Either::B(future::err(RestApiResponseError::InternalError(\n\n format!(\"Failed to receive response body {}\", err),\n\n )))\n\n }\n\n 
};\n\n match resp.status() {\n\n StatusCode::OK => {\n\n let batches_info: Vec<BatchInfo> = match serde_json::from_slice(&body) {\n\n Ok(b) => b,\n\n Err(err) => {\n\n return Either::B(future::err(RestApiResponseError::InternalError(\n\n format!(\"Failed to parse response body {}\", err),\n\n )))\n\n }\n\n };\n\n\n\n // If batch status is still pending and the wait time has not yet passed,\n\n // send request again to re-check the batch status\n\n if batches_info\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 84, "score": 13.440848351841737 }, { "content": " Ok(link) => {\n\n let start = Instant::now();\n\n Either::A(check_batch_status(client, &splinterd_url, &link, start, wait).then(|resp| {\n\n match resp {\n\n Ok(batches_info) => {\n\n let invalid_batches = batches_info.iter().filter(|batch| {\n\n if let BatchStatus::Invalid(_) = batch.status {\n\n return true\n\n }\n\n false\n\n }).collect::<Vec<&BatchInfo>>();\n\n if !invalid_batches.is_empty() {\n\n let error_message = process_failed_baches(&invalid_batches);\n\n return Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request_with_data(&error_message, batches_info)));\n\n }\n\n\n\n if batches_info.iter().any(|batch| batch.status == BatchStatus::Pending) {\n\n return Ok(HttpResponse::Accepted()\n\n .json(SuccessResponse::new(batches_info)));\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 85, "score": 13.332598232337457 }, { "content": " _ => {\n\n let body_value: serde_json::Value = match serde_json::from_slice(&body) {\n\n Ok(b) => b,\n\n Err(err) => {\n\n return Either::B(future::err(RestApiResponseError::InternalError(\n\n format!(\"Failed to parse response body {}\", err),\n\n )))\n\n }\n\n };\n\n\n\n let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Unknown cause\"),\n\n None => \"Unknown cause\",\n\n };\n\n\n\n Either::B(future::err(RestApiResponseError::InternalError(\n\n message.to_string(),\n\n )))\n\n }\n\n }\n\n }),\n\n 
)\n\n}\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 86, "score": 13.227615595731617 }, { "content": "#[macro_use]\n\nextern crate serde_json;\n\nextern crate db_models;\n\n\n\nmod application_metadata;\n\nmod config;\n\nmod error;\n\nmod rest_api;\n\n\n\nuse std::thread;\n\n\n\nuse flexi_logger::{style, DeferredNow, LogSpecBuilder, Logger};\n\nuse log::Record;\n\nuse sawtooth_sdk::signing::create_context;\n\n\n\nuse crate::config::{get_node, ConsortiumConfigBuilder};\n\nuse crate::error::AdminOpDaemonError;\n\n\n\nconst APP_NAME: &str = env!(\"CARGO_PKG_NAME\");\n\nconst VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\n\n\n// format for logs\n", "file_path": "src/main.rs", "rank": 87, "score": 13.112139333124478 }, { "content": " }\n\n StatusCode::BAD_REQUEST => {\n\n let body_value: serde_json::Value = match serde_json::from_slice(&body) {\n\n Ok(b) => b,\n\n Err(err) => {\n\n return Either::B(future::err(RestApiResponseError::InternalError(\n\n format!(\"Failed to parse response body {}\", err),\n\n )))\n\n }\n\n };\n\n\n\n let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Request malformed.\"),\n\n None => \"Request malformed.\",\n\n };\n\n\n\n Either::B(future::err(RestApiResponseError::BadRequest(\n\n message.to_string(),\n\n )))\n\n }\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 88, "score": 12.687977545706618 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\npub struct SuccessResponse<T: Serialize> {\n\n data: T,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n paging: Option<Paging>,\n\n}\n\n\n\nimpl<T: Serialize> SuccessResponse<T> {\n\n pub fn new(data: T) -> SuccessResponse<T> {\n\n SuccessResponse { data, paging: None }\n\n }\n\n\n\n pub fn list(data: T, paging: Paging) -> SuccessResponse<T> {\n\n SuccessResponse {\n\n data,\n\n paging: Some(paging),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 89, "score": 12.472135786699127 }, { 
"content": " message: \"The server encountered an error\".to_string(),\n\n data: None,\n\n }\n\n }\n\n\n\n pub fn bad_request(message: &str) -> ErrorResponse<String> {\n\n ErrorResponse {\n\n code: \"400\".to_string(),\n\n message: message.to_string(),\n\n data: None,\n\n }\n\n }\n\n\n\n pub fn not_found(message: &str) -> ErrorResponse<String> {\n\n ErrorResponse {\n\n code: \"404\".to_string(),\n\n message: message.to_string(),\n\n data: None,\n\n }\n\n }\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 90, "score": 12.318789869878215 }, { "content": " Err(err) => {\n\n debug!(\"Internal Server Error. Error parsing splinter daemon response {}\", err);\n\n return Err(RestApiResponseError::InternalError(format!(\"{}\", err)))\n\n }\n\n };\n\n Ok(link)\n\n }\n\n StatusCode::BAD_REQUEST => {\n\n let body_value: serde_json::Value = serde_json::from_slice(&body).map_err(|err| {\n\n RestApiResponseError::InternalError(format!(\n\n \"Failed to parse response body {}\",\n\n err\n\n ))\n\n })?;\n\n let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Request malformed.\"),\n\n None => \"Request malformed.\",\n\n };\n\n Err(RestApiResponseError::BadRequest(message.to_string()))\n\n }\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 91, "score": 11.992190985807857 }, { "content": "\n\n let (rest_api_shutdown_handle, rest_api_join_handle) = rest_api::run(\n\n config.rest_api_endpoint(),\n\n config.splinterd_url(),\n\n node,\n\n public_key.as_hex(),\n\n )?;\n\n\n\n ctrlc::set_handler(move || {\n\n info!(\"Received Shutdown\");\n\n\n\n if let Err(err) = rest_api_shutdown_handle.shutdown() {\n\n error!(\"Unable to cleanly shutdown REST API server: {}\", err);\n\n }\n\n })\n\n .expect(\"Error setting Ctrl-C handler\");\n\n\n\n let _ = rest_api_join_handle.join();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 92, "score": 11.711636366291993 }, { "content": " log_spec_builder.default(log_level);\n\n 
log_spec_builder.module(\"hyper\", log::LevelFilter::Warn);\n\n log_spec_builder.module(\"tokio\", log::LevelFilter::Warn);\n\n log_spec_builder.module(\"trust_dns\", log::LevelFilter::Warn);\n\n\n\n Logger::with(log_spec_builder.build())\n\n .format(log_format)\n\n .start()?;\n\n\n\n let config = ConsortiumConfigBuilder::default()\n\n .with_cli_args(&matches)\n\n .build()?;\n\n\n\n // Generate a public/private key pair\n\n let context = create_context(\"secp256k1\")?;\n\n let private_key = context.new_random_private_key()?;\n\n let public_key = context.get_public_key(&*private_key)?;\n\n\n\n // Get splinterd node information\n\n let node = get_node(config.splinterd_url())?;\n", "file_path": "src/main.rs", "rank": 93, "score": 11.65979009436628 }, { "content": " .json(ErrorResponse::not_found(&err.to_string()))),\n\n RestApiResponseError::BadRequest(err) => Ok(HttpResponse::BadRequest()\n\n .json(ErrorResponse::bad_request(&err.to_string()))),\n\n _ => Ok(HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error())),\n\n }\n\n }\n\n error::BlockingError::Canceled => {\n\n debug!(\"Internal Server Error: {}\", err);\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n },\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 94, "score": 11.321731031252789 }, { "content": " }\n\n\n\n Ok(HttpResponse::Ok()\n\n .json(SuccessResponse::new(batches_info)))\n\n\n\n }\n\n Err(err) => match err {\n\n RestApiResponseError::BadRequest(message) => {\n\n Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))\n\n }\n\n _ => {\n\n Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))\n\n }\n\n }\n\n }\n\n }))\n\n }\n\n Err(err) => match err {\n\n RestApiResponseError::BadRequest(message) => {\n\n Either::B(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)).into_future())\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 95, 
"score": 11.297685054008424 }, { "content": " let message = match body_value.get(\"message\") {\n\n Some(value) => value.as_str().unwrap_or(\"Request malformed.\"),\n\n None => \"Request malformed.\",\n\n };\n\n Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))\n\n }\n\n _ => {\n\n debug!(\n\n \"Internal Server Error. Splinterd responded with error {}\",\n\n resp.status(),\n\n );\n\n\n\n Ok(HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error()))\n\n }\n\n }\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 96, "score": 11.1457512539399 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]\n\npub struct Paging {\n\n current: String,\n\n offset: usize,\n\n limit: usize,\n\n total: usize,\n\n first: String,\n\n prev: String,\n\n next: String,\n\n last: String,\n\n}\n\n\n", "file_path": "src/rest_api/routes/mod.rs", "rank": 97, "score": 10.664721222057082 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::collections::HashMap;\n\nuse std::thread::sleep;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse actix_web::{client::Client, dev::Body, http::StatusCode, web, Error, HttpResponse};\n\nuse futures::{\n", "file_path": "src/rest_api/routes/submit.rs", "rank": 98, "score": 10.125369226771568 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed 
under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::collections::HashMap;\n\nuse std::time::{Duration, SystemTime};\n\n\n\nuse actix_web::{error, web, Error, HttpResponse};\n\nuse futures::Future;\n\nuse openssl::hash::{hash, MessageDigest};\n", "file_path": "src/rest_api/routes/proposal.rs", "rank": 99, "score": 8.637618842854534 } ]
Rust
src/lib.rs
pudnax/chonker
dff08090f23322631ea60fe13d5dd3789f926577
#![feature(get_mut_unchecked)] use std::{ path::{Path, PathBuf}, time::Instant, }; pub mod camera; pub mod context; mod utils; mod watcher; pub use camera::{Camera, CameraBinding}; pub use context::{ Context, GlobalUniformBinding, HdrBackBuffer, PipelineHandle, Uniform, VolumeTexture, }; pub use utils::{dispatch_optimal, shader_compiler, NonZeroSized}; pub use watcher::{ReloadablePipeline, Watcher}; use color_eyre::eyre::Result; use pollster::FutureExt; use utils::{frame_counter::FrameCounter, input::Input, recorder::RecordEvent}; use winit::{ dpi::{PhysicalPosition, PhysicalSize}, event::{ DeviceEvent, ElementState, Event, KeyboardInput, MouseScrollDelta, VirtualKeyCode, WindowEvent, }, event_loop::{ControlFlow, EventLoop}, window::Window, }; const SHADER_FOLDER: &str = "shaders"; const SCREENSHOTS_FOLDER: &str = "screenshots"; const VIDEO_FOLDER: &str = "recordings"; pub trait Demo: 'static + Sized { fn init(ctx: &mut Context) -> Self; fn resize(&mut self, _: &wgpu::Device, _: &wgpu::Queue, _: &wgpu::SurfaceConfiguration) {} fn update(&mut self, _: &mut Context) {} fn update_input(&mut self, _: WindowEvent) {} fn render(&mut self, _: &Context) {} } pub fn run<D: Demo>( event_loop: EventLoop<(PathBuf, wgpu::ShaderModule)>, window: Window, camera: Option<Camera>, ) -> Result<()> { color_eyre::install()?; env_logger::init(); let mut context = Context::new(&window, &event_loop, camera).block_on()?; let mut recording_status = false; let recorder = utils::recorder::Recorder::new(); print_help(context.get_info(), &recorder.ffmpeg_version); let mut frame_counter = FrameCounter::new(); let mut input = Input::new(); let mut mouse_dragged = false; let rotate_speed = 0.0025; let zoom_speed = 0.002; let mut demo = D::init(&mut context); let mut main_window_focused = false; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Wait; match event { Event::MainEventsCleared => { context.update(&frame_counter, &input); demo.update(&mut context); 
window.request_redraw(); } Event::WindowEvent { event, window_id, .. } if window.id() == window_id => { input.update(&event, &window); match event { WindowEvent::Focused(focused) => main_window_focused = focused, WindowEvent::CloseRequested | WindowEvent::KeyboardInput { input: KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), state: ElementState::Pressed, .. }, .. } => *control_flow = ControlFlow::Exit, WindowEvent::Resized(PhysicalSize { width, height }) | WindowEvent::ScaleFactorChanged { new_inner_size: &mut PhysicalSize { width, height }, .. } => { if width != 0 && height != 0 { context.resize(width, height); demo.resize(&context.device, &context.queue, &context.surface_config); } if recording_status { println!("Stop recording. Resolution has been changed.",); recording_status = false; recorder.send(RecordEvent::Finish); } } WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(keycode), .. }, .. } => { if VirtualKeyCode::F11 == keycode { let now = Instant::now(); let frame = context.capture_frame(); eprintln!("Capture image: {:#.2?}", now.elapsed()); recorder.send(RecordEvent::Screenshot(frame)); } if recorder.ffmpeg_installed() && VirtualKeyCode::F12 == keycode { if !recording_status { recorder .send(RecordEvent::Start(context.capture_image_dimentions())); } else { recorder.send(RecordEvent::Finish); } recording_status = !recording_status; } } _ => {} } demo.update_input(event); } Event::DeviceEvent { ref event, .. } if main_window_focused => match event { DeviceEvent::Button { #[cfg(target_os = "macos")] button: 0, #[cfg(not(target_os = "macos"))] button: 1, state: statee, } => { let is_pressed = *statee == ElementState::Pressed; mouse_dragged = is_pressed; } DeviceEvent::MouseWheel { delta, .. } => { let scroll_amount = -match delta { MouseScrollDelta::LineDelta(_, scroll) => scroll * 1.0, MouseScrollDelta::PixelDelta(PhysicalPosition { y: scroll, .. 
}) => { *scroll as f32 } }; context.camera.add_zoom(scroll_amount * zoom_speed); } DeviceEvent::MouseMotion { delta } => { if mouse_dragged { context.camera.add_yaw(-delta.0 as f32 * rotate_speed); context.camera.add_pitch(delta.1 as f32 * rotate_speed); } } _ => (), }, Event::RedrawRequested(_) => { frame_counter.record(); demo.render(&context); match context.render() { Ok(_) => {} Err(wgpu::SurfaceError::Lost) => { context.resize(context.width, context.height); window.request_redraw(); } Err(wgpu::SurfaceError::OutOfMemory) => *control_flow = ControlFlow::Exit, Err(e) => { eprintln!("{:?}", e); window.request_redraw(); } } if recording_status { let (frame, _) = context.capture_frame(); recorder.send(RecordEvent::Record(frame)); } } Event::UserEvent((path, shader)) => context.register_shader_change(path, shader), Event::LoopDestroyed => { println!("\n// End from the loop. Bye bye~⏎ "); } _ => {} } }) } pub fn print_help(info: impl std::fmt::Display, ffmpeg_version: &str) { println!("{}", info); println!("{}", ffmpeg_version); println!( "Default shader path:\n\t{}\n", Path::new(SHADER_FOLDER).canonicalize().unwrap().display() ); println!("- `F11`: Take Screenshot"); println!("- `F12`: Start/Stop record video"); println!("- `ESC`: Exit the application"); println!(); println!("// Set up our new world⏎ "); println!("// And let's begin the⏎ "); println!("\tSIMULATION⏎ \n"); }
#![feature(get_mut_unchecked)] use std::{ path::{Path, PathBuf}, time::Instant, }; pub mod camera; pub mod context; mod utils; mod watcher; pub use camera::{Camera, CameraBinding}; pub use context::{ Context, GlobalUniformBinding, HdrBackBuffer, PipelineHandle, Uniform, VolumeTexture, }; pub use utils::{dispatch_optimal, shader_compiler, NonZeroSized}; pub use watcher::{ReloadablePipeline, Watcher}; use color_eyre::eyre::Result; use pollster::FutureExt; use utils::{frame_counter::FrameCounter, input::Input, recorder::RecordEvent}; use winit::{ dpi::{PhysicalPosition, PhysicalSize}, event::{ DeviceEvent, ElementState, Event, KeyboardInput, MouseScrollDelta, VirtualKeyCode, WindowEvent, }, event_loop::{ControlFlow, EventLoop}, window::Window, }; const SHADER_FOLDER: &str = "shaders"; const SCREENSHOTS_FOLDER: &str = "screenshots"; const VIDEO_FOLDER: &str = "recordings"; pub trait Demo: 'static + Sized { fn init(ctx: &mut Context) -> Self; fn resize(&mut self, _: &wgpu::Device, _: &wgpu::Queue, _: &wgpu::SurfaceConfiguration) {} fn update(&mut self, _: &mut Context) {} fn update_input(&mut self, _: WindowEvent) {} fn render(&mut self, _: &Context) {} } pub fn run<D: Demo>( event_loop: EventLoop<(PathBuf, wgpu::ShaderModule)>, window: Window, camera: Option<Camera>, ) -> Result<()> { color_eyre::install()?; env_logger::init(); let mut context = Context::new(&window, &event_loop, camera).block_on()?; let mut recording_status = false; let recorder = utils::recorder::Recorder::new(); print_help(context.get_info(), &recorder.ffmpeg_version); let mut frame_counter = FrameCounter::new(); let mut input = Input::new(); let mut mouse_dragged = false; let rotate_speed = 0.0025; let zoom_speed = 0.002; let mut demo = D::init(&mut context); let mut main_window_focused = false; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Wait; match event { Event::MainEventsCleared => { context.update(&frame_counter, &input); demo.update(&mut context); 
window.request_redraw(); } Event::WindowEvent { event, window_id, .. } if window.id() == window_id => { input.update(&event, &window); match event { WindowEvent::Focused(focused) => main_window_focused = focused, WindowEvent::CloseRequested | WindowEvent::KeyboardInput { input: KeyboardInput { virtual_keycode: Some(VirtualKeyCode::Escape), state: ElementState::Pressed, .. }, .. } => *control_flow = ControlFlow::Exit, WindowEvent::Resized(PhysicalSize { width, height }) | WindowEvent::ScaleFactorChanged { new_inner_size: &mut PhysicalSize { width, height }, .. } => { if width != 0 && height != 0 { context.resize(width, height); demo.resize(&context.device, &context.queue, &context.surface_config); } if recording_status { println!("Stop recording. Resolution has been changed.",); recording_status = false; recorder.send(RecordEvent::Finish); } } WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(keycode), .. }, .. } => { if VirtualKeyCode::F11 == keycode { let now = Instant::now(); let frame = context.capture_frame(); eprintln!("Capture image: {:#.2?}", now.elapsed()); recorder.send(RecordEvent::Screenshot(frame)); } if recorder.ffmpeg_installed() && VirtualKeyCode::F12 == keycode { if !recording_status { recorder .send(RecordEvent::Start(context.capture_image_dimentions())); } else { recorder.send(RecordEvent::Finish); } recording_status = !recording_status; } } _ => {} } demo.update_input(event); } Event::DeviceEvent { ref event, .. } if main_window_focused => match event { DeviceEvent::Button { #[cfg(target_os = "macos")] button: 0, #[cfg(not(target_os = "macos"))] button: 1, state: statee, } => { let is_pressed = *statee == ElementState::Pressed; mouse_dragged = is_pressed; } DeviceEvent::MouseWheel { delta, .. } => { let scroll_amount = -match delta { MouseScrollDelta::LineDelta(_, scroll) => scroll * 1.0, MouseScrollDelta::PixelDelta(PhysicalPosition { y: scroll, .. 
}) => { *scroll as f32 } }; context.camera.add_zoom(scroll_amount * zoom_speed); } DeviceEvent::MouseMotion { delta } => { if mouse_dragged { context.camera.add_yaw(-delta.0 as f32 * rotate_speed); context.camera.add_pitch(delta.1 as f32 * rotate_speed); } } _ => (), }, Event::RedrawRequested(_) => { frame_counter.record(); demo.render(&context); match context.render() { Ok(_) => {} Err(wgpu::SurfaceError::Lost) => { context.resize(context.width, context.height); window.request_redraw(); } Err(wgpu::SurfaceError::OutOfMemory) => *control_flow = ControlFlow::Exit, Err(e) => { eprintln!("{:?}", e); window.request_redraw(); } } if recording_status { let (frame, _) = context.capture_frame(); recorder.send(RecordEvent::Record(frame)); } } Event::UserEvent((path, shader)) => context.register_shader_change(path, shader), Event::LoopDestroyed => { println!("\n// End from the loop. Bye bye~⏎ "); } _ => {} } }) }
pub fn print_help(info: impl std::fmt::Display, ffmpeg_version: &str) { println!("{}", info); println!("{}", ffmpeg_version); println!( "Default shader path:\n\t{}\n", Path::new(SHADER_FOLDER).canonicalize().unwrap().display() ); println!("- `F11`: Take Screenshot"); println!("- `F12`: Start/Stop record video"); println!("- `ESC`: Exit the application"); println!(); println!("// Set up our new world⏎ "); println!("// And let's begin the⏎ "); println!("\tSIMULATION⏎ \n"); }
function_block-full_function
[ { "content": "pub fn save_screenshot(frame: Vec<u8>, image_dimentions: ImageDimentions) -> Result<()> {\n\n let now = Instant::now();\n\n let screenshots_folder = Path::new(SCREENSHOTS_FOLDER);\n\n create_folder(screenshots_folder)?;\n\n let path = screenshots_folder.join(format!(\n\n \"screenshot-{}.png\",\n\n chrono::Local::now().format(\"%d-%m-%Y-%H-%M-%S\")\n\n ));\n\n let file = File::create(path)?;\n\n let w = BufWriter::new(file);\n\n let mut encoder =\n\n png::Encoder::new(w, image_dimentions.width as _, image_dimentions.height as _);\n\n encoder.set_color(png::ColorType::Rgba);\n\n encoder.set_depth(png::BitDepth::Eight);\n\n let padded_bytes = image_dimentions.padded_bytes_per_row as _;\n\n let unpadded_bytes = image_dimentions.unpadded_bytes_per_row as _;\n\n let mut writer = encoder\n\n .write_header()?\n\n .into_stream_writer_with_size(unpadded_bytes)?;\n\n for chunk in frame\n\n .chunks(padded_bytes)\n\n .map(|chunk| &chunk[..unpadded_bytes])\n\n {\n\n writer.write_all(chunk)?;\n\n }\n\n writer.finish()?;\n\n eprintln!(\"Encode image: {:#.2?}\", now.elapsed());\n\n Ok(())\n\n}\n", "file_path": "src/utils/recorder.rs", "rank": 0, "score": 181524.54273197945 }, { "content": "fn new_ffmpeg_command(image_dimentions: ImageDimentions, filename: &str) -> Result<RecorderThread> {\n\n #[rustfmt::skip]\n\n let args = [\n\n \"-framerate\", \"60\",\n\n \"-pix_fmt\", \"rgba\",\n\n \"-f\", \"rawvideo\",\n\n \"-i\", \"pipe:\",\n\n \"-c:v\", \"libx264\",\n\n \"-crf\", \"15\",\n\n \"-preset\", \"ultrafast\",\n\n \"-tune\", \"animation\",\n\n \"-color_primaries\", \"bt709\",\n\n \"-color_trc\", \"bt709\",\n\n \"-colorspace\", \"bt709\",\n\n \"-color_range\", \"tv\",\n\n \"-chroma_sample_location\", \"center\",\n\n \"-pix_fmt\", \"yuv420p\",\n\n \"-movflags\", \"+faststart\",\n\n \"-y\",\n\n ];\n", "file_path": "src/utils/recorder.rs", "rank": 2, "score": 144782.22975451074 }, { "content": "pub trait NonZeroSized: Sized {\n\n const SIZE: NonZeroU64 = unsafe { 
NonZeroU64::new_unchecked(std::mem::size_of::<Self>() as _) };\n\n}\n\n/// Holds invariants? Nah!\n\nimpl<T> NonZeroSized for T where T: Sized {}\n\n\n\n/// A hash map with a [HashSet](std::collections::HashSet) to hold unique values\n\n#[derive(Debug)]\n\npub struct ContiniousHashMap<K, V>(HashMap<K, Vec<V>>);\n\n\n\nimpl<K, V> Deref for ContiniousHashMap<K, V> {\n\n type Target = HashMap<K, Vec<V>>;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<K, V> DerefMut for ContiniousHashMap<K, V> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n", "file_path": "src/utils/mod.rs", "rank": 3, "score": 144400.8238130676 }, { "content": "pub fn create_folder<P: AsRef<Path>>(name: P) -> io::Result<()> {\n\n match std::fs::create_dir(name) {\n\n Ok(_) => {}\n\n Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {}\n\n Err(e) => return Err(e),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 4, "score": 130359.25300958013 }, { "content": "pub fn green_blink() {\n\n const ESC: &str = \"\\x1B[\";\n\n const RESET: &str = \"\\x1B[0m\";\n\n eprint!(\"\\r{}42m{}K{}\\r\", ESC, ESC, RESET);\n\n std::io::stdout().flush().unwrap();\n\n std::thread::spawn(|| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n eprint!(\"\\r{}40m{}K{}\\r\", ESC, ESC, RESET);\n\n std::io::stdout().flush().unwrap();\n\n });\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 5, "score": 121146.83277263571 }, { "content": "pub fn dispatch_optimal(len: u32, subgroup_size: u32) -> u32 {\n\n let padded_size = (subgroup_size - len % subgroup_size) % subgroup_size;\n\n (len + padded_size) / subgroup_size\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 7, "score": 108579.9922558181 }, { "content": "pub trait ReloadablePipeline {\n\n fn reload(&mut self, device: &wgpu::Device, module: &wgpu::ShaderModule);\n\n}\n\n\n\npub struct Watcher {\n\n _watcher: notify::RecommendedWatcher,\n\n pub hash_dump: 
ContiniousHashMap<PathBuf, Rc<dyn ReloadablePipeline>>,\n\n}\n\n\n\nimpl Watcher {\n\n pub fn new(\n\n device: Arc<wgpu::Device>,\n\n event_loop: &EventLoop<(PathBuf, wgpu::ShaderModule)>,\n\n ) -> Result<Self> {\n\n let mut watcher = notify::recommended_watcher(watch_callback(device, event_loop))?;\n\n watcher.configure(Config::PreciseEvents(true))?;\n\n watcher.watch(Path::new(SHADER_FOLDER), notify::RecursiveMode::Recursive)?;\n\n\n\n Ok(Self {\n\n _watcher: watcher,\n", "file_path": "src/watcher.rs", "rank": 9, "score": 106903.79374061697 }, { "content": "fn record_thread(rx: Receiver<RecordEvent>) {\n\n // puffin::profile_function!();\n\n\n\n let mut recorder = None;\n\n\n\n while let Ok(event) = rx.recv() {\n\n match event {\n\n RecordEvent::Start(image_dimentions) => {\n\n // puffin::profile_scope!(\"Start Recording\");\n\n\n\n create_folder(VIDEO_FOLDER).unwrap();\n\n let dir_path = Path::new(VIDEO_FOLDER);\n\n let filename = dir_path.join(format!(\n\n \"record-{}.mp4\",\n\n chrono::Local::now().format(\"%d-%m-%Y-%H-%M-%S\")\n\n ));\n\n recorder =\n\n Some(new_ffmpeg_command(image_dimentions, filename.to_str().unwrap()).unwrap());\n\n }\n\n RecordEvent::Record(frame) => {\n", "file_path": "src/utils/recorder.rs", "rank": 10, "score": 104929.66319539327 }, { "content": "fn main() -> Result<()> {\n\n let event_loop = EventLoop::with_user_event();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Vokselis\")\n\n .with_inner_size(LogicalSize::new(1280, 720))\n\n .build(&event_loop)?;\n\n\n\n run::<BasicTrig>(event_loop, window, None)\n\n}\n", "file_path": "examples/trig.rs", "rank": 11, "score": 81523.20061693942 }, { "content": "fn create_host_buffer(device: &wgpu::Device, image_dimentions: &ImageDimentions) -> wgpu::Buffer {\n\n device.create_buffer(&wgpu::BufferDescriptor {\n\n label: Some(\"Screenshot Buffer\"),\n\n size: image_dimentions.linear_size(),\n\n usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,\n\n mapped_at_creation: 
false,\n\n })\n\n}\n", "file_path": "src/context/screenshot.rs", "rank": 12, "score": 80146.06000468729 }, { "content": "fn main() -> Result<()> {\n\n let event_loop = EventLoop::with_user_event();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Vokselis\")\n\n .with_inner_size(LogicalSize::new(1280, 720))\n\n .build(&event_loop)?;\n\n let window_size = window.inner_size();\n\n\n\n let camera = Camera::new(\n\n 1.,\n\n 0.5,\n\n 1.,\n\n (0.5, 0.5, 0.5).into(),\n\n window_size.width as f32 / window_size.height as f32,\n\n );\n\n run::<Bonsai>(event_loop, window, Some(camera))\n\n}\n", "file_path": "examples/bonsai/main.rs", "rank": 13, "score": 78786.59679366497 }, { "content": "fn main() -> Result<()> {\n\n let event_loop = EventLoop::with_user_event();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Vokselis\")\n\n .with_inner_size(LogicalSize::new(1280, 720))\n\n .build(&event_loop)?;\n\n let window_size = window.inner_size();\n\n\n\n let camera = Camera::new(\n\n 3.,\n\n -0.5,\n\n 1.,\n\n (0., 0., 0.).into(),\n\n window_size.width as f32 / window_size.height as f32,\n\n );\n\n run::<Xor>(event_loop, window, Some(camera))\n\n}\n", "file_path": "examples/xor/main.rs", "rank": 14, "score": 78786.59679366497 }, { "content": "// https://github.com/gfx-rs/wgpu/blob/master/wgpu-hal/src/vulkan/adapter.rs#L1166\n\nfn get_options() -> spv::Options {\n\n let capabilities = vec![\n\n spv::Capability::Shader,\n\n spv::Capability::Matrix,\n\n spv::Capability::Sampled1D,\n\n spv::Capability::Image1D,\n\n spv::Capability::ImageQuery,\n\n spv::Capability::DerivativeControl,\n\n spv::Capability::SampledCubeArray,\n\n spv::Capability::SampleRateShading,\n\n //Note: this is requested always, no matter what the actual\n\n // adapter supports. 
It's not the responsibility of SPV-out\n\n // translation to handle the storage support for formats.\n\n spv::Capability::StorageImageExtendedFormats,\n\n spv::Capability::MultiView,\n\n //TODO: fill out the rest\n\n ];\n\n\n\n let mut flags = spv::WriterFlags::empty();\n\n flags.set(\n", "file_path": "src/utils/shader_compiler.rs", "rank": 15, "score": 74779.74140505888 }, { "content": "fn watch_callback(\n\n device: Arc<wgpu::Device>,\n\n event_loop: &EventLoop<(PathBuf, wgpu::ShaderModule)>,\n\n) -> impl FnMut(notify::Result<notify::Event>) {\n\n let proxy = event_loop.create_proxy();\n\n let device = Arc::downgrade(&device);\n\n let mut shader_compiler = ShaderCompiler::new();\n\n move |event| match event {\n\n Ok(res) => {\n\n if let notify::event::Event {\n\n kind: EventKind::Access(AccessKind::Close(AccessMode::Write)),\n\n paths,\n\n ..\n\n } = res\n\n {\n\n for path in paths\n\n .into_iter()\n\n .filter(|p| p.extension() == Some(OsStr::new(\"wgsl\")))\n\n {\n\n match shader_compiler.create_shader_module(&path) {\n", "file_path": "src/watcher.rs", "rank": 16, "score": 66381.23031100235 }, { "content": "fn create_rgb_framebuffer(\n\n device: &wgpu::Device,\n\n config: &wgpu::SurfaceConfiguration,\n\n) -> wgpu::Texture {\n\n let size = wgpu::Extent3d {\n\n width: config.width,\n\n height: config.height,\n\n depth_or_array_layers: 1,\n\n };\n\n let multisampled_frame_descriptor = &wgpu::TextureDescriptor {\n\n label: Some(\"RGB Texture\"),\n\n format: wgpu::TextureFormat::Rgba8Unorm,\n\n size,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,\n\n };\n\n\n\n device.create_texture(multisampled_frame_descriptor)\n\n}\n", "file_path": "src/context.rs", "rank": 17, "score": 61982.120912525934 }, { "content": "struct RecorderThread {\n\n process: Child,\n\n image_dimentions: ImageDimentions,\n\n}\n\n\n", "file_path": "src/utils/recorder.rs", 
"rank": 18, "score": 56632.8298022936 }, { "content": " }\n\n VirtualKeyCode::Space => {\n\n self.space_pressed = pressed;\n\n }\n\n _ => return false,\n\n };\n\n }\n\n WindowEvent::CursorMoved {\n\n position: PhysicalPosition { x, y },\n\n ..\n\n } => {\n\n let PhysicalSize { width, height } = window.inner_size();\n\n let x = (*x as f32 / width as f32 - 0.5) * 2.;\n\n let y = -(*y as f32 / height as f32 - 0.5) * 2.;\n\n self.mouse_position = [x, y];\n\n }\n\n WindowEvent::MouseInput {\n\n button: winit::event::MouseButton::Left,\n\n state,\n\n ..\n", "file_path": "src/utils/input.rs", "rank": 19, "score": 54065.288283057394 }, { "content": "}\n\n\n\nimpl Input {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n pub fn update(&mut self, event: &WindowEvent, window: &Window) -> bool {\n\n match event {\n\n WindowEvent::KeyboardInput {\n\n input:\n\n KeyboardInput {\n\n virtual_keycode: Some(keycode),\n\n state,\n\n ..\n\n },\n\n ..\n\n } => {\n\n let pressed = state == &ElementState::Pressed;\n\n match keycode {\n", "file_path": "src/utils/input.rs", "rank": 20, "score": 54061.78761676029 }, { "content": "use winit::{\n\n dpi::{PhysicalPosition, PhysicalSize},\n\n event::{ElementState, KeyboardInput, VirtualKeyCode, WindowEvent},\n\n window::Window,\n\n};\n\n\n\nuse crate::context::Uniform;\n\n\n\n#[derive(Debug, Default)]\n\npub struct Input {\n\n pub up_pressed: bool,\n\n pub down_pressed: bool,\n\n pub right_pressed: bool,\n\n pub left_pressed: bool,\n\n pub slash_pressed: bool,\n\n pub right_shift_pressed: bool,\n\n pub enter_pressed: bool,\n\n pub space_pressed: bool,\n\n pub left_mouse_pressed: bool,\n\n pub mouse_position: [f32; 2],\n", "file_path": "src/utils/input.rs", "rank": 21, "score": 54060.04716612324 }, { "content": " } => self.left_mouse_pressed = matches!(state, ElementState::Pressed),\n\n\n\n _ => {}\n\n }\n\n true\n\n }\n\n\n\n pub fn process_position(&self, uniform: &mut Uniform) {\n\n let dx = 0.01;\n\n if self.left_pressed {\n\n 
uniform.pos[0] -= dx;\n\n }\n\n if self.right_pressed {\n\n uniform.pos[0] += dx;\n\n }\n\n if self.down_pressed {\n\n uniform.pos[1] -= dx;\n\n }\n\n if self.up_pressed {\n\n uniform.pos[1] += dx;\n", "file_path": "src/utils/input.rs", "rank": 22, "score": 54046.606635469856 }, { "content": " }\n\n if self.slash_pressed {\n\n uniform.pos[2] -= dx;\n\n }\n\n if self.right_shift_pressed {\n\n uniform.pos[2] += dx;\n\n }\n\n uniform.mouse_pressed = self.left_mouse_pressed as _;\n\n uniform.mouse = self.mouse_position;\n\n }\n\n}\n", "file_path": "src/utils/input.rs", "rank": 23, "score": 54036.73835414328 }, { "content": " VirtualKeyCode::Up => {\n\n self.up_pressed = pressed;\n\n }\n\n VirtualKeyCode::Down => {\n\n self.down_pressed = pressed;\n\n }\n\n VirtualKeyCode::Left => {\n\n self.left_pressed = pressed;\n\n }\n\n VirtualKeyCode::Right => {\n\n self.right_pressed = pressed;\n\n }\n\n VirtualKeyCode::Slash => {\n\n self.slash_pressed = pressed;\n\n }\n\n VirtualKeyCode::RShift => {\n\n self.right_shift_pressed = pressed;\n\n }\n\n VirtualKeyCode::Return => {\n\n self.enter_pressed = pressed;\n", "file_path": "src/utils/input.rs", "rank": 24, "score": 54032.82064941902 }, { "content": "use std::{\n\n collections::HashMap,\n\n hash::Hash,\n\n io::{self, Write},\n\n num::NonZeroU64,\n\n ops::{Deref, DerefMut},\n\n path::Path,\n\n};\n\n\n\npub mod frame_counter;\n\npub mod input;\n\npub mod recorder;\n\npub mod shader_compiler;\n\n\n", "file_path": "src/utils/mod.rs", "rank": 25, "score": 53954.531032525694 }, { "content": " }\n\n}\n\n\n\nimpl<K, V> Default for ContiniousHashMap<K, V> {\n\n fn default() -> Self {\n\n Self(HashMap::new())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ImageDimentions {\n\n pub width: u32,\n\n pub height: u32,\n\n pub unpadded_bytes_per_row: u32,\n\n pub padded_bytes_per_row: u32,\n\n}\n\n\n\nimpl ImageDimentions {\n\n pub fn new(width: u32, height: u32, align: u32) -> Self {\n\n let height = 
height.saturating_sub(height % 2);\n", "file_path": "src/utils/mod.rs", "rank": 26, "score": 53946.35400121689 }, { "content": " let width = width.saturating_sub(width % 2);\n\n let bytes_per_pixel = std::mem::size_of::<[u8; 4]>() as u32;\n\n let unpadded_bytes_per_row = width * bytes_per_pixel;\n\n let row_padding = (align - unpadded_bytes_per_row % align) % align;\n\n let padded_bytes_per_row = unpadded_bytes_per_row + row_padding;\n\n Self {\n\n width,\n\n height,\n\n unpadded_bytes_per_row,\n\n padded_bytes_per_row,\n\n }\n\n }\n\n\n\n pub fn linear_size(&self) -> u64 {\n\n self.padded_bytes_per_row as u64 * self.height as u64\n\n }\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 27, "score": 53944.720815302484 }, { "content": " }\n\n}\n\n\n\nimpl<K, V> ContiniousHashMap<K, V> {\n\n /// Creates an empty [ContiniousHashMap]\n\n ///\n\n /// The hash map is initially created with a capacity of 0,\n\n /// so it will not allocate until it is first inserted into.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n\nimpl<K: Eq + Hash, V> ContiniousHashMap<K, V> {\n\n /// Inserts a key-value pair into the map.\n\n ///\n\n /// If the mep already contain this key this method will add\n\n /// a value instead of rewriting an old value.\n\n pub fn push_value(&mut self, key: K, value: V) {\n\n self.0.entry(key).or_insert_with(Vec::new).push(value);\n", "file_path": "src/utils/mod.rs", "rank": 28, "score": 53937.03619841919 }, { "content": "\n\n if let Some(ref mut process) = recorder {\n\n process.process.wait().unwrap();\n\n }\n\n drop(recorder);\n\n recorder = None;\n\n eprintln!(\"Recording finished\");\n\n }\n\n RecordEvent::Screenshot((frame, image_dimentions)) => {\n\n match save_screenshot(frame, image_dimentions) {\n\n Ok(_) => {}\n\n Err(err) => {\n\n eprintln!(\"{err}\")\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils/recorder.rs", "rank": 29, "score": 53857.398429633155 }, { "content": "use color_eyre::eyre::Result;\n\nuse 
crossbeam_channel::{Receiver, Sender};\n\nuse std::{\n\n fs::File,\n\n io::{BufWriter, Write},\n\n path::Path,\n\n process::{Child, Command, Stdio},\n\n time::Instant,\n\n};\n\n\n\n#[cfg(windows)]\n\nuse std::os::windows::process::CommandExt;\n\n\n\nuse super::ImageDimentions;\n\n\n\nuse crate::{utils::create_folder, SCREENSHOTS_FOLDER, VIDEO_FOLDER};\n\n\n\npub enum RecordEvent {\n\n Start(ImageDimentions),\n\n Record(Vec<u8>),\n", "file_path": "src/utils/recorder.rs", "rank": 30, "score": 53851.12824594369 }, { "content": " Finish,\n\n Screenshot((Vec<u8>, ImageDimentions)),\n\n}\n\n\n\npub struct Recorder {\n\n sender: Sender<RecordEvent>,\n\n ffmpeg_installed: bool,\n\n pub ffmpeg_version: String,\n\n}\n\n\n\nimpl Recorder {\n\n pub fn new() -> Self {\n\n let mut command = Command::new(\"ffmpeg\");\n\n command.arg(\"-version\");\n\n let (version, installed) = match command.output() {\n\n Ok(output) => (\n\n String::from_utf8(output.stdout)\n\n .unwrap()\n\n .lines()\n\n .next()\n", "file_path": "src/utils/recorder.rs", "rank": 31, "score": 53845.85399862296 }, { "content": " // puffin::profile_scope!(\"Process Frame\");\n\n\n\n if let Some(ref mut recorder) = recorder {\n\n let writer = recorder.process.stdin.as_mut().unwrap();\n\n let mut writer = BufWriter::new(writer);\n\n\n\n let padded_bytes = recorder.image_dimentions.padded_bytes_per_row as _;\n\n let unpadded_bytes = recorder.image_dimentions.unpadded_bytes_per_row as _;\n\n for chunk in frame\n\n .chunks(padded_bytes)\n\n .map(|chunk| &chunk[..unpadded_bytes])\n\n {\n\n writer.write_all(chunk).unwrap();\n\n }\n\n // writer.write_all(&frame).unwrap();\n\n writer.flush().unwrap();\n\n }\n\n }\n\n RecordEvent::Finish => {\n\n // puffin::profile_scope!(\"Stop Recording\");\n", "file_path": "src/utils/recorder.rs", "rank": 32, "score": 53842.854280435364 }, { "content": "\n\n pub fn send(&self, event: RecordEvent) {\n\n if matches!(\n\n event,\n\n RecordEvent::Finish | RecordEvent::Start(_) | 
RecordEvent::Record(_)\n\n ) && !self.ffmpeg_installed\n\n {\n\n return;\n\n }\n\n self.sender.send(event).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/utils/recorder.rs", "rank": 33, "score": 53840.78524351044 }, { "content": "\n\n let mut command = Command::new(\"ffmpeg\");\n\n command\n\n .arg(\"-video_size\")\n\n .arg(format!(\n\n \"{}x{}\",\n\n image_dimentions.unpadded_bytes_per_row / 4,\n\n image_dimentions.height\n\n ))\n\n .args(&args)\n\n .arg(filename)\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::inherit())\n\n .stderr(Stdio::inherit());\n\n\n\n #[cfg(windows)]\n\n {\n\n const WINAPI_UM_WINBASE_CREATE_NO_WINDOW: u32 = 0x08000000;\n\n // Not create terminal window\n\n command.creation_flags(WINAPI_UM_WINBASE_CREATE_NO_WINDOW);\n", "file_path": "src/utils/recorder.rs", "rank": 34, "score": 53839.18792198116 }, { "content": " .unwrap()\n\n .to_string(),\n\n true,\n\n ),\n\n Err(e) => (e.to_string(), false),\n\n };\n\n\n\n let (tx, rx) = crossbeam_channel::unbounded();\n\n std::thread::spawn(move || record_thread(rx));\n\n\n\n Self {\n\n sender: tx,\n\n ffmpeg_installed: installed,\n\n ffmpeg_version: version,\n\n }\n\n }\n\n\n\n pub fn ffmpeg_installed(&self) -> bool {\n\n self.ffmpeg_installed\n\n }\n", "file_path": "src/utils/recorder.rs", "rank": 35, "score": 53838.69267178013 }, { "content": " }\n\n\n\n let child = command.spawn()?;\n\n\n\n Ok(RecorderThread {\n\n process: child,\n\n image_dimentions,\n\n })\n\n}\n\n\n", "file_path": "src/utils/recorder.rs", "rank": 36, "score": 53833.49193655815 }, { "content": "use std::num::NonZeroU32;\n\n\n\nuse crate::utils::ImageDimentions;\n\nuse wgpu::{Device, MapMode};\n\n\n\npub struct ScreenshotCtx {\n\n pub image_dimentions: ImageDimentions,\n\n data: wgpu::Buffer,\n\n}\n\n\n\nimpl ScreenshotCtx {\n\n pub fn resize(&mut self, device: &Device, width: u32, height: u32) {\n\n // puffin::profile_function!();\n\n let new_dims = ImageDimentions::new(width, height, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);\n\n if 
new_dims.linear_size() > self.image_dimentions.linear_size() {\n\n // puffin::profile_scope!(\"Reallocating Buffer\");\n\n let image_dimentions =\n\n ImageDimentions::new(width, height, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);\n\n\n\n self.data = create_host_buffer(device, &image_dimentions);\n", "file_path": "src/context/screenshot.rs", "rank": 37, "score": 53309.09023062609 }, { "content": " }\n\n self.image_dimentions = new_dims;\n\n }\n\n\n\n pub fn new(device: &Device, width: u32, height: u32) -> Self {\n\n let image_dimentions =\n\n ImageDimentions::new(width, height, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);\n\n\n\n let data = create_host_buffer(device, &image_dimentions);\n\n\n\n Self {\n\n image_dimentions,\n\n data,\n\n }\n\n }\n\n\n\n pub fn capture_frame(\n\n &self,\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n", "file_path": "src/context/screenshot.rs", "rank": 38, "score": 53302.7376016883 }, { "content": " src_texture: &wgpu::Texture,\n\n ) -> (Vec<u8>, ImageDimentions) {\n\n // puffin::profile_function!();\n\n let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"Capture Encoder\"),\n\n });\n\n let copy_size = wgpu::Extent3d {\n\n width: self.image_dimentions.width,\n\n height: self.image_dimentions.height,\n\n depth_or_array_layers: 1,\n\n };\n\n encoder.copy_texture_to_buffer(\n\n src_texture.as_image_copy(),\n\n wgpu::ImageCopyBuffer {\n\n buffer: &self.data,\n\n layout: wgpu::ImageDataLayout {\n\n offset: 0,\n\n bytes_per_row: Some(\n\n NonZeroU32::new(self.image_dimentions.padded_bytes_per_row).unwrap(),\n\n ),\n", "file_path": "src/context/screenshot.rs", "rank": 39, "score": 53297.52785428528 }, { "content": " rows_per_image: Some(NonZeroU32::new(self.image_dimentions.height).unwrap()),\n\n },\n\n },\n\n copy_size,\n\n );\n\n\n\n queue.submit(Some(encoder.finish()));\n\n\n\n let image_slice = self.data.slice(0..self.image_dimentions.linear_size());\n\n let _ = 
image_slice.map_async(MapMode::Read);\n\n\n\n device.poll(wgpu::Maintain::Wait);\n\n let frame = image_slice.get_mapped_range().to_vec();\n\n self.data.unmap();\n\n\n\n (frame, self.image_dimentions)\n\n }\n\n}\n\n\n", "file_path": "src/context/screenshot.rs", "rank": 40, "score": 53297.32692028864 }, { "content": "use std::time::Instant;\n\n\n\npub struct FrameCounter {\n\n pub frame_count: u32,\n\n accum_time: f32,\n\n last_inst: Instant,\n\n}\n\n\n\nimpl FrameCounter {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn time_delta(&self) -> f32 {\n\n self.accum_time * 1000.0 / self.frame_count as f32\n\n }\n\n\n\n pub fn record(&mut self) -> f32 /* dt */ {\n\n self.accum_time += self.last_inst.elapsed().as_secs_f32();\n\n self.last_inst = Instant::now();\n", "file_path": "src/utils/frame_counter.rs", "rank": 41, "score": 51136.65727810947 }, { "content": "\n\n self.frame_count += 1;\n\n if self.frame_count == 100 {\n\n println!(\"Avg frame time {}ms\", self.time_delta());\n\n self.accum_time = 0.0;\n\n self.frame_count = 0;\n\n }\n\n self.accum_time\n\n }\n\n}\n\n\n\nimpl Default for FrameCounter {\n\n fn default() -> Self {\n\n Self {\n\n frame_count: 0,\n\n accum_time: 0.,\n\n last_inst: Instant::now(),\n\n }\n\n }\n\n}\n", "file_path": "src/utils/frame_counter.rs", "rank": 42, "score": 51122.90537730259 }, { "content": " Self::Read(err) => write!(f, \"{}\", err),\n\n Self::WriteSpirv(err) => write!(f, \"{}\", err),\n\n Self::Validate(err) => write!(f, \"{}\", err),\n\n Self::Compile { error, source } => {\n\n error.emit_to_stderr(source);\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for CompilerError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Read(err) => write!(f, \"{}\", err),\n\n Self::WriteSpirv(err) => write!(f, \"{}\", err),\n\n Self::Validate(err) => write!(f, \"{}\", err),\n\n Self::Compile { error, source } => write!(f, \"{}\", 
error.emit_to_string(source)),\n\n }\n\n }\n", "file_path": "src/utils/shader_compiler.rs", "rank": 43, "score": 50969.59653940539 }, { "content": "use std::path::Path;\n\n\n\nuse color_eyre::Result;\n\nuse naga::{\n\n back::spv::{self, BindingMap},\n\n front::wgsl,\n\n valid::{Capabilities, ValidationError, ValidationFlags, Validator},\n\n};\n\n\n\npub struct ShaderCompiler {\n\n parser: wgsl::Parser,\n\n validator: Validator,\n\n writer: spv::Writer,\n\n}\n\n\n\nimpl ShaderCompiler {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n", "file_path": "src/utils/shader_compiler.rs", "rank": 44, "score": 50969.392427331164 }, { "content": "}\n\n\n\nimpl std::error::Error for CompilerError {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match *self {\n\n Self::Read(ref e) => Some(e),\n\n Self::Compile { error: ref e, .. } => Some(e),\n\n Self::Validate(ref e) => Some(e),\n\n Self::WriteSpirv(ref e) => Some(e),\n\n }\n\n }\n\n}\n", "file_path": "src/utils/shader_compiler.rs", "rank": 45, "score": 50969.38184153667 }, { "content": " fn from(e: std::io::Error) -> Self {\n\n Self::Read(e)\n\n }\n\n}\n\n\n\nimpl From<naga::WithSpan<ValidationError>> for CompilerError {\n\n fn from(e: naga::WithSpan<ValidationError>) -> Self {\n\n Self::Validate(e)\n\n }\n\n}\n\n\n\nimpl From<spv::Error> for CompilerError {\n\n fn from(e: spv::Error) -> Self {\n\n Self::WriteSpirv(e)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for CompilerError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n", "file_path": "src/utils/shader_compiler.rs", "rank": 46, "score": 50966.776111181935 }, { "content": " pub fn create_shader_module(&mut self, path: &Path) -> Result<Vec<u32>, CompilerError> {\n\n let source = std::fs::read_to_string(&path)?;\n\n let module = self\n\n .parser\n\n .parse(&source)\n\n .map_err(|error| CompilerError::Compile { error, source })?;\n\n let module_info = self.validator.validate(&module)?;\n\n let 
mut words = vec![];\n\n self.writer.write(&module, &module_info, None, &mut words)?;\n\n Ok(words)\n\n }\n\n}\n\n\n\nimpl Default for ShaderCompiler {\n\n fn default() -> Self {\n\n let parser = wgsl::Parser::new();\n\n let validator = Validator::new(ValidationFlags::all(), Capabilities::all());\n\n let options = get_options();\n\n let writer = spv::Writer::new(&options).unwrap();\n\n Self {\n\n parser,\n\n validator,\n\n writer,\n\n }\n\n }\n\n}\n\n\n\n// https://github.com/gfx-rs/wgpu/blob/master/wgpu-hal/src/vulkan/adapter.rs#L1166\n", "file_path": "src/utils/shader_compiler.rs", "rank": 47, "score": 50966.10637888476 }, { "content": " bounds_check_policies: naga::proc::BoundsCheckPolicies {\n\n index: naga::proc::BoundsCheckPolicy::Unchecked,\n\n buffer: naga::proc::BoundsCheckPolicy::Unchecked,\n\n image: naga::proc::BoundsCheckPolicy::Unchecked,\n\n binding_array: naga::proc::BoundsCheckPolicy::Unchecked,\n\n },\n\n }\n\n}\n\n\n\npub enum CompilerError {\n\n Read(std::io::Error),\n\n Compile {\n\n error: wgsl::ParseError,\n\n source: String,\n\n },\n\n Validate(naga::WithSpan<ValidationError>),\n\n WriteSpirv(spv::Error),\n\n}\n\n\n\nimpl From<std::io::Error> for CompilerError {\n", "file_path": "src/utils/shader_compiler.rs", "rank": 48, "score": 50959.17144220232 }, { "content": " spv::WriterFlags::DEBUG,\n\n true,\n\n // self.instance.flags.contains(crate::InstanceFlags::DEBUG),\n\n );\n\n flags.set(\n\n spv::WriterFlags::LABEL_VARYINGS,\n\n true, // self.phd_capabilities.properties.vendor_id != crate::auxil::db::qualcomm::VENDOR,\n\n );\n\n flags.set(\n\n spv::WriterFlags::FORCE_POINT_SIZE,\n\n //Note: we could technically disable this when we are compiling separate entry points,\n\n // and we know exactly that the primitive topology is not `PointList`.\n\n // But this requires cloning the `spv::Options` struct, which has heap allocations.\n\n true, // could check `super::Workarounds::SEPARATE_ENTRY_POINTS`\n\n );\n\n spv::Options {\n\n binding_map: 
BindingMap::new(),\n\n lang_version: (1, 0),\n\n flags,\n\n capabilities: Some(capabilities.into_iter().collect()),\n", "file_path": "src/utils/shader_compiler.rs", "rank": 49, "score": 50954.861437093954 }, { "content": "pub mod basic_with_camera;\n", "file_path": "src/context/pipelines/mod.rs", "rank": 50, "score": 50225.59299680817 }, { "content": "use std::path::Path;\n\n\n\nuse crate::{\n\n camera::CameraBinding,\n\n context::{global_ubo::GlobalUniformBinding, Uniform},\n\n utils::shader_compiler::ShaderCompiler,\n\n watcher::ReloadablePipeline,\n\n};\n\n\n\npub struct BasicPipeline {\n\n pub pipeline: wgpu::RenderPipeline,\n\n surface_format: wgpu::TextureFormat,\n\n}\n\n\n\nimpl BasicPipeline {\n\n pub fn from_path(\n\n device: &wgpu::Device,\n\n format: wgpu::TextureFormat,\n\n path: &Path,\n\n compiler: &mut ShaderCompiler,\n", "file_path": "src/context/pipelines/basic_with_camera.rs", "rank": 51, "score": 47460.846028509986 }, { "content": " Self {\n\n pipeline,\n\n surface_format,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> BasicPipeline {\n\n pub fn record<'pass>(\n\n &'a self,\n\n rpass: &mut wgpu::RenderPass<'pass>,\n\n uniform_bind_group: &'a GlobalUniformBinding,\n\n camera_bind_group: &'a CameraBinding,\n\n ) where\n\n 'a: 'pass,\n\n {\n\n rpass.set_pipeline(&self.pipeline);\n\n rpass.set_bind_group(0, &uniform_bind_group.binding, &[]);\n\n rpass.set_bind_group(1, &camera_bind_group.bind_group, &[]);\n\n rpass.draw(0..3, 0..1);\n\n }\n\n}\n\n\n\nimpl ReloadablePipeline for BasicPipeline {\n\n fn reload(&mut self, device: &wgpu::Device, module: &wgpu::ShaderModule) {\n\n *self = Self::new_with_module(device, self.surface_format, module);\n\n }\n\n}\n", "file_path": "src/context/pipelines/basic_with_camera.rs", "rank": 52, "score": 47452.685293805676 }, { "content": " ) -> Self {\n\n let shader = unsafe {\n\n device.create_shader_module_spirv(&wgpu::ShaderModuleDescriptorSpirV {\n\n label: path.to_str(),\n\n source: 
compiler.create_shader_module(path).unwrap().into(),\n\n })\n\n };\n\n Self::new_with_module(device, format, &shader)\n\n }\n\n\n\n pub fn new_with_module(\n\n device: &wgpu::Device,\n\n surface_format: wgpu::TextureFormat,\n\n shader: &wgpu::ShaderModule,\n\n ) -> Self {\n\n let global_bind_group_layout = device.create_bind_group_layout(&Uniform::DESC);\n\n let camera_bind_group_layout = device.create_bind_group_layout(&CameraBinding::DESC);\n\n let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: Some(\"Screen Pass Layout\"),\n\n bind_group_layouts: &[&global_bind_group_layout, &camera_bind_group_layout],\n", "file_path": "src/context/pipelines/basic_with_camera.rs", "rank": 53, "score": 47450.07269568609 }, { "content": " push_constant_ranges: &[],\n\n });\n\n let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(\"Render with Camera Pipeline\"),\n\n layout: Some(&layout),\n\n fragment: Some(wgpu::FragmentState {\n\n module: shader,\n\n entry_point: \"fs_main\",\n\n targets: &[surface_format.into()],\n\n }),\n\n vertex: wgpu::VertexState {\n\n module: shader,\n\n entry_point: \"vs_main\",\n\n buffers: &[],\n\n },\n\n primitive: wgpu::PrimitiveState::default(),\n\n depth_stencil: None,\n\n multisample: wgpu::MultisampleState::default(),\n\n multiview: None,\n\n });\n", "file_path": "src/context/pipelines/basic_with_camera.rs", "rank": 54, "score": 47445.31102443588 }, { "content": "use color_eyre::eyre::Result;\n\nuse notify::{\n\n event::{AccessKind, AccessMode},\n\n Config, EventKind, Watcher as WatcherTrait,\n\n};\n\nuse winit::event_loop::EventLoop;\n\n\n\nuse std::{\n\n ffi::OsStr,\n\n path::{Path, PathBuf},\n\n rc::Rc,\n\n sync::Arc,\n\n};\n\n\n\nuse crate::{\n\n context::PipelineHandle,\n\n utils::{shader_compiler::ShaderCompiler, ContiniousHashMap},\n\n};\n\nuse crate::{shader_compiler::CompilerError, SHADER_FOLDER};\n\n\n", "file_path": "src/watcher.rs", "rank": 55, "score": 
29333.6438021848 }, { "content": " Ok(x) => {\n\n let device_ref = device.upgrade().unwrap();\n\n let module = unsafe {\n\n device_ref.create_shader_module_spirv(\n\n &wgpu::ShaderModuleDescriptorSpirV {\n\n label: path.to_str(),\n\n source: x.into(),\n\n },\n\n )\n\n };\n\n proxy\n\n .send_event((path, module))\n\n .expect(\"Event Loop have been dropped\");\n\n crate::utils::green_blink();\n\n }\n\n Err(err) => match err {\n\n CompilerError::Compile { error, source } => {\n\n let file_name = match path.file_name().and_then(|x| x.to_str()) {\n\n Some(name) => name,\n\n None => \"wgsl\",\n", "file_path": "src/watcher.rs", "rank": 56, "score": 29325.257463413735 }, { "content": " };\n\n error.emit_to_stderr_with_path(&source, file_name);\n\n }\n\n _ => eprintln!(\"{err}\"),\n\n },\n\n };\n\n }\n\n }\n\n }\n\n Err(err) => {\n\n eprintln!(\"File watcher error: {err}\");\n\n }\n\n }\n\n}\n", "file_path": "src/watcher.rs", "rank": 57, "score": 29315.325945618155 }, { "content": " hash_dump: ContiniousHashMap::new(),\n\n })\n\n }\n\n\n\n pub fn register<T: ReloadablePipeline + 'static>(\n\n &mut self,\n\n path: &impl AsRef<Path>,\n\n pipeline: T,\n\n ) -> PipelineHandle<T> {\n\n let pipeline_ref = Rc::new(pipeline);\n\n self.hash_dump\n\n .push_value(path.as_ref().canonicalize().unwrap(), pipeline_ref.clone());\n\n pipeline_ref\n\n }\n\n}\n\n\n", "file_path": "src/watcher.rs", "rank": 58, "score": 29314.70052838402 }, { "content": "use crate::utils::NonZeroSized;\n\nuse glam::{Mat4, Vec3};\n\nuse wgpu::util::DeviceExt;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct CameraUniform {\n\n pub view_position: [f32; 4],\n\n pub proj_view: [[f32; 4]; 4],\n\n pub inv_proj: [[f32; 4]; 4],\n\n}\n\n\n\nimpl Default for CameraUniform {\n\n fn default() -> Self {\n\n Self {\n\n view_position: [0.0; 4],\n\n proj_view: Mat4::IDENTITY.to_cols_array_2d(),\n\n inv_proj: Mat4::IDENTITY.to_cols_array_2d(),\n\n }\n\n }\n", "file_path": 
"src/camera.rs", "rank": 59, "score": 29070.069351634807 }, { "content": " self.updated = true;\n\n }\n\n\n\n pub fn add_yaw(&mut self, delta: f32) {\n\n self.set_yaw(self.yaw + delta);\n\n }\n\n\n\n fn fix_eye(&mut self) {\n\n let pitch_cos = self.pitch.cos();\n\n self.eye = self.target\n\n - self.zoom\n\n * Vec3::new(\n\n self.yaw.sin() * pitch_cos,\n\n self.pitch.sin(),\n\n self.yaw.cos() * pitch_cos,\n\n );\n\n }\n\n\n\n pub fn set_aspect(&mut self, width: u32, height: u32) {\n\n self.aspect = width as f32 / height as f32;\n", "file_path": "src/camera.rs", "rank": 60, "score": 29066.547537762355 }, { "content": " pub up: Vec3,\n\n pub aspect: f32,\n\n\n\n updated: bool,\n\n}\n\n\n\nimpl Camera {\n\n const ZFAR: f32 = 100.;\n\n const ZNEAR: f32 = 0.1;\n\n const FOVY: f32 = std::f32::consts::PI / 2.0;\n\n const UP: Vec3 = Vec3::Y;\n\n\n\n pub fn new(zoom: f32, pitch: f32, yaw: f32, target: Vec3, aspect: f32) -> Self {\n\n let mut camera = Self {\n\n zoom,\n\n pitch,\n\n yaw,\n\n eye: Vec3::ZERO,\n\n target,\n\n up: Self::UP,\n", "file_path": "src/camera.rs", "rank": 61, "score": 29065.992478404805 }, { "content": " pub fn add_zoom(&mut self, delta: f32) {\n\n self.set_zoom(self.zoom + delta);\n\n }\n\n\n\n pub fn set_pitch(&mut self, pitch: f32) {\n\n self.pitch = pitch.clamp(\n\n -std::f32::consts::PI / 2.0 + f32::EPSILON,\n\n std::f32::consts::PI / 2.0 - f32::EPSILON,\n\n );\n\n self.fix_eye();\n\n self.updated = true;\n\n }\n\n\n\n pub fn add_pitch(&mut self, delta: f32) {\n\n self.set_pitch(self.pitch + delta);\n\n }\n\n\n\n pub fn set_yaw(&mut self, yaw: f32) {\n\n self.yaw = yaw;\n\n self.fix_eye();\n", "file_path": "src/camera.rs", "rank": 62, "score": 29065.666573001956 }, { "content": "}\n\n\n\npub struct CameraBinding {\n\n pub buffer: wgpu::Buffer,\n\n pub bind_group: wgpu::BindGroup,\n\n}\n\n\n\nimpl CameraBinding {\n\n pub const DESC: wgpu::BindGroupLayoutDescriptor<'static> = wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"Camera Bind Group 
Layout\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::VERTEX_FRAGMENT.union(wgpu::ShaderStages::COMPUTE),\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: Some(CameraUniform::SIZE),\n\n },\n\n count: None,\n\n }],\n", "file_path": "src/camera.rs", "rank": 63, "score": 29063.51077463862 }, { "content": "\n\n pub fn update(&mut self, queue: &wgpu::Queue, camera: &mut Camera) {\n\n if camera.updated {\n\n queue.write_buffer(\n\n &self.buffer,\n\n 0,\n\n bytemuck::bytes_of(&camera.get_proj_view_matrix()),\n\n );\n\n camera.updated = false;\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Camera {\n\n pub zoom: f32,\n\n pub target: Vec3,\n\n pub eye: Vec3,\n\n pub pitch: f32,\n\n pub yaw: f32,\n", "file_path": "src/camera.rs", "rank": 64, "score": 29062.246870341693 }, { "content": " aspect,\n\n\n\n updated: false,\n\n };\n\n camera.fix_eye();\n\n camera\n\n }\n\n\n\n pub fn build_projection_view_matrix(&self) -> Mat4 {\n\n let view = Mat4::look_at_rh(self.eye, self.target, self.up);\n\n let proj = Mat4::perspective_rh(Self::FOVY, self.aspect, Self::ZNEAR, Self::ZFAR);\n\n proj * view\n\n }\n\n\n\n pub fn set_zoom(&mut self, zoom: f32) {\n\n self.zoom = zoom.clamp(0.3, Self::ZFAR / 2.);\n\n self.fix_eye();\n\n self.updated = true;\n\n }\n\n\n", "file_path": "src/camera.rs", "rank": 65, "score": 29061.665747061434 }, { "content": " };\n\n\n\n pub fn new(device: &wgpu::Device) -> Self {\n\n let buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"Camera Buffer\"),\n\n contents: bytemuck::bytes_of(&CameraUniform::default()),\n\n usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,\n\n });\n\n let layout = device.create_bind_group_layout(&Self::DESC);\n\n let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n label: Some(\"Camera Bind Group\"),\n\n 
layout: &layout,\n\n entries: &[wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: buffer.as_entire_binding(),\n\n }],\n\n });\n\n\n\n Self { buffer, bind_group }\n\n }\n", "file_path": "src/camera.rs", "rank": 66, "score": 29059.849341251676 }, { "content": " self.updated = true;\n\n }\n\n\n\n pub fn get_proj_view_matrix(&self) -> CameraUniform {\n\n let proj_view = self.build_projection_view_matrix();\n\n CameraUniform {\n\n view_position: [self.eye.x, self.eye.y, self.eye.z, 1.0],\n\n proj_view: proj_view.to_cols_array_2d(),\n\n inv_proj: proj_view.inverse().to_cols_array_2d(),\n\n }\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 67, "score": 29056.82385244844 }, { "content": "use present_pipeline::PresentPipeline;\n\n\n\nuse crate::{\n\n camera::{Camera, CameraBinding},\n\n utils::{frame_counter::FrameCounter, shader_compiler::ShaderCompiler},\n\n utils::{input::Input, ImageDimentions},\n\n watcher::Watcher,\n\n};\n\n\n\npub use global_ubo::GlobalUniformBinding;\n\npub use global_ubo::Uniform;\n\npub use volume_texture::VolumeTexture;\n\n\n\nuse screenshot::ScreenshotCtx;\n\n\n\npub type PipelineHandle<T> = Rc<T>;\n\n\n\npub struct Context {\n\n pub watcher: Watcher,\n\n pub shader_compiler: ShaderCompiler,\n", "file_path": "src/context.rs", "rank": 68, "score": 27361.894718938234 }, { "content": " wgpu::DeviceType::Cpu => \"CPU\",\n\n }\n\n }\n\n\n\n pub fn update(&mut self, frame_counter: &FrameCounter, input: &Input) {\n\n self.global_uniform.time = self.timeline.elapsed().as_secs_f32();\n\n self.global_uniform.time_delta = frame_counter.time_delta();\n\n self.global_uniform.frame = frame_counter.frame_count;\n\n self.global_uniform.resolution = [self.width as _, self.height as _];\n\n input.process_position(&mut self.global_uniform);\n\n\n\n self.global_uniform_binding\n\n .update(&self.queue, &self.global_uniform);\n\n\n\n self.camera_binding.update(&self.queue, &mut self.camera);\n\n }\n\n\n\n pub fn resize(&mut self, width: u32, height: u32) 
{\n\n self.width = width;\n\n self.height = height;\n", "file_path": "src/context.rs", "rank": 69, "score": 27359.2624773926 }, { "content": " timeline: Instant,\n\n\n\n pub global_uniform: Uniform,\n\n pub global_uniform_binding: GlobalUniformBinding,\n\n\n\n present_pipeline: PipelineHandle<PresentPipeline>,\n\n}\n\n\n\nimpl Context {\n\n /// Create a new window with a given `window`\n\n pub async fn new(\n\n window: &Window,\n\n event_loop: &winit::event_loop::EventLoop<(PathBuf, wgpu::ShaderModule)>,\n\n camera: Option<Camera>,\n\n ) -> Result<Self> {\n\n // Create new instance using first-tier backend of WGPU\n\n // One of Vulkan + Metal + DX12 + Browser WebGPU\n\n let instance = Instance::new(wgpu::Backends::PRIMARY);\n\n\n\n // Create a `surface` represents a platform-specific window\n", "file_path": "src/context.rs", "rank": 70, "score": 27355.798070632853 }, { "content": "use std::{\n\n path::{Path, PathBuf},\n\n rc::Rc,\n\n sync::Arc,\n\n time::Instant,\n\n};\n\n\n\nuse color_eyre::eyre::{eyre, Result};\n\nuse wgpu::Instance;\n\nuse winit::{dpi::PhysicalSize, window::Window};\n\n\n\nmod global_ubo;\n\nmod hdr_backbuffer;\n\n#[allow(dead_code)]\n\nmod pipelines;\n\nmod present_pipeline;\n\nmod screenshot;\n\nmod volume_texture;\n\n\n\npub use hdr_backbuffer::HdrBackBuffer;\n", "file_path": "src/context.rs", "rank": 71, "score": 27352.467118681845 }, { "content": "\n\n let mut watcher = Watcher::new(device.clone(), event_loop)?;\n\n\n\n let camera = camera.unwrap_or_else(|| {\n\n Camera::new(\n\n 1.,\n\n 0.5,\n\n 1.,\n\n (0., 0., 0.).into(),\n\n width as f32 / height as f32,\n\n )\n\n });\n\n let render_backbuffer = HdrBackBuffer::new(&device, HdrBackBuffer::DEFAULT_RESOLUTION);\n\n let rgb_texture = create_rgb_framebuffer(&device, &surface_config);\n\n\n\n let mut shader_compiler = ShaderCompiler::new();\n\n\n\n let present_shader = Path::new(\"shaders/present.wgsl\");\n\n let present_pipeline = PresentPipeline::from_path(\n\n &device,\n", "file_path": 
"src/context.rs", "rank": 72, "score": 27352.464810055906 }, { "content": " .capture_frame(&self.device, &self.queue, &self.rgb_texture)\n\n }\n\n\n\n pub fn capture_image_dimentions(&self) -> ImageDimentions {\n\n self.screenshot_ctx.image_dimentions\n\n }\n\n\n\n pub fn register_shader_change(&mut self, path: PathBuf, shader: wgpu::ShaderModule) {\n\n if let Some(pipelines) = self.watcher.hash_dump.get_mut(&path) {\n\n for pipeline in pipelines.iter_mut() {\n\n // SAFETY: no safety\n\n let pipeline_ref = unsafe { Rc::get_mut_unchecked(pipeline) };\n\n pipeline_ref.reload(&self.device, &shader);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RendererInfo {\n", "file_path": "src/context.rs", "rank": 73, "score": 27351.335173399893 }, { "content": " surface_format,\n\n present_shader,\n\n &mut shader_compiler,\n\n );\n\n let present_pipeline = watcher.register(&present_shader, present_pipeline);\n\n\n\n Ok(Self {\n\n shader_compiler,\n\n camera,\n\n camera_binding: CameraBinding::new(&device),\n\n\n\n screenshot_ctx: ScreenshotCtx::new(\n\n &device,\n\n surface_config.width,\n\n surface_config.height,\n\n ),\n\n\n\n rgb_texture,\n\n\n\n render_backbuffer,\n", "file_path": "src/context.rs", "rank": 74, "score": 27349.038910641793 }, { "content": " self.surface_config.height = height;\n\n self.surface_config.width = width;\n\n self.surface.configure(&self.device, &self.surface_config);\n\n\n\n self.screenshot_ctx.resize(&self.device, width, height);\n\n self.rgb_texture = create_rgb_framebuffer(&self.device, &self.surface_config);\n\n\n\n self.camera.set_aspect(width, height);\n\n }\n\n\n\n pub fn render(&self) -> Result<(), wgpu::SurfaceError> {\n\n let frame = self.surface.get_current_texture()?;\n\n let frame_view = frame.texture.create_view(&Default::default());\n\n\n\n let mut encoder = self\n\n .device\n\n .create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"Present Encoder\"),\n\n });\n\n\n", "file_path": "src/context.rs", 
"rank": 75, "score": 27347.01786346188 }, { "content": " ],\n\n depth_stencil_attachment: None,\n\n });\n\n\n\n self.present_pipeline.record(\n\n &mut rpass,\n\n &self.global_uniform_binding,\n\n &self.render_backbuffer.render_bind_group,\n\n );\n\n drop(rpass);\n\n\n\n self.queue.submit(Some(encoder.finish()));\n\n\n\n frame.present();\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn capture_frame(&self) -> (Vec<u8>, ImageDimentions) {\n\n self.screenshot_ctx\n", "file_path": "src/context.rs", "rank": 76, "score": 27345.28584424803 }, { "content": "\n\n adapter: wgpu::Adapter,\n\n pub device: Arc<wgpu::Device>,\n\n pub queue: wgpu::Queue,\n\n surface: wgpu::Surface,\n\n pub surface_config: wgpu::SurfaceConfiguration,\n\n pub limits: wgpu::Limits,\n\n\n\n screenshot_ctx: screenshot::ScreenshotCtx,\n\n\n\n pub camera: Camera,\n\n pub camera_binding: CameraBinding,\n\n\n\n pub render_backbuffer: HdrBackBuffer,\n\n\n\n rgb_texture: wgpu::Texture,\n\n\n\n pub width: u32,\n\n pub height: u32,\n\n\n", "file_path": "src/context.rs", "rank": 77, "score": 27342.883707196277 }, { "content": " .request_device(\n\n &wgpu::DeviceDescriptor {\n\n label: Some(\"Device Descriptor\"),\n\n features,\n\n limits: limits.clone(),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let device = Arc::new(device);\n\n\n\n let PhysicalSize { width, height } = window.inner_size();\n\n let surface_config = wgpu::SurfaceConfiguration {\n\n usage: wgpu::TextureUsages::RENDER_ATTACHMENT,\n\n format: surface_format,\n\n width,\n\n height,\n\n present_mode: wgpu::PresentMode::Fifo,\n\n };\n\n surface.configure(&device, &surface_config);\n", "file_path": "src/context.rs", "rank": 78, "score": 27340.90679763427 }, { "content": "\n\n width,\n\n height,\n\n\n\n timeline: Instant::now(),\n\n\n\n watcher,\n\n\n\n present_pipeline,\n\n\n\n global_uniform: Uniform::default(),\n\n global_uniform_binding: GlobalUniformBinding::new(&device),\n\n\n\n device,\n\n adapter,\n\n queue,\n\n surface,\n\n surface_config,\n\n limits,\n\n 
})\n", "file_path": "src/context.rs", "rank": 79, "score": 27340.04708716968 }, { "content": " pub device_name: String,\n\n pub device_type: String,\n\n pub vendor_name: String,\n\n pub backend: String,\n\n pub screen_format: wgpu::TextureFormat,\n\n}\n\n\n\nimpl std::fmt::Display for RendererInfo {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n writeln!(f, \"Vendor name: {}\", self.vendor_name)?;\n\n writeln!(f, \"Device name: {}\", self.device_name)?;\n\n writeln!(f, \"Device type: {}\", self.device_type)?;\n\n writeln!(f, \"Backend: {}\", self.backend)?;\n\n write!(f, \"Screen format: {:?}\", self.screen_format)?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/context.rs", "rank": 80, "score": 27338.630171013017 }, { "content": " // onto which rendered images may be presented\n\n let surface = unsafe { instance.create_surface(&window) };\n\n\n\n // Get a handle to a physical device\n\n let adapter: wgpu::Adapter = instance\n\n .request_adapter(&wgpu::RequestAdapterOptions {\n\n power_preference: wgpu::PowerPreference::HighPerformance,\n\n force_fallback_adapter: false,\n\n compatible_surface: Some(&surface),\n\n })\n\n .await\n\n .ok_or(eyre!(\"Failed to create device adapter.\"))?;\n\n\n\n // Use default features and limits for your machine\n\n let features = adapter.features();\n\n let limits = adapter.limits();\n\n let surface_format = wgpu::TextureFormat::Bgra8Unorm;\n\n\n\n // Create the logical device and command queue\n\n let (device, queue) = adapter\n", "file_path": "src/context.rs", "rank": 81, "score": 27335.29954391537 }, { "content": " }\n\n\n\n pub fn get_info(&self) -> RendererInfo {\n\n let info = self.adapter.get_info();\n\n RendererInfo {\n\n device_name: info.name,\n\n device_type: self.get_device_type().to_string(),\n\n vendor_name: self.get_vendor_name().to_string(),\n\n backend: self.get_backend().to_string(),\n\n screen_format: self.surface_config.format,\n\n }\n\n }\n\n fn get_vendor_name(&self) -> &str 
{\n\n match self.adapter.get_info().vendor {\n\n 0x1002 => \"AMD\",\n\n 0x1010 => \"ImgTec\",\n\n 0x10DE => \"NVIDIA Corporation\",\n\n 0x13B5 => \"ARM\",\n\n 0x5143 => \"Qualcomm\",\n\n 0x8086 => \"INTEL Corporation\",\n", "file_path": "src/context.rs", "rank": 82, "score": 27333.85428709259 }, { "content": " _ => \"Unknown vendor\",\n\n }\n\n }\n\n fn get_backend(&self) -> &str {\n\n match self.adapter.get_info().backend {\n\n wgpu::Backend::Empty => \"Empty\",\n\n wgpu::Backend::Vulkan => \"Vulkan\",\n\n wgpu::Backend::Metal => \"Metal\",\n\n wgpu::Backend::Dx12 => \"Dx12\",\n\n wgpu::Backend::Dx11 => \"Dx11\",\n\n wgpu::Backend::Gl => \"GL\",\n\n wgpu::Backend::BrowserWebGpu => \"Browser WGPU\",\n\n }\n\n }\n\n fn get_device_type(&self) -> &str {\n\n match self.adapter.get_info().device_type {\n\n wgpu::DeviceType::Other => \"Other\",\n\n wgpu::DeviceType::IntegratedGpu => \"Integrated GPU\",\n\n wgpu::DeviceType::DiscreteGpu => \"Discrete GPU\",\n\n wgpu::DeviceType::VirtualGpu => \"Virtual GPU\",\n", "file_path": "src/context.rs", "rank": 83, "score": 27333.49124995721 }, { "content": " let rgb = self.rgb_texture.create_view(&Default::default());\n\n let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n label: Some(\"Present Pass\"),\n\n color_attachments: &[\n\n wgpu::RenderPassColorAttachment {\n\n view: &frame_view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),\n\n store: true,\n\n },\n\n },\n\n wgpu::RenderPassColorAttachment {\n\n view: &rgb,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),\n\n store: true,\n\n },\n\n },\n", "file_path": "src/context.rs", "rank": 84, "score": 27331.865366602986 }, { "content": "impl std::fmt::Display for Uniform {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let time = Duration::from_secs_f32(self.time);\n\n let time_delta = 
Duration::from_secs_f32(self.time_delta);\n\n write!(\n\n f,\n\n \"position:\\t{:?}\\n\\\n\n time:\\t\\t{:#.2?}\\n\\\n\n time delta:\\t{:#.3?}, fps: {:#.2?}\\n\\\n\n width, height:\\t{:?}\\nmouse:\\t\\t{:.2?}\\n\\\n\n frame:\\t\\t{}\\n\",\n\n // record_period:\\t{}\\n\",\n\n self.pos,\n\n time,\n\n time_delta,\n\n 1. / self.time_delta,\n\n self.resolution,\n\n self.mouse,\n\n self.frame,\n\n // self.record_period\n\n )\n\n }\n\n}\n", "file_path": "src/context/global_ubo.rs", "rank": 85, "score": 24306.20414221252 }, { "content": " pub time_delta: f32,\n\n _padding: f32,\n\n // pub record_period: f32,\n\n // _padding2: [f32; 3],\n\n}\n\n\n\nimpl Default for Uniform {\n\n fn default() -> Self {\n\n Self {\n\n pos: [0.; 3],\n\n time: 0.,\n\n resolution: [1920.0, 780.],\n\n mouse: [0.; 2],\n\n mouse_pressed: false as _,\n\n frame: 0,\n\n time_delta: 1. / 60.,\n\n _padding: 0.,\n\n // record_period: 10.,\n\n }\n\n }\n", "file_path": "src/context/global_ubo.rs", "rank": 86, "score": 24301.964724627833 }, { "content": "use std::path::Path;\n\n\n\nuse crate::{\n\n context::{global_ubo::GlobalUniformBinding, Uniform},\n\n utils::shader_compiler::ShaderCompiler,\n\n watcher::ReloadablePipeline,\n\n};\n\n\n\npub struct PresentPipeline {\n\n pub pipeline: wgpu::RenderPipeline,\n\n surface_format: wgpu::TextureFormat,\n\n sampler_bind_group: wgpu::BindGroup,\n\n}\n\n\n\nimpl PresentPipeline {\n\n pub fn from_path(\n\n device: &wgpu::Device,\n\n surface_format: wgpu::TextureFormat,\n\n path: &Path,\n\n compiler: &mut ShaderCompiler,\n", "file_path": "src/context/present_pipeline.rs", "rank": 87, "score": 24298.576853272476 }, { "content": " Self {\n\n binding: uniform,\n\n buffer,\n\n }\n\n }\n\n\n\n pub fn update(&mut self, queue: &wgpu::Queue, uniform: &Uniform) {\n\n queue.write_buffer(&self.buffer, 0, bytemuck::bytes_of(uniform))\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Clone, Copy, Pod, Zeroable)]\n\npub struct Uniform {\n\n pub pos: [f32; 3],\n\n pub frame: u32,\n\n 
pub resolution: [f32; 2],\n\n pub mouse: [f32; 2],\n\n pub mouse_pressed: u32,\n\n pub time: f32,\n", "file_path": "src/context/global_ubo.rs", "rank": 88, "score": 24297.301337361136 }, { "content": " resolution: [f32; 2],\n\n mouse: [f32; 2],\n\n mouse_pressed: u32,\n\n time: f32,\n\n time_delta: f32,\n\n frame: u32,\n\n ) -> Self {\n\n Self {\n\n pos,\n\n resolution,\n\n mouse,\n\n mouse_pressed,\n\n time,\n\n time_delta,\n\n frame,\n\n _padding: 0.,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/context/global_ubo.rs", "rank": 89, "score": 24295.64500830843 }, { "content": "use crate::utils::NonZeroSized;\n\nuse std::time::Duration;\n\n\n\nuse bytemuck::{Pod, Zeroable};\n\nuse wgpu::util::DeviceExt;\n\n\n\npub struct GlobalUniformBinding {\n\n pub binding: wgpu::BindGroup,\n\n buffer: wgpu::Buffer,\n\n}\n\n\n\nimpl GlobalUniformBinding {\n\n pub fn new(device: &wgpu::Device) -> Self {\n\n let buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"Global Uniform\"),\n\n usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,\n\n contents: bytemuck::bytes_of(&Uniform::default()),\n\n });\n\n\n\n let layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n", "file_path": "src/context/global_ubo.rs", "rank": 90, "score": 24294.84211251716 }, { "content": "\n\nimpl<'a> PresentPipeline {\n\n pub fn record<'pass>(\n\n &'a self,\n\n rpass: &mut wgpu::RenderPass<'pass>,\n\n uniform_bind_group: &'a GlobalUniformBinding,\n\n input_texture_binding: &'a wgpu::BindGroup,\n\n ) where\n\n 'a: 'pass,\n\n {\n\n rpass.set_pipeline(&self.pipeline);\n\n rpass.set_bind_group(0, &uniform_bind_group.binding, &[]);\n\n rpass.set_bind_group(1, input_texture_binding, &[]);\n\n rpass.set_bind_group(2, &self.sampler_bind_group, &[]);\n\n rpass.draw(0..3, 0..1);\n\n }\n\n}\n\n\n\nimpl ReloadablePipeline for PresentPipeline {\n\n fn reload(&mut self, device: &wgpu::Device, module: &wgpu::ShaderModule) {\n\n *self = 
Self::new_with_module(device, self.surface_format, module);\n\n }\n\n}\n", "file_path": "src/context/present_pipeline.rs", "rank": 91, "score": 24292.35060742562 }, { "content": "}\n\n\n\nimpl Uniform {\n\n pub const DESC: wgpu::BindGroupLayoutDescriptor<'static> = wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"Global Uniform Bind Group Layout\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::VERTEX_FRAGMENT.union(wgpu::ShaderStages::COMPUTE),\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: Some(Uniform::SIZE),\n\n },\n\n count: None,\n\n }],\n\n };\n\n\n\n #[allow(dead_code)]\n\n pub fn new(\n\n pos: [f32; 3],\n", "file_path": "src/context/global_ubo.rs", "rank": 92, "score": 24292.269135839426 }, { "content": " pub fn new(device: &wgpu::Device, (width, height): (u32, u32)) -> Self {\n\n let size = wgpu::Extent3d {\n\n width,\n\n height,\n\n depth_or_array_layers: 1,\n\n };\n\n\n\n let texture = device.create_texture(&wgpu::TextureDescriptor {\n\n label: Some(\"Texture: HdrBackbuffer\"),\n\n size,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: Self::FORMAT,\n\n usage: wgpu::TextureUsages::RENDER_ATTACHMENT\n\n | wgpu::TextureUsages::STORAGE_BINDING\n\n | wgpu::TextureUsages::TEXTURE_BINDING\n\n | wgpu::TextureUsages::COPY_SRC,\n\n });\n\n let texture_view = texture.create_view(&Default::default());\n", "file_path": "src/context/hdr_backbuffer.rs", "rank": 93, "score": 24289.628755404767 }, { "content": " ) -> Self {\n\n let shader = unsafe {\n\n device.create_shader_module_spirv(&wgpu::ShaderModuleDescriptorSpirV {\n\n label: path.to_str(),\n\n source: compiler.create_shader_module(path).unwrap().into(),\n\n })\n\n };\n\n Self::new_with_module(device, surface_format, &shader)\n\n }\n\n\n\n pub fn new_with_module(\n\n device: &wgpu::Device,\n\n surface_format: 
wgpu::TextureFormat,\n\n shader: &wgpu::ShaderModule,\n\n ) -> Self {\n\n let global_bind_group_layout = device.create_bind_group_layout(&Uniform::DESC);\n\n let texture_bind_group_layout =\n\n device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"Present Texture BGL\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n", "file_path": "src/context/present_pipeline.rs", "rank": 94, "score": 24288.948374775213 }, { "content": " count: None,\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStages::VERTEX_FRAGMENT,\n\n ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),\n\n count: None,\n\n },\n\n ],\n\n };\n\n\n\n pub fn new(device: &wgpu::Device, queue: &wgpu::Queue) -> Self {\n\n let data = include_bytes!(\"../../bonsai_256x256x256_uint8.raw\");\n\n let size = wgpu::Extent3d {\n\n width: 256,\n\n height: 256,\n\n depth_or_array_layers: 256,\n\n };\n\n let texture = device.create_texture(&wgpu::TextureDescriptor {\n\n label: Some(\"Foot Texture\"),\n", "file_path": "src/context/volume_texture.rs", "rank": 95, "score": 24288.920791752083 }, { "content": "use std::num::NonZeroU32;\n\n\n\npub struct VolumeTexture {\n\n pub texture: wgpu::Texture,\n\n pub bind_group: wgpu::BindGroup,\n\n pub sampler: wgpu::Sampler,\n\n}\n\n\n\nimpl VolumeTexture {\n\n pub const DESC: wgpu::BindGroupLayoutDescriptor<'static> = wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"Foot BGL\"),\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::VERTEX_FRAGMENT,\n\n ty: wgpu::BindingType::Texture {\n\n sample_type: wgpu::TextureSampleType::Float { filterable: true },\n\n view_dimension: wgpu::TextureViewDimension::D3,\n\n multisampled: false,\n\n },\n", "file_path": "src/context/volume_texture.rs", "rank": 96, "score": 24287.706589656595 }, { "content": "pub struct HdrBackBuffer {\n\n pub texture: wgpu::Texture,\n\n pub texture_view: wgpu::TextureView,\n\n\n\n pub 
render_bind_group: wgpu::BindGroup,\n\n pub storage_bind_group: wgpu::BindGroup,\n\n}\n\n\n\nimpl HdrBackBuffer {\n\n pub const FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba16Float;\n\n pub const DEFAULT_RESOLUTION: (u32, u32) = (1280, 720);\n\n pub const DESC_COMPUTE: wgpu::BindGroupLayoutDescriptor<'static> =\n\n wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"Storage Texture Layour\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::COMPUTE,\n\n ty: wgpu::BindingType::StorageTexture {\n\n access: wgpu::StorageTextureAccess::ReadWrite,\n\n format: Self::FORMAT,\n", "file_path": "src/context/hdr_backbuffer.rs", "rank": 97, "score": 24287.33064091774 }, { "content": " label: Some(\"Global Uniform Bind Group Layout\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::VERTEX_FRAGMENT | wgpu::ShaderStages::COMPUTE,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: Some(Uniform::SIZE),\n\n },\n\n count: None,\n\n }],\n\n });\n\n let uniform = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n label: Some(\"Global Uniform Bind Group\"),\n\n layout: &layout,\n\n entries: &[wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: buffer.as_entire_binding(),\n\n }],\n\n });\n", "file_path": "src/context/global_ubo.rs", "rank": 98, "score": 24286.527230184107 }, { "content": " size,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D3,\n\n format: wgpu::TextureFormat::R8Unorm,\n\n usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,\n\n });\n\n let texture_view = texture.create_view(&Default::default());\n\n\n\n queue.write_texture(\n\n texture.as_image_copy(),\n\n data,\n\n wgpu::ImageDataLayout {\n\n offset: 0,\n\n bytes_per_row: NonZeroU32::new(256),\n\n rows_per_image: NonZeroU32::new(256),\n\n },\n\n size,\n\n );\n\n\n", 
"file_path": "src/context/volume_texture.rs", "rank": 99, "score": 24284.001132742247 } ]
Rust
benches/crit_bench.rs
arthurprs/lz4_flex
4953936f618dc7b644b2f30a612149d466005718
extern crate criterion; use self::criterion::*; use lz4::block::compress as lz4_linked_block_compress; use lz_fear::raw::compress2; use lz_fear::raw::decompress_raw; use lz_fear::raw::U16Table; use lz_fear::raw::U32Table; const COMPRESSION1K: &'static [u8] = include_bytes!("compression_1k.txt"); const COMPRESSION34K: &'static [u8] = include_bytes!("compression_34k.txt"); const COMPRESSION65K: &'static [u8] = include_bytes!("compression_65k.txt"); const COMPRESSION66K: &'static [u8] = include_bytes!("compression_66k_JSON.txt"); const COMPRESSION95K_VERY_GOOD_LOGO: &'static [u8] = include_bytes!("../logo.jpg"); const ALL: &[&[u8]] = &[ COMPRESSION1K as &[u8], COMPRESSION34K as &[u8], ]; fn compress_lz4_fear(input: &[u8]) -> Vec<u8> { let mut buf = Vec::new(); if input.len() <= 0xFFFF { compress2(input, 0, &mut U16Table::default(), &mut buf).unwrap(); } else { compress2(input, 0, &mut U32Table::default(), &mut buf).unwrap(); } buf } fn bench_compression_throughput(c: &mut Criterion) { let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Linear); let mut group = c.benchmark_group("Compress"); group.plot_config(plot_config); for input in ALL.iter() { let input_bytes = input.len() as u64; group.throughput(Throughput::Bytes(input_bytes)); group.bench_with_input( BenchmarkId::new("lz4_flexx_rust", input_bytes), &input, |b, i| b.iter(|| lz4_flex::compress(&i)), ); } group.finish(); } pub fn decompress_fear(input: &[u8]) -> Vec<u8> { let mut vec = Vec::new(); decompress_raw(input, &[], &mut vec, std::usize::MAX).unwrap(); vec } fn bench_decompression_throughput(c: &mut Criterion) { let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Linear); let mut group = c.benchmark_group("Decompress"); group.plot_config(plot_config); for input in ALL.iter() { let input_bytes = input.len() as u64; group.throughput(Throughput::Bytes(input_bytes)); let comp_lz4 = lz4::block::compress(&input, None, false).unwrap(); group.bench_with_input( 
BenchmarkId::new("lz4_flexx_rust", input_bytes), &comp_lz4, |b, i| b.iter(|| lz4_flex::decompress(&i, input.len())), ); } group.finish(); } criterion_group!( benches, bench_decompression_throughput, bench_compression_throughput ); criterion_main!(benches);
extern crate criterion; use self::criterion::*; use lz4::block::compress as lz4_linked_block_compress; use lz_fear::raw::compress2; use lz_fear::raw::decompress_raw; use lz_fear::raw::U16Table; use lz_f
g = PlotConfiguration::default().summary_scale(AxisScale::Linear); let mut group = c.benchmark_group("Decompress"); group.plot_config(plot_config); for input in ALL.iter() { let input_bytes = input.len() as u64; group.throughput(Throughput::Bytes(input_bytes)); let comp_lz4 = lz4::block::compress(&input, None, false).unwrap(); group.bench_with_input( BenchmarkId::new("lz4_flexx_rust", input_bytes), &comp_lz4, |b, i| b.iter(|| lz4_flex::decompress(&i, input.len())), ); } group.finish(); } criterion_group!( benches, bench_decompression_throughput, bench_compression_throughput ); criterion_main!(benches);
ear::raw::U32Table; const COMPRESSION1K: &'static [u8] = include_bytes!("compression_1k.txt"); const COMPRESSION34K: &'static [u8] = include_bytes!("compression_34k.txt"); const COMPRESSION65K: &'static [u8] = include_bytes!("compression_65k.txt"); const COMPRESSION66K: &'static [u8] = include_bytes!("compression_66k_JSON.txt"); const COMPRESSION95K_VERY_GOOD_LOGO: &'static [u8] = include_bytes!("../logo.jpg"); const ALL: &[&[u8]] = &[ COMPRESSION1K as &[u8], COMPRESSION34K as &[u8], ]; fn compress_lz4_fear(input: &[u8]) -> Vec<u8> { let mut buf = Vec::new(); if input.len() <= 0xFFFF { compress2(input, 0, &mut U16Table::default(), &mut buf).unwrap(); } else { compress2(input, 0, &mut U32Table::default(), &mut buf).unwrap(); } buf } fn bench_compression_throughput(c: &mut Criterion) { let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Linear); let mut group = c.benchmark_group("Compress"); group.plot_config(plot_config); for input in ALL.iter() { let input_bytes = input.len() as u64; group.throughput(Throughput::Bytes(input_bytes)); group.bench_with_input( BenchmarkId::new("lz4_flexx_rust", input_bytes), &input, |b, i| b.iter(|| lz4_flex::compress(&i)), ); } group.finish(); } pub fn decompress_fear(input: &[u8]) -> Vec<u8> { let mut vec = Vec::new(); decompress_raw(input, &[], &mut vec, std::usize::MAX).unwrap(); vec } fn bench_decompression_throughput(c: &mut Criterion) { let plot_confi
random
[ { "content": "use lz4_flex::block::DecompressError;\n\nuse tokio::fs::File;\n\nuse tokio::prelude::*; // for write_all()\n\nuse argh::FromArgs;\n\n\n\n#[macro_use]\n\nextern crate quick_error;\n\n\n\n#[derive(FromArgs, Debug)]\n\n/// Reach new heights.\n", "file_path": "lz4_bin/src/main.rs", "rank": 2, "score": 9.23879143107962 }, { "content": "// }\n\n// use crate::block::END_OFFSET;\n\n// use crate::block::LZ4_MIN_LENGTH;\n\n// use crate::block::MAX_DISTANCE;\n\n// use crate::block::MFLIMIT;\n\n// use crate::block::MINMATCH;\n\n\n\n#[cfg(feature = \"safe-encode\")]\n\nuse std::convert::TryInto;\n\n\n\n/// Increase step size after 1<<INCREASE_STEPSIZE_BITSHIFT non matches\n\nconst INCREASE_STEPSIZE_BITSHIFT: usize = 5;\n\n\n", "file_path": "src/test_bins/profile_comp.rs", "rank": 4, "score": 8.29911407094798 }, { "content": "//! Tests.\n\n\n\n#[macro_use]\n\nextern crate more_asserts;\n\n// extern crate test;\n\n\n\n// use crate::block::compress::compress_into_2;\n\nuse lz4::block::{compress as lz4_cpp_block_compress, decompress as lz4_cpp_block_decompress};\n\nuse lz4_compress::compress as lz4_rust_compress;\n\nuse lz4_flex::block::{compress_prepend_size, decompress_size_prepended};\n\nuse lz4_flex::{compress, decompress};\n\nuse std::str;\n\n\n\nconst COMPRESSION1K: &'static [u8] = include_bytes!(\"../benches/compression_1k.txt\");\n\nconst COMPRESSION34K: &'static [u8] = include_bytes!(\"../benches/compression_34k.txt\");\n\nconst COMPRESSION65: &'static [u8] = include_bytes!(\"../benches/compression_65k.txt\");\n\nconst COMPRESSION66JSON: &'static [u8] = include_bytes!(\"../benches/compression_66k_JSON.txt\");\n\n// const COMPRESSION10MB: &'static [u8] = include_bytes!(\"../benches/dickens.txt\");\n\n\n\n// #[bench]\n", "file_path": "tests/tests.rs", "rank": 5, "score": 8.268764698429045 }, { "content": "//! 
Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\nuse lz4_flex::block::compress::compress_prepend_size;\n\nuse lz4_flex::block::decompress::decompress_size_prepended;\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "lz4-wasm/tests/node.rs", "rank": 6, "score": 7.974851674346144 }, { "content": "extern crate lz4_flex;\n\n\n", "file_path": "src/test_bins/comp_debug.rs", "rank": 7, "score": 7.767042130792168 }, { "content": "//! The compression algorithm.\n\n//!\n\n//! We make use of hash tables to find duplicates. This gives a reasonable compression ratio with a\n\n//! high performance. It has fixed memory usage, which contrary to other approachs, makes it less\n\n//! memory hungry.\n\n\n\nuse crate::block::hashtable::get_table_size;\n\nuse crate::block::hashtable::HashTable;\n\nuse crate::block::hashtable::{HashTableU16, HashTableU32, HashTableUsize};\n\nuse crate::block::END_OFFSET;\n\nuse crate::block::LZ4_MIN_LENGTH;\n\nuse crate::block::MAX_DISTANCE;\n\nuse crate::block::MFLIMIT;\n\nuse crate::block::MINMATCH;\n\nuse alloc::vec::Vec;\n\n\n\n#[cfg(feature = \"safe-encode\")]\n\nuse core::convert::TryInto;\n\n\n\n/// Increase step size after 1<<INCREASE_STEPSIZE_BITSHIFT non matches\n\nconst INCREASE_STEPSIZE_BITSHIFT: usize = 5;\n\n\n\n/// hashes and right shifts to a maximum value of 16bit, 65535\n\n/// The right shift is done in order to not exceed, the hashtables capacity\n", "file_path": "src/block/compress.rs", "rank": 8, "score": 7.7374438760418105 }, { "content": "extern crate lz4_flex;\n\n\n\n\n\nuse lz4_flex::block::DecompressError;\n\n\n\nconst COMPRESSION10MB: &[u8] = include_bytes!(\"../../benches/dickens.txt\");\n\n// const COMPRESSION10MB: &'static [u8] = include_bytes!(\"../../benches/compression_66k_JSON.txt\");\n\n\n", "file_path": "src/test_bins/profile_decomp.rs", "rank": 9, "score": 7.6276389153671476 }, { "content": "//! 
Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\nuse lz4_flex::block::compress::compress_prepend_size;\n\nuse lz4_flex::block::decompress::decompress_size_prepended;\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "lz4-wasm/tests/web.rs", "rank": 10, "score": 7.499768511020312 }, { "content": "use crate::block::compress::compress as compress_block;\n\nuse std::io::Read;\n\nuse std::io::Write;\n\n\n\n/// Compress all bytes of `input` into `output`.\n\n#[allow(dead_code)]\n\n#[inline]\n", "file_path": "src/frame/compress.rs", "rank": 11, "score": 6.692083973181951 }, { "content": "- `std` enables dependency on the standard library. _enabled by default_\n\n\n\nFor maximum performance use `no-default-features`.\n\n\n\n*/\n\n\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nextern crate alloc;\n\n\n\npub mod block;\n\n#[cfg(feature = \"std\")]\n\nmod frame;\n\n\n\npub use block::compress::{compress, compress_into, compress_prepend_size};\n\n\n\n#[cfg(feature = \"safe-decode\")]\n\npub use block::decompress_safe::{decompress, decompress_into, decompress_size_prepended};\n\n\n\n#[cfg(not(feature = \"safe-decode\"))]\n\npub use block::decompress::{decompress, decompress_into, decompress_size_prepended};\n", "file_path": "src/lib.rs", "rank": 12, "score": 6.6033187544201155 }, { "content": "//! The decompression algorithm.\n\nuse crate::block::wild_copy_from_src_16;\n\nuse crate::block::DecompressError;\n\nuse alloc::vec::Vec;\n\n\n\n// copy_on_self uses 16byte wild copy, to avoid overlapping copies we add 12. 
Minimum length of match_length is 4 totaling to 16.\n\nconst SAFE_DUPLICATE_COPY_RANGE: usize = 12;\n\n\n\n/// Copies data to output_ptr by self-referential copy from start and match_length\n\n#[inline]\n", "file_path": "src/block/decompress.rs", "rank": 13, "score": 6.568100515853292 }, { "content": "/// Duplicate code here for analysis with VTune\n\nextern crate lz4_flex;\n\n\n\nconst COMPRESSION10MB: &[u8] = include_bytes!(\"../../benches/dickens.txt\");\n\n\n", "file_path": "src/test_bins/profile_comp.rs", "rank": 14, "score": 6.059184788580836 }, { "content": "//! The decompression algorithm.\n\nuse byteorder::{ByteOrder, LittleEndian};\n\n\n\n#[macro_use]\n\nextern crate quick_error;\n\n\n\n// const FASTLOOP_SAFE_DISTANCE : usize = 64;\n\n\n\nquick_error! {\n\n /// An error representing invalid compressed data.\n\n #[derive(Debug)]\n\n pub enum Error {\n\n /// Expected another byte, but none found.\n\n ExpectedAnotherByte {\n\n description(\"Expected another byte, found none.\")\n\n }\n\n /// Deduplication offset out of bounds (not in buffer).\n\n OffsetOutOfBounds {\n\n description(\"The offset to copy is not contained in the decompressed buffer.\")\n\n }\n\n }\n\n}\n\n\n\n// const COMPRESSION10MB: &'static [u8] = include_bytes!(\"../../benches/dickens.txt\");\n\n// const COMPRESSION10MB: &[u8] = include_bytes!(\"../../benches/compression_34k.txt\");\n\nconst COMPRESSION10MB: &'static [u8] = include_bytes!(\"../../benches/compression_66k_JSON.txt\");\n\n//\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 15, "score": 5.011146309486534 }, { "content": "//! The decompression algorithm.\n\n\n\nuse crate::block::DecompressError;\n\nuse alloc::vec::Vec;\n\n\n\n/// Read an integer LSIC (linear small integer code) encoded.\n\n///\n\n/// In LZ4, we encode small integers in a way that we can have an arbitrary number of bytes. In\n\n/// particular, we add the bytes repeatedly until we hit a non-0xFF byte. 
When we do, we add\n\n/// this byte to our sum and terminate the loop.\n\n///\n\n/// # Example\n\n///\n\n/// ```notest\n\n/// 255, 255, 255, 4, 2, 3, 4, 6, 7\n\n/// ```\n\n///\n\n/// is encoded to _255 + 255 + 255 + 4 = 769_. The bytes after the first 4 is ignored, because\n\n/// 4 is the first non-0xFF byte.\n\n#[inline]\n", "file_path": "src/block/decompress_safe.rs", "rank": 16, "score": 4.511555053581215 }, { "content": "## Benchmarks\n\nThe benchmark is run with criterion, the test files are in the benches folder.\n\n\n\nCurrently 3 implementations are compared, this one, the [redox-version](https://crates.io/crates/lz4-compress), [lz-fear](https://github.com/main--/rust-lz-fear) and the [c++ version via rust bindings](https://crates.io/crates/lz4). \n\nThe lz4-flex version is tested with the feature flags safe-decode and safe-encode switched on and off.\n\n\n\n- lz4_redox_rust: https://crates.io/crates/lz4-compress\n\n- lz4_cpp: https://crates.io/crates/lz4\n\n- lz-fear: https://github.com/main--/rust-lz-fear\n\n\n\n### Results v0.7.2 18-01-2021 (safe-decode and safe-encode off)\n\n`cargo bench --no-default-features`\n\n\n\nExecuted on Core i7-6700 Linux Mint.\n\n\n\n![Compress](./compress_bench.svg)\n\n\n\n![Decompress](./decompress_bench.svg)\n\n\n\n### Results v0.7.2 18-01-2021 (safe-decode and safe-encode on)\n\n`cargo bench`\n\n\n\nExecuted on Core i7-6700 Linux Mint.\n\n\n\n![Compress](./compress_bench_safe.svg)\n\n\n\n![Decompress](./decompress_bench_safe.svg)\n\n\n\n## Miri\n\n\n\n[Miri](https://github.com/rust-lang/miri) can be used to find issues related to incorrect unsafe usage:\n\n\n\n`MIRIFLAGS=\"-Zmiri-disable-isolation -Zmiri-disable-stacked-borrows\" cargo miri test --no-default-features`\n\n\n\n## Fuzzer\n\nThis fuzz target generates corrupted data for the decompressor. 
Make sure to switch to the checked_decode version in `fuzz/Cargo.toml` before testing this.\n\n`cargo fuzz run fuzz_decomp_corrupted_data`\n\n\n\nThis fuzz target asserts that a compression and decompression rountrip returns the original input.\n\n`cargo fuzz run fuzz_roundtrip`\n\n\n\nThis fuzz target asserts compression with cpp and decompression with lz4_flex returns the original input.\n\n`cargo fuzz run fuzz_roundtrip_cpp_compress`\n\n\n\n## TODO\n\n- Frame format\n\n- High compression\n\n- Dictionary Compression\n\n\n", "file_path": "README.md", "rank": 17, "score": 4.1879712897861285 }, { "content": "![Rust](https://github.com/PSeitz/lz4_flex/workflows/Rust/badge.svg)\n\n[![Docs](https://docs.rs/lz4_flex/badge.svg)](https://docs.rs/crate/lz4_flex/)\n\n[![Crates.io](https://img.shields.io/crates/v/lz4_flex.svg)](https://crates.io/crates/lz4_flex)\n\n\n\n# lz4_flex\n\n\n\n![lz4_flex_logo](https://raw.githubusercontent.com/PSeitz/lz4_flex/master/logo.jpg)\n\n\n\nFastest LZ4 implementation in Rust. Originally based on [redox-os' lz4 compression](https://crates.io/crates/lz4-compress), but now a complete rewrite.\n\nThe results in the table are from a benchmark in this project (66Kb JSON).\n\n\n\n| Compressor | Compression | Decompression | Ratio\t\t |\n\n|----------------------|-------------|---------------|---------------|\n\n| lz4_flex unsafe | 947 MiB/s | 5017 MiB/s | 0.2270 \t |\n\n| lz4_flex safe | 649 MiB/s | 1433 MiB/s | 0.2270 \t |\n\n| lz4_rs (lz4 1.8.1) | 1001 MiB/s | 4627 MiB/s | 0.2283 \t |\n\n| lz4_fear | 456 MiB/s | 809 MiB/s | 0.2283\t |\n\n\n\n## Features\n\n- Very good logo\n\n- LZ4 Block format\n\n- High performance\n\n- 0,5s clean release build time\n\n- Feature flags to configure safe/unsafe code usage\n\n- no-std support (thanks @coolreader18)\n\n- 32-bit support\n\n\n\n## Usage: \n\nCompression and decompression uses no usafe via the default feature flags \"safe-encode\" and \"safe-decode\". 
If you need more performance you can disable them (e.g. with no-default-features).\n\n\n\nSafe:\n\n```\n\nlz4_flex = { version = \"0.7.5\" }\n\n```\n\n\n\nPerformance:\n\n```\n\nlz4_flex = { version = \"0.7.5\", default-features = false }\n\n```\n\n\n\nWarning: If you don't trust your input, use checked-decode in order to avoid out of bounds access.\n\n```\n\nlz4_flex = { version = \"0.7.5\", default-features = false, features = [\"checked-decode\"] }\n\n```\n\n\n\n```rust\n\nuse lz4_flex::{compress_prepend_size, decompress_size_prepended};\n\n\n\nfn main(){\n\n let input: &[u8] = b\"Hello people, what's up?\";\n\n let compressed = compress_prepend_size(input);\n\n let uncompressed = decompress_size_prepended(&compressed).unwrap();\n\n assert_eq!(input, uncompressed);\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 18, "score": 4.1369230879596905 }, { "content": "#![no_main]\n\nuse std::convert::TryInto;\n\nuse libfuzzer_sys::fuzz_target;\n\n\n\nuse lz4_flex::decompress_size_prepended;\n\nfuzz_target!(|data: &[u8]| {\n\n\tif data.len() >= 4 {\n\n\t\tlet size = u32::from_le_bytes(data[0..4].try_into().unwrap());\n\n\t\tif size > 20_000_000 {\n\n\t\t\treturn;\n\n\t\t}\n\n\t}\n\n // should not panic\n\n decompress_size_prepended(&data);\n\n});\n", "file_path": "fuzz/fuzz_targets/fuzz_decomp_corrupted_data.rs", "rank": 20, "score": 3.681208198058499 }, { "content": "mod utils;\n\n\n\nuse lz4_flex::block::compress::compress_prepend_size;\n\nuse lz4_flex::block::decompress::decompress_size_prepended;\n\nuse wasm_bindgen::prelude::*;\n\n\n\n// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global\n\n// allocator.\n\n#[cfg(feature = \"wee_alloc\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n#[wasm_bindgen]\n", "file_path": "lz4-wasm/src/lib.rs", "rank": 21, "score": 3.6459811667887614 }, { "content": "#![no_main]\n\nuse libfuzzer_sys::fuzz_target;\n\n\n\nuse 
lz4_flex::decompress_size_prepended;\n\nuse lz4_flex::compress_prepend_size;\n\nfuzz_target!(|data: &[u8]| {\n\n // fuzzed code goes here\n\n let compressed = compress_prepend_size(data);\n\n let decompressed = decompress_size_prepended(&compressed).unwrap();\n\n assert_eq!(data, decompressed.as_slice());\n\n});\n", "file_path": "fuzz/fuzz_targets/fuzz_roundtrip.rs", "rank": 22, "score": 3.4957148056275447 }, { "content": "const compressed = wasm.compress(enc.encode(\"compress this text, compress this text pls. thx. thx. thx. thx. thx\"));\n", "file_path": "lz4-wasm/example_project/index.js", "rank": 23, "score": 3.4025583966260275 }, { "content": "#![no_main]\n\nuse libfuzzer_sys::fuzz_target;\n\n\n\nuse lz4_flex::decompress_size_prepended;\n\nuse lz4::block::compress as lz4_linked_block_compress;\n\n\n\nfuzz_target!(|data: &[u8]| {\n\n // fuzzed code goes here\n\n let compressed = lz4_linked_block_compress(data, None, true).unwrap();\n\n let decompressed = decompress_size_prepended(&compressed).unwrap();\n\n assert_eq!(data, decompressed.as_slice());\n\n});\n", "file_path": "fuzz/fuzz_targets/fuzz_roundtrip_cpp_compress.rs", "rank": 24, "score": 3.3509831539308967 }, { "content": "\n\n#[cfg_attr(feature = \"safe-encode\", forbid(unsafe_code))]\n\npub mod compress;\n\npub mod hashtable;\n\n\n\n#[cfg_attr(feature = \"safe-decode\", forbid(unsafe_code))]\n\npub mod decompress_safe;\n\n#[cfg(feature = \"safe-decode\")]\n\npub use decompress_safe as decompress;\n\n\n\n#[cfg(not(feature = \"safe-decode\"))]\n\npub mod decompress;\n\n\n\npub use compress::compress_prepend_size;\n\n\n\n#[cfg(feature = \"safe-decode\")]\n\npub use decompress_safe::decompress_size_prepended;\n\n\n\n#[cfg(not(feature = \"safe-decode\"))]\n\npub use decompress::decompress_size_prepended;\n", "file_path": "src/block/mod.rs", "rank": 25, "score": 3.293606245149296 }, { "content": "/// Returns the maximum output size of the compressed data.\n\n/// Can be used to preallocate capacity on the 
output vector\n\npub fn get_maximum_output_size(input_len: usize) -> usize {\n\n 16 + 4 + (input_len as f64 * 1.1) as usize\n\n}\n\n\n\n/// Compress all bytes of `input` into `output`.\n\n/// The method chooses an appropriate hashtable to lookup duplicates and calls `compress_into_with_table`\n\n///\n\n/// The method will reserve the required space on the output vec.\n", "file_path": "src/block/compress.rs", "rank": 26, "score": 3.200721738875862 }, { "content": "\n\n // Calculate the start of this duplicate segment.\n\n let start_ptr = unsafe { output_ptr.sub(offset as usize) };\n\n\n\n // We'll do a bound check to in checked-decode.\n\n #[cfg(feature = \"checked-decode\")]\n\n {\n\n if (start_ptr as usize) >= (output_ptr as usize) {\n\n return Err(DecompressError::OffsetOutOfBounds);\n\n };\n\n }\n\n duplicate(&mut output_ptr, start_ptr, match_length);\n\n }\n\n Ok(())\n\n}\n\n\n\nuse core::convert::TryInto;\n\nuse core::{ptr};\n\n\n\n/// Decompress all bytes of `input` into a new vec. 
The first 4 bytes are the uncompressed size in litte endian.\n\n/// Can be used in conjuction with `compress_prepend_size`\n", "file_path": "src/test_bins/profile_decomp.rs", "rank": 27, "score": 2.9807507392907846 }, { "content": " \"Ratio 34k\",\n\n COMPRESSION34K.len(),\n\n compress(COMPRESSION34K).len(),\n\n );\n\n }\n\n\n\n mod lz4_linked {\n\n use super::*;\n\n use std::io;\n\n fn get_compressed_size(mut input: &[u8]) -> usize {\n\n let mut cache = vec![];\n\n let mut encoder = lz4::EncoderBuilder::new()\n\n .level(2)\n\n .build(&mut cache)\n\n .unwrap();\n\n io::copy(&mut input, &mut encoder).unwrap();\n\n let (output, _result) = encoder.finish();\n\n output.len()\n\n }\n\n\n", "file_path": "tests/tests.rs", "rank": 28, "score": 2.7850047168995213 }, { "content": "/// The Hashtable trait used by the compression to store hashed bytes to their position.\n\n/// `val` can be maximum the size of the input in bytes.\n\n///\n\n/// `pos` can have a maximum value of u16::MAX or 65535\n\n/// If the hashtable is smaller it needs to reduce the pos to its space, e.g. by right shifting.\n\n///\n\n/// Duplication dictionary size.\n\n///\n\n/// Every four bytes is assigned an entry. When this number is lower, fewer entries exists, and\n\n/// thus collisions are more likely, hurting the compression ratio.\n\n///\n\nuse alloc::vec::Vec;\n\n\n", "file_path": "src/block/hashtable.rs", "rank": 29, "score": 2.714939474251366 }, { "content": "/*! 
Pure Rust, high performance implementation of LZ4 compression.\n\n\n\nA detailed explanation of the algorithm can be found [here](http://ticki.github.io/blog/how-lz4-works/).\n\n\n\n\n\n# Examples\n\n```\n\nuse lz4_flex::{compress_prepend_size, decompress_size_prepended};\n\nlet input: &[u8] = b\"Hello people, what's up?\";\n\nlet compressed = compress_prepend_size(input);\n\nlet uncompressed = decompress_size_prepended(&compressed).unwrap();\n\nassert_eq!(input, uncompressed);\n\n\n\n```\n\n\n\n## Feature Flags\n\n\n\n- `safe-encode` uses only safe rust for encode. _enabled by default_\n\n- `safe-decode` uses only safe rust for encode. _enabled by default_\n\n- `checked-decode` will add aditional checks if `safe-decode` is not enabled, to avoid out of bounds access. This should be enabled for untrusted input.\n", "file_path": "src/lib.rs", "rank": 30, "score": 2.7141935017919825 }, { "content": " /// Write a buffer to the output stream.\n\n ///\n\n /// The reason this doesn't take `&mut self` is that we need partial borrowing due to the rules\n\n /// of the borrow checker. For this reason, we instead take some number of segregated\n\n /// references so we can read and write them independently.\n\n // #[inline(never)]\n\n fn output(output: &mut Vec<u8>, buf: &[u8]) {\n\n // We use simple memcpy to extend the vector.\n\n output.extend_from_slice(&buf);\n\n }\n\n\n\n /// Write an already decompressed match to the output stream.\n\n ///\n\n /// This is used for the essential part of the algorithm: deduplication. 
We start at some\n\n /// position `start` and then keep pushing the following element until we've added\n\n /// `match_length` elements.\n\n // #[inline(never)]\n\n fn duplicate(&mut self, start: usize, match_length: usize) {\n\n // We cannot simply use memcpy or `extend_from_slice`, because these do not allow\n\n // self-referential copies: http://ticki.github.io/img/lz4_runs_encoding_diagram.svg\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 31, "score": 2.5435954300830015 }, { "content": " crate::block::decompress::decompress_into_with_dict(&out, &mut trip, &input).unwrap();\n\n assert_eq!(input, trip);\n\n assert!(out.len() < compress(input).len());\n\n }\n\n\n\n #[test]\n\n fn test_compress_at_mf_limit() {\n\n let input: &[u8] = b\"aaaaaaaabaaa\\n\";\n\n let out = compress(&input);\n\n assert!(out.len() < input.len());\n\n }\n\n}\n", "file_path": "src/block/compress.rs", "rank": 32, "score": 2.334736380870182 }, { "content": " actual_size: output.capacity(),\n\n expected_size: 0,\n\n });\n\n };\n\n }\n\n duplicate(&mut output_ptr, start_ptr, match_length);\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Decompress all bytes of `input` into a new vec. The first 4 bytes are the uncompressed size in litte endian.\n\n/// Can be used in conjuction with `compress_prepend_size`\n", "file_path": "src/block/decompress.rs", "rank": 33, "score": 2.2936668966926197 }, { "content": " res\n\n }\n\n\n\n /// writes block descriptor byte\n\n /// Block Maximum Size\n\n ///\n\n /// This information is useful to help the decoder allocate memory. 
Size here refers to the original (uncompressed) data size.\n\n /// Block Maximum Size is one value among the following table :\n\n /// 0 1 2 3 4 5 6 7\n\n /// N/A N/A N/A N/A 64 KB 256 KB 1 MB 4 MB\n\n fn write_bd_byte(&self) -> u32 {\n\n 1 << (self.block_size_id as u32)\n\n }\n\n}\n", "file_path": "src/frame/mod.rs", "rank": 34, "score": 1.9603413647163412 }, { "content": "\n\nuse core::convert::TryInto;\n\nuse core::{fmt, ptr};\n\n\n\n/// https://github.com/lz4/lz4/blob/dev/doc/lz4_Block_format.md#end-of-block-restrictions\n\n/// The last match must start at least 12 bytes before the end of block. The last match is part of the penultimate sequence.\n\n/// It is followed by the last sequence, which contains only literals.\n\n///\n\n/// Note that, as a consequence, an independent block < 13 bytes cannot be compressed, because the match must copy \"something\",\n\n/// so it needs at least one prior byte.\n\n///\n\n/// When a block can reference data from another block, it can start immediately with a match and no literal, so a block of 12 bytes can be compressed.\n\nconst MFLIMIT: usize = 12;\n\n\n\n/// The last 5 bytes of input are always literals. 
Therefore, the last sequence contains at least 5 bytes.\n\nconst END_OFFSET: usize = 5;\n\n\n\n/// https://github.com/lz4/lz4/blob/dev/doc/lz4_Block_format.md#end-of-block-restrictions\n\n/// Minimum length of a block\n\n///\n", "file_path": "src/block/mod.rs", "rank": 35, "score": 1.9242408918375307 }, { "content": "pub mod compress;\n\npub mod decompress;\n\n\n\npub use compress::compress;\n\n\n\n#[allow(dead_code)]\n\n#[derive(Clone, Copy)]\n\npub enum BlockSize {\n\n Default = 0, // Default - 64KB\n\n Max64KB = 4,\n\n Max256KB = 5,\n\n Max1MB = 6,\n\n Max4MB = 7,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BlockSize {\n\n pub fn get_size(&self) -> usize {\n\n match self {\n\n &BlockSize::Default | &BlockSize::Max64KB => 64 * 1024,\n", "file_path": "src/frame/mod.rs", "rank": 36, "score": 1.869767164418655 }, { "content": " // We continue if we got 255.\n\n extra == 0xFF\n\n } {}\n\n Ok(n)\n\n }\n\n\n\n /// Read a little-endian 16-bit integer from the input stream.\n\n // #[inline(never)]\n\n fn read_u16(&mut self) -> Result<u16, Error> {\n\n // We use byteorder to read an u16 in little endian.\n\n\n\n let num = LittleEndian::read_u16(&self.input[self.input_pos..]);\n\n\n\n self.move_cursor(&self.input, 2)?;\n\n Ok(num)\n\n }\n\n\n\n /// Read the literals section of a block.\n\n ///\n\n /// The literals section encodes some bytes which are to be copied to the output without any\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 37, "score": 1.8275478130714764 }, { "content": " // self.input_pos+=1;\n\n // match_length += extra as u32;\n\n\n\n // // We continue if we got 255.\n\n // extra == 0xFF\n\n // } {}\n\n\n\n // // match_length += self.read_integer()? as usize;\n\n // }\n\n\n\n // // We now copy from the already decompressed buffer. This allows us for storing duplicates\n\n // // by simply referencing the other location.\n\n\n\n // // Calculate the start of this duplicate segment. 
We use wrapping subtraction to avoid\n\n // // overflow checks, which we will catch later.\n\n // let start = self.output.len() - offset as usize;\n\n\n\n // // We'll do a bound check to avoid panicking.\n\n // self.duplicate(start, match_length as usize);\n\n // }\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 38, "score": 1.748581840732217 }, { "content": " 19 => self.match_full += 1,\n\n _ => self.match_fit += 1,\n\n }\n\n\n\n // The intial match length can maximally be 19. As with the literal length, this indicates\n\n // that there are more bytes to read.\n\n if match_length == 4 + 15 {\n\n // The match length took the maximal value, indicating that there is more bytes. We\n\n // read the extra integer.\n\n match_length += self.read_integer()? as usize;\n\n }\n\n\n\n if match_length < 256 {\n\n self.match_7bit_fit += 1;\n\n }\n\n\n\n // We now copy from the already decompressed buffer. This allows us for storing duplicates\n\n // by simply referencing the other location.\n\n\n\n // Calculate the start of this duplicate segment. 
We use wrapping subtraction to avoid\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 39, "score": 1.7116037508891577 }, { "content": "// println!(\"dict size 16384 {:?}\", bytes_written as f64/ COMPRESSION66K.len() as f64);\n\n// let bytes_written = compress_into_2(input, &mut vec, 32768, 1).unwrap();\n\n// println!(\"dict size 32768 {:?}\", bytes_written as f64/ COMPRESSION66K.len() as f64);\n\n\n\n// // let bytes_written = compress_into_2(input, &mut vec).unwrap();\n\n\n\n// }\n\n\n\n// the last 5 bytes need to be literals, so the last match block is not allowed to match to the end\n\n\n\n// #[test]\n\n// fn test_end_offset() {\n\n// inverse(&[122, 1, 0, 1, 0, 10, 1, 0]);\n\n// // inverse(\"AAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBaAAAAAAAAAAAAAAAAAAAAAAAA\");\n\n// }\n\n\n\n#[cfg(test)]\n\nmod checked_decode {\n\n use super::*;\n\n\n", "file_path": "tests/tests.rs", "rank": 40, "score": 1.6936950643192126 }, { "content": "<div align=\"center\">\n\n\n\n <h1><code>lz4-wasm</code></h1>\n\n\n\n <strong>Extremely fast compression(200MB/s Firefox, 350Mb/s Chrome) and decompression(600MB/s Firefox, 1400Mb/s Chrome) in the browser or nodejs using wasm.</strong>\n\n\n\n <sub>Built with Rust</a></sub>\n\n</div>\n\n\n\n\n\n## 🚴 Usage\n\n\n\n\n\nThe wasm module exposes two function compress and decompress.\n\nBoth accept and return UInt8Array. \n\nInternally the lz4 block api is used, the length of the original input is prepended in 32-bit little endian.\n\n\n\n\n\n```\n\n\n\nimport * as wasm from \"lz4-wasm\";\n\n\n\n// use TextEncoder to get bytes (UInt8Array) from string\n\nvar enc = new TextEncoder();\n\nconst compressed = wasm.compress(enc.encode(\"compress this text, compress this text pls. thx. thx. thx. thx. 
thx\"));\n\nconst original = wasm.decompress(compressed);\n\n\n\nvar dec = new TextDecoder(\"utf-8\");\n\nalert(dec.decode(original))\n\n\n\n```\n\n\n\n\n\nSee https://github.com/PSeitz/lz4_flex/tree/master/lz4-wasm/example_project for usage and benchmark.\n\n\n\n\n\n## Making New Releases\n\n\n\n### Release for bundler\n\n\n\nBuild. This will optimize usage for inside a bundler like webpack.\n\n```\n\nRUST_LOG=info wasm-pack build --release\n\n```\n\n\n\nDue to a long standing bug in wasm-pack 0.9.1, _manually_ add these files to package.json.\n\n\n\n```\n\n \"lz4_wasm_bg.wasm.d.ts\",\n\n \"lz4_wasm_bg.js\",\n\n```\n\n\n\n```\n\nRUST_LOG=info wasm-pack publish\n\n```\n\n\n\n\n\n### Release for nodejs\n\n\n\nset name in Cargo toml to\n\n```\n\nname = \"lz4-wasm-nodejs\"\n\n```\n\n\n\nBuild for nodejs\n\n```\n\nRUST_LOG=info wasm-pack build --release -t nodejs\n\n```\n\n\n\n```\n\nRUST_LOG=info wasm-pack publish\n", "file_path": "lz4-wasm/README.md", "rank": 41, "score": 1.6635444259853611 }, { "content": "}\n\n\n\n/// Frame Descriptor\n\n/// FLG BD (Content Size) (Dictionary ID) HC\n\n/// 1 byte 1 byte 0 - 8 bytes 0 - 4 bytes 1 byte\n\n#[allow(dead_code)]\n\n#[repr(C)]\n\npub(crate) struct LZ4FFrameInfo {\n\n pub content_size: Option<u64>,\n\n pub block_size_id: BlockSize,\n\n pub block_mode: BlockMode,\n\n pub content_checksum_flag: ContentChecksum,\n\n // pub reserved: [u32; 5],\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl LZ4FFrameInfo {\n\n fn read(input: &[u8]) -> LZ4FFrameInfo {\n\n // read flag bytes\n\n\n", "file_path": "src/frame/mod.rs", "rank": 42, "score": 1.5908591088612933 }, { "content": "// println!(\"{}\", bench_gen_env(\"COMPRESSION95K_VERY_GOOD_LOGO\", COMPRESSION95K_VERY_GOOD_LOGO.len(), || &COMPRESSION95K_VERY_GOOD_LOGO, |xs| lz4_flex::compress(&xs)));\n\n\n\n// let compression1_k_compressed = lz4_flex::compress_prepend_size(&COMPRESSION1K);\n\n// let compression34_k_compressed = lz4_flex::compress_prepend_size(&COMPRESSION34K);\n\n// let 
compression65_k_compressed = lz4_flex::compress_prepend_size(&COMPRESSION65K);\n\n// let compression66_k_compressed = lz4_flex::compress_prepend_size(&COMPRESSION66K);\n\n// let compression95_k_very_good_logo_compressed = lz4_flex::compress_prepend_size(&COMPRESSION95K_VERY_GOOD_LOGO);\n\n\n\n// println!(\"{}\", bench_gen_env(\"DECOMPRESSION1K\", compression1_k_compressed.len(), || &compression1_k_compressed, |xs| lz4_flex::decompress_size_prepended(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"DECOMPRESSION34K\", compression34_k_compressed.len(), || &compression34_k_compressed, |xs| lz4_flex::decompress_size_prepended(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"DECOMPRESSION65K\", compression65_k_compressed.len(), || &compression65_k_compressed, |xs| lz4_flex::decompress_size_prepended(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"DECOMPRESSION66K\", compression66_k_compressed.len(), || &compression66_k_compressed, |xs| lz4_flex::decompress_size_prepended(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"DECOMPRESSION95K_VERY_GOOD_LOGO\", compression95_k_very_good_logo_compressed.len(), || &compression95_k_very_good_logo_compressed, |xs| lz4_flex::decompress_size_prepended(&xs)));\n\n// }\n\n\n\n// #![feature(test)]\n\n// extern crate test;\n\n\n\n// const COMPRESSION1K: &'static [u8] = include_bytes!(\"compression_1k.txt\");\n\n// const COMPRESSION34K: &'static [u8] = include_bytes!(\"compression_34k.txt\");\n", "file_path": "benches/bench.rs", "rank": 43, "score": 1.562922120326852 }, { "content": " if literal == 15 {\n\n // The literal length took the maximal value, indicating that there is more than 15\n\n // literal bytes. We read the extra integer.\n\n literal += self.read_integer()? as usize;\n\n }\n\n\n\n // println!(\"{:?}\", literal);\n\n // Now we know the literal length. 
The number will be used to indicate how long the\n\n // following literal copied to the output buffer is.\n\n\n\n // Read the literals segment and output them without processing.\n\n let block = &self.input[self.input_pos..self.input_pos + literal];\n\n self.move_cursor(&self.input, literal)?;\n\n Self::output(&mut self.output, block);\n\n Ok(())\n\n }\n\n\n\n /// Read the duplicates section of the block.\n\n ///\n\n /// The duplicates section serves to reference an already decoded segment. This consists of two\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 44, "score": 1.5188807939695628 }, { "content": " // Exhaust the decoder by reading and decompressing all blocks until the remaining buffer\n\n // is empty.\n\n let in_len = self.input.len();\n\n // while in_len - self.input_pos >= FASTLOOP_SAFE_DISTANCE {\n\n // // Read the token. The token is the first byte in a block. It is divided into two 4-bit\n\n // // subtokens, the higher and the lower.\n\n\n\n // self.token = unsafe{*self.input.get_unchecked(self.input_pos)};\n\n // self.input_pos+=1;\n\n\n\n // // Now, we read the literals section.\n\n // let mut literal = (self.token >> 4) as usize;\n\n // if literal == 15 {\n\n // literal += self.read_integer()? as usize;\n\n // }\n\n\n\n // // Now we know the literal length. The number will be used to indicate how long the\n\n // // following literal copied to the output buffer is.\n\n\n\n // // Read the literals segment and output them without processing.\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 45, "score": 1.4772957022344957 }, { "content": " };\n\n }\n\n\n\n // Read the token. The token is the first byte in a block. It is divided into two 4-bit\n\n // subtokens, the higher and the lower.\n\n // This token contains to 4-bit \"fields\", a higher and a lower, representing the literals'\n\n // length and the back reference's length, respectively. 
LSIC is used if either are their\n\n // maximal values.\n\n let token = unsafe { *input.get_unchecked(input_pos) };\n\n input_pos += 1;\n\n\n\n // Checking for hot-loop.\n\n // In most cases the metadata does fit in a single 1byte token (statistically) and we are in a safe-distance to the end.\n\n // This enables some optmized handling.\n\n if does_token_fit(token) && is_safe_distance(input_pos, end_pos_check) {\n\n let literal_length = (token >> 4) as usize;\n\n\n\n #[cfg(feature = \"checked-decode\")]\n\n {\n\n // Check if literal is out of bounds for the input, and if there is enough space on the output\n", "file_path": "src/test_bins/profile_decomp.rs", "rank": 46, "score": 1.3767771219036478 }, { "content": " };\n\n }\n\n\n\n // Read the token. The token is the first byte in a block. It is divided into two 4-bit\n\n // subtokens, the higher and the lower.\n\n // This token contains to 4-bit \"fields\", a higher and a lower, representing the literals'\n\n // length and the back reference's length, respectively. 
LSIC is used if either are their\n\n // maximal values.\n\n let token = unsafe { *input.get_unchecked(input_pos) };\n\n input_pos += 1;\n\n\n\n // Checking for hot-loop.\n\n // In most cases the metadata does fit in a single 1byte token (statistically) and we are in a safe-distance to the end.\n\n // This enables some optmized handling.\n\n if does_token_fit(token) && is_safe_distance(input_pos, end_pos_check) {\n\n let literal_length = (token >> 4) as usize;\n\n\n\n #[cfg(feature = \"checked-decode\")]\n\n {\n\n // Check if literal is out of bounds for the input, and if there is enough space on the output\n", "file_path": "src/block/decompress.rs", "rank": 47, "score": 1.3767771219036478 }, { "content": "/// Switch for the hashtable size byU16\n\n// #[allow(dead_code)]\n\n// static LZ4_64KLIMIT: u32 = (64 * 1024) + (MFLIMIT - 1);\n\n\n\n// pub(crate) fn hash(sequence: u32) -> u32 {\n\n// (sequence.wrapping_mul(2654435761_u32)) >> (1 + (MINMATCH as u32 * 8) - (LZ4_HASHLOG + 1))\n\n// }\n\n\n\n// fn wild_copy_from_src(mut source: *const u8, mut dst_ptr: *mut u8, num_items: usize) {\n\n// // output.reserve(num_items);\n\n// unsafe {\n\n// // let mut dst_ptr = output.as_mut_ptr().add(output.len());\n\n// let dst_ptr_end = dst_ptr.add(num_items);\n\n\n\n// while dst_ptr < dst_ptr_end {\n\n// std::ptr::copy_nonoverlapping(source, dst_ptr, 8);\n\n// source = source.add(8);\n\n// dst_ptr = dst_ptr.add(8);\n\n// }\n\n// }\n", "file_path": "src/test_bins/profile_comp.rs", "rank": 48, "score": 1.3665601877130866 }, { "content": " loop {\n\n if input.len() < input_pos + 1 {\n\n return Err(DecompressError::LiteralOutOfBounds);\n\n };\n\n\n\n // Read the token. The token is the first byte in a block. It is divided into two 4-bit\n\n // subtokens, the higher and the lower.\n\n // This token contains to 4-bit \"fields\", a higher and a lower, representing the literals'\n\n // length and the back reference's length, respectively. 
LSIC is used if either are their\n\n // maximal values.\n\n let token = input[input_pos];\n\n input_pos += 1;\n\n\n\n // Checking for hot-loop.\n\n // In most cases the metadata does fit in a single 1byte token (statistically) and we are in a safe-distance to the end.\n\n // This enables some optimized handling.\n\n if does_token_fit(token) && is_safe_distance(input_pos, end_pos_check) {\n\n let literal_length = (token >> 4) as usize;\n\n\n\n if input.len() < input_pos + literal_length {\n", "file_path": "src/block/decompress_safe.rs", "rank": 49, "score": 1.3537490756228685 }, { "content": "// fn bench_decompression_medium(b: &mut test::Bencher) {\n\n// let comp = compress(r#\"An iterator that knows its exact length.\n\n// Many Iterators don't know how many times they will iterate, but some do. If an iterator knows how many times it can iterate, providing access to that information can be useful. For example, if you want to iterate backwards, a good start is to know where the end is.\n\n// When implementing an ExactSizeIterator, you must also implement Iterator. When doing so, the implementation of size_hint must return the exact size of the iterator.\n\n// The len method has a default implementation, so you usually shouldn't implement it. However, you may be able to provide a more performant implementation than the default, so overriding it in this case makes sense.\"#.as_bytes());\n\n// b.iter(|| {\n\n// decompress(&comp)\n\n// })\n\n// }\n\n\n\n// #[bench]\n\n// fn bench_decompression_10_mb(b: &mut test::Bencher) {\n\n// let comp = compress(COMPRESSION10MB);\n\n// b.iter(|| {\n\n// decompress(&comp)\n\n// })\n\n// }\n\n\n\n/// Test that the compressed string decompresses to the original string.\n", "file_path": "tests/tests.rs", "rank": 50, "score": 1.2302824620756994 }, { "content": " /// parts:\n\n ///\n\n /// 1. A 16-bit little-endian integer defining the \"offset\", i.e. how long back we need to go\n\n /// in the decoded buffer and copy.\n\n /// 2. 
An LSIC integer extension to the duplicate length as defined by the first part of the\n\n /// token, if it takes the highest value (15).\n\n // #[inline(never)]\n\n fn read_duplicate_section(&mut self) -> Result<(), Error> {\n\n // Now, we will obtain the offset which we will use to copy from the output. It is an\n\n // 16-bit integer.\n\n let offset = self.read_u16()?;\n\n // Obtain the initial match length. The match length is the length of the duplicate segment\n\n // which will later be copied from data previously decompressed into the output buffer. The\n\n // initial length is derived from the second part of the token (the lower nibble), we read\n\n // earlier. Since having a match length of less than 4 would mean negative compression\n\n // ratio, we start at 4.\n\n let mut match_length = (4 + (self.token & 0xF)) as usize;\n\n\n\n match match_length {\n\n 0 => self.match_unused += 1,\n", "file_path": "src/test_bins/decompress_with_stats.rs", "rank": 51, "score": 1.2210024900524443 }, { "content": "// fn bench_compression_small(b: &mut test::Bencher) {\n\n// b.iter(|| {\n\n// let _compressed = compress(\"To cute to die! Save the red panda!\".as_bytes());\n\n// })\n\n// }\n\n\n\n// #[bench]\n\n// fn bench_compression_medium(b: &mut test::Bencher) {\n\n// b.iter(|| {\n\n// let _compressed = compress(r#\"An iterator that knows its exact length.\n\n// Many Iterators don't know how many times they will iterate, but some do. If an iterator knows how many times it can iterate, providing access to that information can be useful. For example, if you want to iterate backwards, a good start is to know where the end is.\n\n// When implementing an ExactSizeIterator, you must also implement Iterator. When doing so, the implementation of size_hint must return the exact size of the iterator.\n\n// The len method has a default implementation, so you usually shouldn't implement it. 
However, you may be able to provide a more performant implementation than the default, so overriding it in this case makes sense.\"#.as_bytes());\n\n// })\n\n// }\n\n\n\n// #[bench]\n\n// fn bench_compression_65k(b: &mut test::Bencher) {\n\n// b.iter(|| {\n\n// compress(COMPRESSION65);\n", "file_path": "tests/tests.rs", "rank": 52, "score": 1.1939839682977187 }, { "content": "// use quickbench::bench_gen_env;\n\n\n\n// const COMPRESSION1K: &'static [u8] = include_bytes!(\"compression_1k.txt\");\n\n// const COMPRESSION34K: &'static [u8] = include_bytes!(\"compression_34k.txt\");\n\n// const COMPRESSION65K: &'static [u8] = include_bytes!(\"compression_65k.txt\");\n\n// const COMPRESSION66K: &'static [u8] = include_bytes!(\"compression_66k_JSON.txt\");\n\n// const COMPRESSION95K_VERY_GOOD_LOGO: &'static [u8] = include_bytes!(\"../logo.jpg\");\n\n\n\n// fn main() {\n\n\n\n// // let inputs = [COMPRESSION1K\n\n// // COMPRESSION34K\n\n// // COMPRESSION65K\n\n// // COMPRESSION66K\n\n// // COMPRESSION95K_VERY_GOOD_LOGO];\n\n\n\n// println!(\"{}\", bench_gen_env(\"COMPRESSION1K\", COMPRESSION1K.len(), || &COMPRESSION1K, |xs| lz4_flex::compress(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"COMPRESSION34K\", COMPRESSION34K.len(), || &COMPRESSION34K, |xs| lz4_flex::compress(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"COMPRESSION65K\", COMPRESSION65K.len(), || &COMPRESSION65K, |xs| lz4_flex::compress(&xs)));\n\n// println!(\"{}\", bench_gen_env(\"COMPRESSION66K\", COMPRESSION66K.len(), || &COMPRESSION66K, |xs| lz4_flex::compress(&xs)));\n", "file_path": "benches/bench.rs", "rank": 53, "score": 1.1353625419107312 } ]
Rust
src/record.rs
phip1611/beat-detector
ad00de5348bb0325298abd5a1e31d59d5f34845e
/* MIT License Copyright (c) 2021 Philipp Schuster Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use crate::{BeatInfo, StrategyKind}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; use cpal::{BufferSize, Device, Host, InputCallbackInfo, SampleFormat, StreamConfig, StreamError}; use std::collections::{BTreeMap, HashMap}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{spawn, JoinHandle}; use std::time::Instant; pub fn start_listening( on_beat_cb: impl Fn(BeatInfo) + Send + 'static, input_dev: Option<Device>, strategy: StrategyKind, keep_recording: Arc<AtomicBool>, ) -> Result<JoinHandle<()>, String> { if !keep_recording.load(Ordering::SeqCst) { return Err("Variable keep_recording is false from the beginning!?".to_string()); } let in_dev = input_dev.map(Ok).unwrap_or_else(|| { let host = cpal::default_host(); host.default_input_device() .ok_or_else(|| "Must have input device!".to_string()) })?; let in_dev_cfg = in_dev.default_input_config().unwrap(); let sampling_rate = in_dev_cfg.sample_rate(); let sample_format = in_dev_cfg.sample_format(); eprintln!("Using input device: {:?}", in_dev.name().unwrap()); eprintln!(" sampling_rate: {}", sampling_rate.0); eprintln!(" sample_format: {:?}", sample_format); let err_cb = |err: StreamError| { eprintln!("Record error occurred: {:#?}", err); }; #[cfg(not(target_os = "linux"))] let preferred_window_length = 1024; let in_stream_cfg = StreamConfig { channels: 1, sample_rate: sampling_rate, #[cfg(not(target_os = "linux"))] buffer_size: BufferSize::Fixed(preferred_window_length), #[cfg(target_os = "linux")] buffer_size: BufferSize::Default, }; let detector = strategy.detector(sampling_rate.0); let handle = spawn(move || { let stream = match sample_format { SampleFormat::F32 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[f32], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(&f32_data_to_i16(data)) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } 
}, err_cb, ), SampleFormat::I16 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[i16], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(data) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } }, err_cb, ), SampleFormat::U16 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[u16], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(&u16_data_to_i16(data)) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } }, err_cb, ), } .map_err(|err| format!("Can't open stream: {:?}", err)) .unwrap(); stream.play().unwrap(); loop { if !keep_recording.load(Ordering::SeqCst) { break; } } }); Ok(handle) } #[inline(always)] fn u16_data_to_i16(data: &[u16]) -> Vec<i16> { data.iter() .map(|x| *x as i32) .map(|x| x - i16::MAX as i32 / 2) .map(|x| x as i16) .collect() } #[inline(always)] fn f32_data_to_i16(data: &[f32]) -> Vec<i16> { data.iter() .map(|x| x * i16::MAX as f32) .map(|x| x as i16) .collect() } pub fn audio_input_device_list() -> BTreeMap<String, Device> { let host = cpal::default_host(); let mut map = BTreeMap::new(); for (i, dev) in host.input_devices().unwrap().enumerate() { map.insert(dev.name().unwrap_or(format!("Unknown device #{}", i)), dev); } map } pub fn print_audio_input_device_configs() { let host = cpal::default_host(); for (i, dev) in host.input_devices().unwrap().enumerate() { eprintln!("--------"); let name = dev.name().unwrap_or(format!("Unknown device #{}", i)); eprintln!("[{}] default config:", name); eprintln!("{:#?}", dev.default_input_config().unwrap()); } } pub fn get_backends() -> HashMap<String, Host> { cpal::available_hosts() .into_iter() .map(|id| (format!("{:?}", id), cpal::host_from_id(id).unwrap())) .collect::<HashMap<_, _>>() } #[cfg(test)] mod tests { use super::*; }
/* MIT License Copyright (c) 2021 Philipp Schuster Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use crate::{BeatInfo, StrategyKind}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; use cpal::{BufferSize, Device, Host, InputCallbackInfo, SampleFormat, StreamConfig, StreamError}; use std::collections::{BTreeMap, HashMap}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{spawn, JoinHandle}; use std::time::Instant; pub fn start_listening( on_beat_cb: impl Fn(BeatInfo) + Send + 'static, input_dev: Option<Device>, strategy: StrategyKind, keep_recording: Arc<AtomicBool>, ) -> Result<JoinHandle<()>, String> { if !keep_recording.load(Ordering::SeqCst) { return Err("Variable keep_recording is false from the beginning!?".to_string()); } let in_dev = input_dev.map(Ok).unwrap_or_else(|| { let host = cpal::default_host(); host.default_input_device() .ok_or_else(|| "Must have input device!".to_string()) })?; let in_dev_cfg = in_dev.default_input_config().unwrap(); let sampling_rate = in_dev_cfg.sample_rate(); let sample_format = in_dev_cfg.sample_format(); eprintln!("Using input device: {:?}", in_dev.name().unwrap()); eprintln!(" sampling_rate: {}", sampling_rate.0); eprintln!(" sample_format: {:?}", sample_format); let err_cb = |err: StreamError| { eprintln!("Record error occurred: {:#?}", err); }; #[cfg(not(target_os = "linux"))] let preferred_window_length = 1024; let in_stream_cfg = StreamConfig { channels: 1, sample_rate: sampling_rate, #[cfg(not(target_os = "linux"))] buffer_size: BufferSize::Fixed(preferred_window_length), #[cfg(target_os = "linux")] buffer_size: BufferSize::Default, }; let detector = strategy.detector(sampling_rate.0); let handle = spawn(move || { let stream = match sample_format { SampleFormat::F32 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[f32], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(&f32_data_to_i16(data)) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } 
}, err_cb, ), SampleFormat::I16 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[i16], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(data) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } }, err_cb, ), SampleFormat::U16 => in_dev.build_input_stream( &in_stream_cfg, move |data: &[u16], _info: &InputCallbackInfo| { let now = Instant::now(); if let Some(info) = detector.is_beat(&u16_data_to_i16(data)) { on_beat_cb(info); } let millis = now.elapsed().as_millis(); if millis > 20 { eprintln!("calculation took {}ms", millis); } }, err_cb, ), } .map_err(|err| format!("Can't open stream: {:?}", err)) .unwrap();
#[inline(always)] fn u16_data_to_i16(data: &[u16]) -> Vec<i16> { data.iter() .map(|x| *x as i32) .map(|x| x - i16::MAX as i32 / 2) .map(|x| x as i16) .collect() } #[inline(always)] fn f32_data_to_i16(data: &[f32]) -> Vec<i16> { data.iter() .map(|x| x * i16::MAX as f32) .map(|x| x as i16) .collect() } pub fn audio_input_device_list() -> BTreeMap<String, Device> { let host = cpal::default_host(); let mut map = BTreeMap::new(); for (i, dev) in host.input_devices().unwrap().enumerate() { map.insert(dev.name().unwrap_or(format!("Unknown device #{}", i)), dev); } map } pub fn print_audio_input_device_configs() { let host = cpal::default_host(); for (i, dev) in host.input_devices().unwrap().enumerate() { eprintln!("--------"); let name = dev.name().unwrap_or(format!("Unknown device #{}", i)); eprintln!("[{}] default config:", name); eprintln!("{:#?}", dev.default_input_config().unwrap()); } } pub fn get_backends() -> HashMap<String, Host> { cpal::available_hosts() .into_iter() .map(|id| (format!("{:?}", id), cpal::host_from_id(id).unwrap())) .collect::<HashMap<_, _>>() } #[cfg(test)] mod tests { use super::*; }
stream.play().unwrap(); loop { if !keep_recording.load(Ordering::SeqCst) { break; } } }); Ok(handle) }
function_block-function_prefix_line
[ { "content": "fn select_strategy() -> StrategyKind {\n\n println!(\"Available beat detection strategies:\");\n\n StrategyKind::values()\n\n .into_iter()\n\n .enumerate()\n\n .for_each(|(i, s)| {\n\n println!(\" [{}] {} - {}\", i, s.name(), s.description());\n\n });\n\n println!(\"Select strategy: input id and enter:\");\n\n let mut input = String::new();\n\n while stdin().read_line(&mut input).unwrap() == 0 {}\n\n let input = input\n\n .trim()\n\n .parse::<u8>()\n\n .expect(\"Input must be a valid number!\");\n\n match input {\n\n 0 => StrategyKind::LPF,\n\n 1 => StrategyKind::Spectrum,\n\n _ => panic!(\"Invalid strategy!\"),\n\n }\n\n}\n", "file_path": "examples/audio_input_beat_detection.rs", "rank": 4, "score": 93452.0099853567 }, { "content": "fn select_input_device(devs: BTreeMap<String, Device>) -> Device {\n\n println!(\"Available audio devices:\");\n\n for (i, (name, _)) in devs.iter().enumerate() {\n\n println!(\" [{}] {}\", i, name);\n\n }\n\n println!(\"Select audio device: input device number and enter:\");\n\n let mut input = String::new();\n\n while stdin().read_line(&mut input).unwrap() == 0 {}\n\n let input = input\n\n .trim()\n\n .parse::<u8>()\n\n .expect(\"Input must be a valid number!\");\n\n devs.into_iter()\n\n .enumerate()\n\n .filter(|(i, _)| *i == input as usize)\n\n .map(|(_i, (_name, dev))| dev)\n\n .take(1)\n\n .next()\n\n .unwrap()\n\n}\n\n\n", "file_path": "examples/audio_input_beat_detection.rs", "rank": 6, "score": 85427.26551065482 }, { "content": "fn select_strategy() -> StrategyKind {\n\n // todo implement user selection\n\n StrategyKind::Spectrum\n\n}\n", "file_path": "examples/minimal.rs", "rank": 7, "score": 84379.72760629944 }, { "content": "#[inline(always)]\n\npub fn sleep_busy_waiting_ms(ms: u64) {\n\n let target_time = Instant::now().add(Duration::from_millis(ms));\n\n loop {\n\n if Instant::now() >= target_time {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 8, "score": 
79783.3223661994 }, { "content": "/// Returns n from args or default.\n\npub fn select_num_leds() -> u16 {\n\n println!(\"Input and enter how many LEDs are connected to your device (64, 150, ..):\");\n\n let mut input = String::new();\n\n while stdin().read_line(&mut input).unwrap() == 0 {}\n\n let input = input\n\n .trim()\n\n .parse::<u16>()\n\n .expect(\"Input must be a valid number!\");\n\n input\n\n}\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 9, "score": 78724.19044896573 }, { "content": "fn select_input_device(devs: BTreeMap<String, Device>) -> Device {\n\n println!(\"Available audio devices:\");\n\n for (i, (name, _)) in devs.iter().enumerate() {\n\n println!(\" [{}] {}\", i, name);\n\n }\n\n println!(\"Select audio device: input device number and enter:\");\n\n let mut input = String::new();\n\n while stdin().read_line(&mut input).unwrap() == 0 {}\n\n let input = input\n\n .trim()\n\n .parse::<u8>()\n\n .expect(\"Input must be a valid number!\");\n\n devs.into_iter()\n\n .enumerate()\n\n .filter(|(i, _)| *i == input as usize)\n\n .map(|(_i, (_name, dev))| dev)\n\n .take(1)\n\n .next()\n\n .unwrap()\n\n}\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 10, "score": 78401.37171081727 }, { "content": "fn select_strategy() -> StrategyKind {\n\n println!(\"Available beat detection strategies:\");\n\n StrategyKind::values()\n\n .into_iter()\n\n .enumerate()\n\n .for_each(|(i, s)| {\n\n println!(\" [{}] {} - {}\", i, s.name(), s.description());\n\n });\n\n println!(\"Select strategy: input id and enter:\");\n\n let mut input = String::new();\n\n while stdin().read_line(&mut input).unwrap() == 0 {}\n\n let input = input\n\n .trim()\n\n .parse::<u8>()\n\n .expect(\"Input must be a valid number!\");\n\n match input {\n\n 0 => StrategyKind::LPF,\n\n 1 => StrategyKind::Spectrum,\n\n _ => panic!(\"Invalid strategy!\"),\n\n }\n\n}\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 11, "score": 
76723.85283434682 }, { "content": "fn select_input_device() -> Device {\n\n // todo implement user selection\n\n beat_detector::record::audio_input_device_list()\n\n .into_iter()\n\n .next()\n\n .expect(\"At least one audio input device must be available.\")\n\n .1\n\n}\n\n\n", "file_path": "examples/minimal.rs", "rank": 12, "score": 70756.96056400331 }, { "content": "/// Common abstraction over a beat detection strategy. Each strategy keeps ongoing\n\n/// audio samples, for example from microphone. Strategies should have an internal\n\n/// mutable state via interior mutability to compare sample windows (and analysis)\n\n/// against previous values.\n\npub trait Strategy {\n\n /// Checks if inside the samples window a new beat was recognized.\n\n /// If so, it returns `Some` with [`BeatInfo`] as payload.\n\n ///\n\n /// Implementations may buffer previous samples and combine them with the latest,\n\n /// i.e. make a sliding window.\n\n fn is_beat(&self, samples: &[i16]) -> Option<BeatInfo>;\n\n\n\n /// Convenient getter to get the [`StrategyKind`] of a strategy.\n\n /// This is a 1:1 mapping.\n\n fn kind(&self) -> StrategyKind;\n\n\n\n /// A nice name for the algorithm, displayable in user interfaces.\n\n // \"where Self: Sized\" => compiler gave me this hint\n\n // => prevents \"`Strategy` cannot be made into an object\"\n\n fn name() -> &'static str\n\n where\n\n Self: Sized;\n\n\n\n /// A textual description of the algorithm to help the user to select\n", "file_path": "src/lib.rs", "rank": 14, "score": 54896.301075378375 }, { "content": "#[inline(always)]\n\npub fn darken_rgb(r: u8, g: u8, b: u8, factor: f32) -> (u8, u8, u8) {\n\n (\n\n ((r as f32) * factor) as u8,\n\n ((g as f32) * factor) as u8,\n\n ((b as f32) * factor) as u8,\n\n )\n\n}\n\n\n\nconst MOVING_LIGHT_IMPULSE_LEN: usize = 15;\n\n\n\npub struct MovingLightStripsAnimation {\n\n led_count: usize,\n\n rgb_strip_vec_data: Vec<(u8, u8, u8)>,\n\n new_rgb_data_vec: Vec<(u8, u8, u8)>,\n\n}\n\n\n\nimpl 
MovingLightStripsAnimation {\n\n pub fn new(mut led_count: usize) -> Self {\n\n if led_count % 2 != 0 {\n\n led_count = led_count + 1;\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 15, "score": 53357.322351201474 }, { "content": "#[inline(always)]\n\npub fn sleep_busy_waiting_until(then: Instant) {\n\n loop {\n\n if Instant::now() >= then {\n\n break;\n\n }\n\n }\n\n}\n\n\n\n/// Returns a pixel with a random color and a minimal\n\n/// brightness. Tries to get real colors instead of white.\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 16, "score": 46770.78600328394 }, { "content": "fn main() {\n\n let recording = Arc::new(AtomicBool::new(true));\n\n let recording_cpy = recording.clone();\n\n ctrlc::set_handler(move || {\n\n eprintln!(\"Stopping recording\");\n\n recording_cpy.store(false, Ordering::SeqCst);\n\n })\n\n .expect(\"Ctrl-C handler doesn't work\");\n\n\n\n let devs = beat_detector::record::audio_input_device_list();\n\n if devs.is_empty() {\n\n panic!(\"No audio input devices found!\")\n\n }\n\n let dev = if devs.len() > 1 {\n\n select_input_device(devs)\n\n } else {\n\n devs.into_iter().next().unwrap().1\n\n };\n\n let strategy = select_strategy();\n\n let on_beat = |info| {\n\n println!(\"Found beat at {:?}ms\", info);\n\n };\n\n let handle =\n\n beat_detector::record::start_listening(on_beat, Some(dev), strategy, recording).unwrap();\n\n\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "examples/audio_input_beat_detection.rs", "rank": 17, "score": 44391.64282349846 }, { "content": "#[inline(always)]\n\npub fn get_random_pixel_val() -> (u8, u8, u8) {\n\n const COLORS: [(u8, u8, u8); 28] = [\n\n // some colors are multiple times listed to increase\n\n // their probability.\n\n (255, 255, 255), // white\n\n (255, 0, 0), // red\n\n (255, 0, 0), // red\n\n (255, 0, 0), // red\n\n (0, 255, 0), // green\n\n (0, 0, 255), // blue\n\n (13, 255, 248), // turquoise\n\n (13, 255, 248), // turquoise\n\n (13, 255, 248), // 
turquoise\n\n (255, 168, 0), // dark orange\n\n (255, 168, 0), // dark orange\n\n (255, 189, 0), // bright orange\n\n (255, 189, 0), // bright orange\n\n (255, 189, 0), // bright orange\n\n (255, 255, 0), // yellow\n\n (255, 255, 0), // yellow\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 18, "score": 41877.42962333221 }, { "content": "/// Minimum example on how to use this library. Sets up the \"callback loop\".\n\nfn main() {\n\n let recording = Arc::new(AtomicBool::new(true));\n\n\n\n let recording_cpy = recording.clone();\n\n ctrlc::set_handler(move || {\n\n eprintln!(\"Stopping recording\");\n\n recording_cpy.store(false, Ordering::SeqCst);\n\n })\n\n .unwrap();\n\n\n\n let dev = select_input_device();\n\n let strategy = select_strategy();\n\n let on_beat = |info| {\n\n println!(\"Found beat at {:?}ms\", info);\n\n };\n\n // actually start listening in thread\n\n let handle =\n\n beat_detector::record::start_listening(on_beat, Some(dev), strategy, recording).unwrap();\n\n\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "examples/minimal.rs", "rank": 19, "score": 28515.781826102186 }, { "content": "/// Binary created for the Raspberry Pi which consumes audio\n\n/// and outputs light on WS2812 LEDs via the SPI device.\n\nfn main() {\n\n println!(\"make sure you have \\\"SPI\\\" on your Pi enabled and that MOSI-Pin is connected with DIN-Pin!\");\n\n let mut adapter = WS28xxSpiAdapter::new(\"/dev/spidev0.0\").unwrap();\n\n\n\n let num_leds = select_num_leds();\n\n let anim = MovingLightStripsAnimation::new(num_leds as usize);\n\n let anim = Arc::new(Mutex::new(anim));\n\n\n\n let recording = Arc::new(AtomicBool::new(true));\n\n let recording_cpy = recording.clone();\n\n ctrlc::set_handler(move || {\n\n eprintln!(\"Stopping recording\");\n\n recording_cpy.store(false, Ordering::SeqCst);\n\n })\n\n .expect(\"Ctrl-C handler doesn't work\");\n\n\n\n let devs = beat_detector::record::audio_input_device_list();\n\n if devs.is_empty() {\n\n 
panic!(\"No audio input devices found!\")\n\n }\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 20, "score": 25037.500707658455 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/strategies/spectrum.rs", "rank": 21, "score": 21527.03049225738 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/strategies/lpf.rs", "rank": 22, "score": 21527.03049225738 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/strategies/mod.rs", "rank": 23, "score": 21527.03049225738 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse crate::strategies::window_stats::WindowStats;\n\nuse crate::strategies::AnalysisState;\n\nuse crate::{BeatInfo, Strategy, StrategyKind};\n\nuse lowpass_filter as lpf;\n\n\n\n/// Struct to provide a beat-detection strategy using a\n\n/// lowpass filter.The algorithm is pretty basic/stupid.\n\n/// It's not smart enough to cope with 'complex' music, like\n\n/// most of today's pop. 
But it will give pretty good results\n\n/// in 'easy' music, like most of 90s pop hits.\n\n#[derive(Debug)]\n\npub struct LpfBeatDetector {\n\n state: AnalysisState,\n\n}\n\n\n\nimpl LpfBeatDetector {\n\n #[inline(always)]\n", "file_path": "src/strategies/lpf.rs", "rank": 24, "score": 21457.605040099686 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse std::cell::Cell;\n\n\n\npub(crate) mod lpf;\n\npub(crate) mod spectrum;\n\npub mod window_stats;\n\n\n\n/// Structure that each [`super::Strategy`]-implementation shall use. It helps to keep\n\n/// internal state about the ongoing analysis, i.e. the progress in time. It is\n\n/// capable to work with different window/frame sizes. This is especially required\n\n/// for Linux because right now it doesn't work there to use a fixed buffer size -_-\n\n///\n\n/// This struct shall be updated live/on the fly while music is recorded. Therefore,\n\n/// the time inside this struct is almost the real (relative) time from the beginning\n\n/// of recording, despite some latency.\n\n#[derive(Debug)]\n\npub struct AnalysisState {\n\n /// Sampling rate of the measurement. This is immutable, i.e it assumes that\n", "file_path": "src/strategies/mod.rs", "rank": 25, "score": 21455.30366784665 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse crate::strategies::window_stats::WindowStats;\n\nuse crate::strategies::AnalysisState;\n\nuse crate::{BeatInfo, Strategy, StrategyKind};\n\nuse spectrum_analyzer::FrequencyLimit;\n\nuse ringbuffer::{ConstGenericRingBuffer, RingBufferWrite, RingBufferExt};\n\nuse std::cell::RefCell;\n\nuse spectrum_analyzer::scaling::divide_by_N;\n\n\n\n/// Struct to provide a beat-detection strategy using a\n\n/// Spectrum Analysis. The algorithm is pretty basic/stupid.\n\n/// It's not smart enough to cope with 'complex' music, like\n\n/// most of today's pop. 
But it will give pretty good results\n\n/// in 'easy' music, like most of 90s pop hits.\n\n#[derive(Debug)]\n\npub struct SABeatDetector {\n\n state: AnalysisState,\n\n // ring buffer with latest audio; necessary because we don't\n", "file_path": "src/strategies/spectrum.rs", "rank": 26, "score": 21455.101896605218 }, { "content": " // necessarily get 1024 samples at each callback but mostly\n\n // 500-540..\n\n audio_data_buf: RefCell<ConstGenericRingBuffer<f32, 1024>>,\n\n}\n\n\n\nimpl SABeatDetector {\n\n #[inline(always)]\n\n pub fn new(sampling_rate: u32) -> Self {\n\n const LEN: usize = 1024;\n\n let mut initial_buf = ConstGenericRingBuffer::<f32, LEN>::new();\n\n (0..LEN).for_each(|_| initial_buf.push(0.0));\n\n Self {\n\n state: AnalysisState::new(sampling_rate),\n\n audio_data_buf: RefCell::from(initial_buf),\n\n }\n\n }\n\n}\n\n\n\nimpl Strategy for SABeatDetector {\n\n /// Callback called when the audio input library got the next callback.\n", "file_path": "src/strategies/spectrum.rs", "rank": 27, "score": 21451.90363985401 }, { "content": " })\n\n }\n\n\n\n #[inline(always)]\n\n fn kind(&self) -> StrategyKind {\n\n StrategyKind::LPF\n\n }\n\n\n\n fn name() -> &'static str\n\n where\n\n Self: Sized,\n\n {\n\n \"Simple Lowpass Filter\"\n\n }\n\n\n\n fn description() -> &'static str\n\n where\n\n Self: Sized,\n\n {\n\n \"A simple beat detection using a lowpass filter. It's not smart enough \\\n", "file_path": "src/strategies/lpf.rs", "rank": 28, "score": 21449.096067566945 }, { "content": " #[inline(always)]\n\n fn kind(&self) -> StrategyKind {\n\n StrategyKind::Spectrum\n\n }\n\n\n\n fn name() -> &'static str\n\n where\n\n Self: Sized,\n\n {\n\n \"Simple Spectrum Analysis\"\n\n }\n\n\n\n fn description() -> &'static str\n\n where\n\n Self: Sized,\n\n {\n\n \"A simple beat detection using a spectrum analysis. It's not smart enough \\\n\n to cope with 'complex' music, like most of today's pop. 
But it will give \\\n\n pretty good results in 'easy' music, like most of 90s pop hits.\"\n\n }\n", "file_path": "src/strategies/spectrum.rs", "rank": 29, "score": 21448.01774605002 }, { "content": " pub fn new(sampling_rate: u32) -> Self {\n\n Self {\n\n state: AnalysisState::new(sampling_rate),\n\n }\n\n }\n\n}\n\n\n\nimpl Strategy for LpfBeatDetector {\n\n /// Analyzes if inside the window of samples a beat was found after\n\n /// applying a lowpass filter onto the data.\n\n #[inline(always)]\n\n fn is_beat(&self, samples: &[i16]) -> Option<BeatInfo> {\n\n // tell the state beforehand that we are analyzing the next window - important!\n\n self.state.update_time(samples.len());\n\n // skip if distance to last beat is not fair away enough\n\n if !self.last_beat_beyond_threshold(&self.state) {\n\n return None;\n\n };\n\n // skip if the amplitude is too low, e.g. noise or silence between songs\n\n let w_stats = WindowStats::from(samples);\n", "file_path": "src/strategies/lpf.rs", "rank": 30, "score": 21447.340110959914 }, { "content": " Self {\n\n sampling_rate,\n\n // Hertz => second => milli seconds\n\n ms_per_sample: 1.0 / sampling_rate as f32 * 1000.0,\n\n beat_time_ms: Cell::new(0),\n\n time_ms: Cell::new(0),\n\n last_beat_timestamp: Cell::new(0),\n\n }\n\n }\n\n\n\n /// Updates the total passed internal time. It does so by calculating the milliseconds of\n\n /// the amount of (mono, not stereo!) 
samples for the given [`sampling_rate`].\n\n /// It always adds the timestamp in the middle of the current window/frame to\n\n /// the current value.\n\n #[inline(always)]\n\n pub fn update_time(&self, frame_len: usize) {\n\n // if 44,1kHz is sampling rate and we have 44,1k samples => 1s\n\n let ms_of_frame = self.ms_per_sample * frame_len as f32;\n\n self.beat_time_ms.set(\n\n // beat time is in the half of the window/frame\n", "file_path": "src/strategies/mod.rs", "rank": 31, "score": 21446.446383747552 }, { "content": " pub fn last_beat_timestamp(&self) -> u32 {\n\n self.last_beat_timestamp.get()\n\n }\n\n\n\n /// Getter for [`beat_time_ms`].\n\n #[inline(always)]\n\n pub fn beat_time_ms(&self) -> u32 {\n\n self.beat_time_ms.get()\n\n }\n\n\n\n /// Getter for [`time_ms`].\n\n #[inline(always)]\n\n pub fn time_ms(&self) -> u32 {\n\n self.time_ms.get()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/strategies/mod.rs", "rank": 32, "score": 21446.422493128306 }, { "content": " if !self.amplitude_high_enough(&w_stats) {\n\n return None;\n\n };\n\n\n\n const CUTOFF_FR: u16 = 120;\n\n let mut samples = samples.to_vec();\n\n lpf::simple::sp::apply_lpf_i16_sp(\n\n &mut samples,\n\n self.state.sampling_rate() as u16,\n\n CUTOFF_FR,\n\n );\n\n // check if after the low pass filter we still have high amplitude\n\n // => then this is dominant in the window\n\n let threshold = (0.77 * w_stats.max() as f32) as i16;\n\n let is_beat = samples.iter().any(|s| s.abs() >= threshold);\n\n\n\n is_beat.then(|| {\n\n // mark we found a beat\n\n self.state.update_last_discovered_beat_timestamp();\n\n BeatInfo::new(self.state.beat_time_ms())\n", "file_path": "src/strategies/lpf.rs", "rank": 33, "score": 21446.330792884855 }, { "content": " /// this value doesn't change during ongoing analysis. Value is for example\n\n /// 44100 Hz.\n\n sampling_rate: u32,\n\n /// Calculated once by `1 / ms_per_sample`. 
Done once to speed up calculation.\n\n ms_per_sample: f32,\n\n /// This is always a bit shorter than [`time_ms`]. It is the timestamp\n\n /// in the middle of the current frame, whereas [`time_ms`] is the timestamp\n\n /// at the end. This time will be attached to a beat if one was found in the\n\n /// current frame/window.\n\n beat_time_ms: Cell<u32>,\n\n /// The ongoing relative progress in time in ms of the recording.\n\n time_ms: Cell<u32>,\n\n /// Timestamp of last beat. This is always a value that was previously in\n\n /// [`beat_time_ms`].\n\n last_beat_timestamp: Cell<u32>,\n\n}\n\n\n\nimpl AnalysisState {\n\n /// Constructor for [`AnalysisState`].\n\n pub fn new(sampling_rate: u32) -> Self {\n", "file_path": "src/strategies/mod.rs", "rank": 34, "score": 21445.72622552415 }, { "content": "\n\n let spectrum = spectrum_analyzer::samples_fft_to_spectrum(\n\n &audio_data_buf.to_vec(),\n\n self.state.sampling_rate(),\n\n FrequencyLimit::Max(90.0),\n\n // None,\n\n Some(&divide_by_N),\n\n ).unwrap();\n\n\n\n // I don't know what the value really means :D\n\n // figured out by testing.. 
:/\n\n if spectrum.max().1.val() > 2_100_000.0 {\n\n // mark we found a beat\n\n self.state.update_last_discovered_beat_timestamp();\n\n Some(BeatInfo::new(self.state.beat_time_ms()))\n\n } else {\n\n None\n\n }\n\n }\n\n\n", "file_path": "src/strategies/spectrum.rs", "rank": 35, "score": 21445.70363225609 }, { "content": " assert_eq!(\n\n (23.0 * 1.5) as u32,\n\n state.beat_time_ms(),\n\n \"Must return timestamp in middle of second window\"\n\n );\n\n assert_eq!(\n\n 46,\n\n state.time_ms(),\n\n \"Must return timestamp at end of second window\"\n\n );\n\n\n\n // pretend we analyze the next window of only 317 samples\n\n state.update_time(317);\n\n assert_eq!(\n\n (46 + (317.0 / 2.0 * 1.0 / 44100.0 * 1000.0) as u32),\n\n state.beat_time_ms(),\n\n \"Must return timestamp in middle of third window\"\n\n );\n\n assert_eq!(\n\n (46 + (317.0 / 1.0 / 44100.0 * 1000.0) as u32),\n\n state.time_ms(),\n\n \"Must return timestamp at end of third window\"\n\n );\n\n }\n\n}\n", "file_path": "src/strategies/mod.rs", "rank": 36, "score": 21444.494382886103 }, { "content": "\n\n /// Value chosen at will. It is really high because this strategy\n\n /// is stupid. It's not smart enough to detect a \"slowly decreasing beat\",\n\n /// i.e. 
it may detect the same beat twice otherwise.\n\n #[inline(always)]\n\n fn min_duration_between_beats_ms() -> u32\n\n where\n\n Self: Sized,\n\n {\n\n 400\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n // use super::*;\n\n\n\n}", "file_path": "src/strategies/spectrum.rs", "rank": 37, "score": 21444.271435786453 }, { "content": " self.time_ms.get() + (ms_of_frame / 2.0) as u32,\n\n );\n\n self.time_ms.set(self.time_ms.get() + ms_of_frame as u32);\n\n }\n\n\n\n /// Updates the timestamp of the last received beat.\n\n /// The time is relative to the beginning and in ms.\n\n #[inline(always)]\n\n pub fn update_last_discovered_beat_timestamp(&self) {\n\n self.last_beat_timestamp.replace(self.beat_time_ms.get());\n\n }\n\n\n\n /// Getter for [`sampling_rate`].\n\n #[inline(always)]\n\n pub const fn sampling_rate(&self) -> u32 {\n\n self.sampling_rate\n\n }\n\n\n\n /// Getter for [`last_beat_timestamp`].\n\n #[inline(always)]\n", "file_path": "src/strategies/mod.rs", "rank": 38, "score": 21443.989301764173 }, { "content": "\n\n #[test]\n\n fn test_analysis_state_get_relative_time_ms() {\n\n // 1/44100 * 1024 == 0.02322s == 23,22ms per 1024 frames\n\n let state = AnalysisState::new(44100);\n\n // pretend we analyze the first window of 1024 samples\n\n state.update_time(1024);\n\n assert_eq!(\n\n 23 / 2,\n\n state.beat_time_ms(),\n\n \"Must return timestamp in middle of first window\"\n\n );\n\n assert_eq!(\n\n 23,\n\n state.time_ms(),\n\n \"Must return timestamp at end of first window\"\n\n );\n\n\n\n // pretend we analyze the next window of 1024 samples\n\n state.update_time(1024);\n", "file_path": "src/strategies/mod.rs", "rank": 39, "score": 21443.864508980023 }, { "content": " #[inline(always)]\n\n fn is_beat(&self, callback_samples: &[i16]) -> Option<BeatInfo> {\n\n // make sure we have the latest 1024 audio samples in the buffer\n\n // => ready for FFT\n\n let mut audio_data_buf = self.audio_data_buf.borrow_mut();\n\n for sample in callback_samples {\n\n 
audio_data_buf.push(*sample as f32);\n\n }\n\n\n\n // tell the state beforehand that we are analyzing the next window - important!\n\n self.state.update_time(callback_samples.len());\n\n // skip if distance to last beat is not far away enough\n\n if !self.last_beat_beyond_threshold(&self.state) {\n\n return None;\n\n };\n\n // skip if the amplitude is too low, e.g. noise or silence between songs\n\n let w_stats = WindowStats::from(callback_samples);\n\n if !self.amplitude_high_enough(&w_stats) {\n\n return None;\n\n };\n", "file_path": "src/strategies/spectrum.rs", "rank": 40, "score": 21443.849809695643 }, { "content": " to cope with 'complex' music, like most of today's pop. But it will give \\\n\n pretty good results in 'easy' music, like most of 90s pop hits.\"\n\n }\n\n\n\n /// Value chosen at will. It is really high because this strategy\n\n /// is stupid. It's not smart enough to detect a \"slowly decreasing beat\",\n\n /// i.e. it may detect the same beat twice otherwise.\n\n #[inline(always)]\n\n fn min_duration_between_beats_ms() -> u32\n\n where\n\n Self: Sized,\n\n {\n\n 400\n\n }\n\n}\n", "file_path": "src/strategies/lpf.rs", "rank": 41, "score": 21441.932632287833 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE 
WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/audio_input_beat_detection.rs", "rank": 42, "score": 20515.523564476778 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse beat_detector::StrategyKind;\n\nuse cpal::Device;\n\nuse std::collections::BTreeMap;\n\nuse std::io::stdin;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\n\n", "file_path": "examples/audio_input_beat_detection.rs", "rank": 43, "score": 20447.498361908598 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/strategies/window_stats.rs", "rank": 44, "score": 20272.68961898071 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Module for struct \"WindowStats\", i.e. analysis on sample windows.\n\n\n\n/// Holds information about the (original) audio window/data\n\n/// e.g. the maximum amplitude.\n\n#[derive(Debug)]\n\npub struct WindowStats {\n\n // the maximum amplitude inside a signed 16 bit sampled audio data window\n\n max: u16,\n\n}\n\n\n\nimpl WindowStats {\n\n #[inline(always)]\n\n pub const fn max(&self) -> u16 {\n\n self.max\n\n }\n\n}\n\n\n", "file_path": "src/strategies/window_stats.rs", "rank": 45, "score": 20201.980642562 }, { "content": "impl From<&[i16]> for WindowStats {\n\n #[inline(always)]\n\n fn from(samples: &[i16]) -> Self {\n\n let mut abs_samples_ordered = samples\n\n .iter()\n\n // to prevent any overflow in next step\n\n .map(|x| if *x == i16::MIN { x + 1 } else { *x })\n\n .map(|x| x.abs())\n\n .collect::<Vec<_>>();\n\n abs_samples_ordered.sort_unstable();\n\n let max = *abs_samples_ordered.last().unwrap() as u16;\n\n\n\n Self { max }\n\n }\n\n}\n", "file_path": "src/strategies/window_stats.rs", "rank": 46, "score": 20196.253008148393 }, { "content": "## How To Use\n\n**Cargo.toml**\n\n```toml\n\nbeat-detector = \"<latest version>\"\n\n```\n\n\n\n**code.rs**\n\n(also see `examples/` in repository!)\n\n```rust\n\n//! Minimum example on how to use this library. Sets up the \"callback loop\".\n\n\n\nuse cpal::Device;\n\nuse beat_detector::StrategyKind;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\n\n\n/// Minimum example on how to use this library. 
Sets up the \"callback loop\".\n\nfn main() {\n\n let recording = Arc::new(AtomicBool::new(true));\n\n\n\n let recording_cpy = recording.clone();\n\n ctrlc::set_handler(move || {\n\n eprintln!(\"Stopping recording\");\n\n recording_cpy.store(false, Ordering::SeqCst);\n\n }).unwrap();\n\n\n\n let dev = select_input_device();\n\n let strategy = select_strategy();\n\n let on_beat = |info| {\n\n println!(\"Found beat at {:?}ms\", info);\n\n };\n\n // actually start listening in thread\n\n let handle = beat_detector::record::start_listening(\n\n on_beat,\n\n Some(dev),\n\n strategy,\n\n recording,\n\n ).unwrap();\n\n\n\n handle.join().unwrap();\n\n}\n\n\n\nfn select_input_device() -> Device {\n\n // todo implement user selection\n\n beat_detector::record::audio_input_device_list().into_iter().next().expect(\"At least one audio input device must be available.\").1\n\n}\n\n\n\nfn select_strategy() -> StrategyKind {\n\n // todo implement user selection\n\n StrategyKind::Spectrum\n\n}\n\n```\n\n\n\n## MSRV (Minimal Supported Rust Version)\n\n1.52.1 stable\n", "file_path": "README.md", "rank": 47, "score": 14485.367189383553 }, { "content": "# Beat Detector - Audio Beat Detection Library Written In Rust\n\n\n\nThis is a Rust library that enables beat detection on live audio data input.\n\nOne use case is that you have an audio/aux-splitter on your computer where one \n\nend goes into the sound system whereas the other goes into the microphone input \n\nof a Raspberry Pi.\n\n\n\nThe crate provides multiple strategies that you can connect to the audio source.\n\nSo far it offers two strategies:\n\n- **Simple Lowpass Filter**\n\n - not really good, must be more fine-tuned\n\n- **Simple Spectrum Analysis**\n\n - good enough for most \"simple\" songs, like 90s pop hits or \"Kids\" by \"MGMT\"\n\n- Super Awesome Analysis (TODO) - **CODE CONTRIBUTIONS ARE WELCOME**\n\n\n\nI'm not an expert in audio analysis, but I'm proud what I achieved so far with the spectrum strategy.\n\nThis 
library needs a more \"bulletproof\" strategy, to cope with complex and fast songs.\n\n\n\nHere's a demo I recorded in my room. Of course, it was synced to music, when I recorded it. :)\n\n\n\n![Beat Detection Demo With WS2812 RGBs](demo.gif \"Beat Detection Demo With WS2812 RGBs\")\n\n\n", "file_path": "README.md", "rank": 48, "score": 14468.684526211102 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/minimal.rs", "rank": 49, "score": 88.75215415170946 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 51, "score": 88.75215415170946 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/lib.rs", "rank": 52, "score": 88.75215415170943 }, { "content": " /// Convenient wrapper for ['Strategy::description'].\n\n pub fn description(&self) -> &'static str {\n\n match self {\n\n StrategyKind::LPF => LpfBeatDetector::description(),\n\n StrategyKind::Spectrum => SABeatDetector::description(),\n\n // _ => panic!(\"Unknown Strategy\"),\n\n }\n\n }\n\n\n\n /// Returns a vector with all strategy kinds to iterate over them.\n\n pub fn values() -> Vec<Self> {\n\n vec![Self::LPF, Self::Spectrum]\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use minimp3::{Decoder as Mp3Decoder, Error as Mp3Error, Frame as Mp3Frame};\n\n use std::collections::HashMap;\n", "file_path": "src/lib.rs", "rank": 58, "score": 20.394623473759413 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Minimum example on how to use this library. Sets up the \"callback loop\".\n\n\n\nuse beat_detector::StrategyKind;\n\nuse cpal::Device;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\n\n\n/// Minimum example on how to use this library. Sets up the \"callback loop\".\n", "file_path": "examples/minimal.rs", "rank": 59, "score": 20.06764926031875 }, { "content": " #[inline(always)]\n\n fn amplitude_high_enough(&self, w_stats: &WindowStats) -> bool {\n\n const MIN_AMPLITUDE_THRESHOLD: i16 = (i16::MAX as f32 * 0.3) as i16;\n\n w_stats.max() >= MIN_AMPLITUDE_THRESHOLD as u16\n\n }\n\n}\n\n\n\n/// Enum that conveniently and easily makes all [`Strategy`]s provided by this crate accessible.\n\n/// This enum provides the bare minimum functionality to access the strategies. 
All deeper\n\n/// functionality must be defined inside the implementations.\n\n#[derive(Debug, PartialEq, Eq, Hash)]\n\n#[non_exhaustive] // more will come in the future\n\npub enum StrategyKind {\n\n /// Corresponds to [`strategies::lpf::LpfBeatDetector`].\n\n LPF,\n\n /// Corresponds to [`strategies::spectrum::SABeatDetector`]\n\n Spectrum,\n\n}\n\n\n\nimpl StrategyKind {\n", "file_path": "src/lib.rs", "rank": 60, "score": 19.97155827217368 }, { "content": " /// Creates a concrete detector object, i.e. a struct that implements\n\n /// [`Strategy`] on that you can continuously analyze your input audio data.\n\n #[inline(always)]\n\n fn detector(&self, sampling_rate: u32) -> Box<dyn Strategy + Send> {\n\n match self {\n\n StrategyKind::LPF => Box::new(LpfBeatDetector::new(sampling_rate)),\n\n StrategyKind::Spectrum => Box::new(SABeatDetector::new(sampling_rate)),\n\n // _ => panic!(\"Unknown Strategy\"),\n\n }\n\n }\n\n\n\n /// Convenient wrapper for ['Strategy::name'].\n\n pub fn name(&self) -> &'static str {\n\n match self {\n\n StrategyKind::LPF => LpfBeatDetector::name(),\n\n StrategyKind::Spectrum => SABeatDetector::name(),\n\n // _ => panic!(\"Unknown Strategy\"),\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 61, "score": 19.202281673894152 }, { "content": " let dev = if devs.len() > 1 {\n\n select_input_device(devs)\n\n } else {\n\n devs.into_iter().next().unwrap().1\n\n };\n\n let strategy = select_strategy();\n\n let anim_t = anim.clone();\n\n let on_beat = move |info| {\n\n println!(\"Found beat at {:?}ms\", info);\n\n anim_t.lock().unwrap().add_next_light_impulse();\n\n };\n\n let handle =\n\n beat_detector::record::start_listening(on_beat, Some(dev), strategy, recording.clone())\n\n .unwrap();\n\n\n\n while recording.load(Ordering::SeqCst) {\n\n let next_timestamp = Instant::now().add(Duration::from_millis(ANIMATION_FREQUENCY_MS));\n\n {\n\n // drop lock early\n\n let mut anim = anim.lock().unwrap();\n", "file_path": 
"examples/ws2812_spi_light_on_beat.rs", "rank": 62, "score": 19.06895152634458 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse beat_detector::StrategyKind;\n\nuse cpal::Device;\n\nuse std::collections::BTreeMap;\n\nuse std::io::stdin;\n\nuse std::ops::Add;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::{Duration, Instant};\n\nuse ws2818_rgb_led_spi_driver::adapter_gen::WS28xxAdapter;\n\nuse ws2818_rgb_led_spi_driver::adapter_spi::WS28xxSpiAdapter;\n\n\n\n// LED steps per second\n\npub const ANIMATION_FREQUENCY: u64 = 90; // in Hz\n\npub const ANIMATION_FREQUENCY_MS: u64 = 1000 / ANIMATION_FREQUENCY;\n\n\n\n/// Binary created for the Raspberry Pi which consumes audio\n\n/// and outputs light on WS2812 LEDs via the SPI device.\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 63, "score": 18.03460441246454 }, { "content": " }\n\n map.insert(strategy, beats);\n\n }\n\n\n\n map\n\n }\n\n\n\n /// Reads an MP3 and returns the audio data as mono channel + the sampling rate in Hertz.\n\n fn read_mp3_to_mono(file: &str) -> (Vec<i16>, u32) {\n\n let mut decoder = Mp3Decoder::new(File::open(file).unwrap());\n\n\n\n let mut sampling_rate = 0;\n\n let mut mono_samples = vec![];\n\n loop {\n\n match decoder.next_frame() {\n\n Ok(Mp3Frame {\n\n data: samples_of_frame,\n\n sample_rate,\n\n channels,\n\n ..\n", "file_path": "src/lib.rs", "rank": 64, "score": 15.935258250316988 }, { "content": "\n\nuse crate::strategies::lpf::LpfBeatDetector;\n\nuse crate::strategies::spectrum::SABeatDetector;\n\nuse crate::strategies::window_stats::WindowStats;\n\nuse crate::strategies::AnalysisState;\n\n\n\npub mod record;\n\nmod strategies;\n\n\n\n/// Struct that holds information about a detected beat.\n\n#[derive(Debug)]\n\npub struct BeatInfo {\n\n relative_ms: u32,\n\n // todo intensity\n\n}\n\nimpl BeatInfo {\n\n #[inline(always)]\n\n pub const fn 
new(relative_ms: u32) -> Self {\n\n Self { relative_ms }\n\n }\n", "file_path": "src/lib.rs", "rank": 65, "score": 15.251572233576969 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n\n\n#![deny(\n\nclippy::all,\n\nclippy::cargo,\n\nclippy::nursery,\n\n// clippy::restriction,\n\n// clippy::pedantic\n\n)]\n\n// now allow a few rules which are denied by the above statement\n\n// --> they are ridiculous and not necessary\n\n#![allow(\n\n clippy::suboptimal_flops,\n\n clippy::redundant_pub_crate,\n\n clippy::fallible_impl_from\n\n)]\n\n#![deny(missing_debug_implementations)]\n\n#![deny(rustdoc::all)]\n", "file_path": "src/lib.rs", "rank": 66, "score": 14.644692257428357 }, { "content": " for (i, beat) in beats.iter().enumerate() {\n\n let abs_diff =\n\n (SAMPLE_1_EXPECTED_BEATS_MS[i] as i64 - beat.relative_ms() as i64).abs() as u32;\n\n assert!(abs_diff < DIFF_ERROR_MS, \"[{:?}]: Recognized beat[{}] should not be more than {} ms away from the actual value; is {}ms\", strategy, i, DIFF_ERROR_MS, abs_diff);\n\n if abs_diff >= DIFF_WARN_MS {\n\n eprintln!(\"[{:?}]: WARN: Recognized beat[{}] should is less than {}ms away from the actual value; is: {}ms\", strategy, i, DIFF_WARN_MS, abs_diff);\n\n };\n\n }\n\n }\n\n }\n\n\n\n fn apply_samples_to_all_strategies(\n\n window_length: usize,\n\n samples: &[i16],\n\n _sampling_rate: u32,\n\n ) -> HashMap<StrategyKind, Vec<BeatInfo>> {\n\n // we pad with zeroes until the audio data length is a multiple\n\n // of the window length\n\n let mut samples = Vec::from(samples);\n\n let remainder = samples.len() % window_length;\n", "file_path": "src/lib.rs", "rank": 67, "score": 13.226429881230969 }, { "content": " use std::fs::File;\n\n\n\n // opened the file in Audacity and looked where the\n\n // beats are\n\n const SAMPLE_1_EXPECTED_BEATS_MS: [u32; 6] = [300, 2131, 2297, 4303, 6143, 6310];\n\n\n\n #[test]\n\n fn test_sample_1_print_beats() {\n\n let 
(sample_1_audio_data, sampling_rate) = read_mp3_to_mono(\"res/sample_1.mp3\");\n\n // assert 44,1kHz because it makes things easier\n\n assert_eq!(\n\n sampling_rate, 44100,\n\n \"The sampling rate of the MP3 examples must be 44100Hz.\"\n\n );\n\n\n\n // 1/44100 * 1024 == 1024/44100 == 0.046439s == 23,2ms\n\n let window_length = 1024;\n\n\n\n let map =\n\n apply_samples_to_all_strategies(window_length, &sample_1_audio_data, sampling_rate);\n", "file_path": "src/lib.rs", "rank": 68, "score": 13.145078297121323 }, { "content": " \"The sampling rate of the MP3 examples must be 44100Hz.\"\n\n );\n\n\n\n // 1/44100 * 1024 == 1024/44100 == 0.046439s == 23,2ms\n\n let window_length = 1024;\n\n\n\n let map =\n\n apply_samples_to_all_strategies(window_length, &sample_1_audio_data, sampling_rate);\n\n\n\n const DIFF_WARN_MS: u32 = 30;\n\n const DIFF_ERROR_MS: u32 = 60;\n\n\n\n for (strategy, beats) in map {\n\n assert_eq!(\n\n SAMPLE_1_EXPECTED_BEATS_MS.len(),\n\n beats.len(),\n\n \"Strategy {:?} must detect {} beats in sample 1!\",\n\n strategy,\n\n SAMPLE_1_EXPECTED_BEATS_MS.len()\n\n );\n", "file_path": "src/lib.rs", "rank": 69, "score": 12.257528784046794 }, { "content": " /// the right one.\n\n // \"where Self: Sized\" => compiler gave me this hint\n\n // => prevents \"`Strategy` cannot be made into an object\"\n\n fn description() -> &'static str\n\n where\n\n Self: Sized;\n\n\n\n /// Duration in ms after each beat. Useful do prevent the same beat to be\n\n /// detected as two beats. 
This is a constant per strategy, because more\n\n /// advanced strategies can cope with small durations (50ms) whereas\n\n /// \"stupid\"/basic strategies may need 400ms.\n\n /// This is a function instead of an associated constant, because\n\n /// otherwise the build fails with \"`Strategy` cannot be made into an object\"\n\n // \"where Self: Sized\" => compiler gave me this hint\n\n // => prevents \"`Strategy` cannot be made into an object\"\n\n fn min_duration_between_beats_ms() -> u32\n\n where\n\n Self: Sized;\n\n\n\n /// Common implementation for all strategies which checks if\n", "file_path": "src/lib.rs", "rank": 70, "score": 12.167036093473447 }, { "content": " }) => {\n\n // that's a bird weird of the original API. Why should channels or sampling\n\n // rate change from frame to frame?\n\n\n\n // Should be constant throughout the MP3 file.\n\n sampling_rate = sample_rate;\n\n\n\n if channels == 2 {\n\n for (i, sample) in samples_of_frame.iter().enumerate().step_by(2) {\n\n let sample = *sample as i32;\n\n let next_sample = samples_of_frame[i + 1] as i32;\n\n mono_samples.push(((sample + next_sample) as f32 / 2.0) as i16);\n\n }\n\n } else if channels == 1 {\n\n mono_samples.extend_from_slice(&samples_of_frame);\n\n } else {\n\n panic!(\"Unsupported number of channels={}\", channels);\n\n }\n\n }\n\n Err(Mp3Error::Eof) => break,\n\n Err(e) => panic!(\"{:?}\", e),\n\n }\n\n }\n\n\n\n (mono_samples, sampling_rate as u32)\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 71, "score": 10.634346059540391 }, { "content": " for i in 0..self.led_count / 2 {\n\n let i_left = i;\n\n let i_right = self.led_count - 1 - i;\n\n let is_in_center = i_left + 1 == i_right;\n\n\n\n if is_in_center {\n\n let new = self.new_rgb_data_vec.last().unwrap().clone();\n\n self.rgb_strip_vec_data[i_left] = new;\n\n self.rgb_strip_vec_data[i_right] = new;\n\n } else {\n\n let prev_left = self.rgb_strip_vec_data[i_left + 1].clone();\n\n self.rgb_strip_vec_data[i_left] = prev_left;\n\n 
let prev_right = self.rgb_strip_vec_data[i_right - 1].clone();\n\n self.rgb_strip_vec_data[i_right] = prev_right;\n\n }\n\n }\n\n\n\n for i in 0..MOVING_LIGHT_IMPULSE_LEN {\n\n let i = MOVING_LIGHT_IMPULSE_LEN - 1 - i;\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 72, "score": 10.633536828173998 }, { "content": " if remainder != 0 {\n\n samples.extend_from_slice(&vec![0; remainder])\n\n }\n\n\n\n let window_count = samples.len() / window_length;\n\n\n\n // all strategies\n\n let strategies = vec![StrategyKind::LPF, StrategyKind::Spectrum];\n\n\n\n let mut map = HashMap::new();\n\n\n\n for strategy in strategies {\n\n let detector = strategy.detector(44100);\n\n let mut beats = Vec::new();\n\n for i in 0..window_count {\n\n let window = &samples[i * window_length..(i + 1) * window_length];\n\n let beat = detector.is_beat(window);\n\n if let Some(beat) = beat {\n\n beats.push(beat);\n\n }\n", "file_path": "src/lib.rs", "rank": 73, "score": 8.476129431648971 }, { "content": " anim.shift_all_pixels();\n\n adapter.write_rgb(&anim.rgb_strip_vec_data).unwrap();\n\n }\n\n\n\n sleep_busy_waiting_until(next_timestamp);\n\n }\n\n\n\n handle.join().unwrap();\n\n}\n\n\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 74, "score": 8.06109456645063 }, { "content": "\n\n #[inline(always)]\n\n pub const fn relative_ms(&self) -> u32 {\n\n self.relative_ms\n\n }\n\n}\n\n\n\n/// Common abstraction over a beat detection strategy. Each strategy keeps ongoing\n\n/// audio samples, for example from microphone. 
Strategies should have an internal\n\n/// mutable state via interior mutability to compare sample windows (and analysis)\n\n/// against previous values.\n", "file_path": "src/lib.rs", "rank": 75, "score": 7.726409982345063 }, { "content": "\n\n for (strategy, beats) in map {\n\n println!(\"Strategy {:?} found beats at:\", strategy);\n\n for beat in beats {\n\n println!(\" {}ms\", beat.relative_ms());\n\n }\n\n }\n\n }\n\n\n\n /// TODO this test only works for a \"pretty good\" beat detection algorithm, because\n\n /// beats are close together. This doesn't work for the two existing ones.\n\n /// Make this test more tolerant, i.e. only for the \"good algorithms\" that\n\n /// hopefully come in the future.\n\n #[test]\n\n #[ignore]\n\n fn test_sample_1_beat_detection() {\n\n let (sample_1_audio_data, sampling_rate) = read_mp3_to_mono(\"res/sample_1.mp3\");\n\n // assert 44,1kHz because it makes things easier\n\n assert_eq!(\n\n sampling_rate, 44100,\n", "file_path": "src/lib.rs", "rank": 76, "score": 6.362006874662395 }, { "content": " }\n\n\n\n MovingLightStripsAnimation {\n\n led_count,\n\n rgb_strip_vec_data: vec![(0, 0, 0); led_count],\n\n new_rgb_data_vec: vec![(0, 0, 0); MOVING_LIGHT_IMPULSE_LEN],\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn add_next_light_impulse(&mut self) {\n\n let (r, g, b) = get_random_pixel_val();\n\n self.new_rgb_data_vec[00] = darken_rgb(r, g, b, 0.1);\n\n self.new_rgb_data_vec[01] = darken_rgb(r, g, b, 0.2);\n\n self.new_rgb_data_vec[02] = darken_rgb(r, g, b, 0.4);\n\n self.new_rgb_data_vec[03] = darken_rgb(r, g, b, 0.6);\n\n self.new_rgb_data_vec[04] = darken_rgb(r, g, b, 0.7);\n\n self.new_rgb_data_vec[05] = darken_rgb(r, g, b, 0.8);\n\n self.new_rgb_data_vec[06] = darken_rgb(r, g, b, 0.9);\n\n self.new_rgb_data_vec[07] = (r, g, b);\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 77, "score": 5.634471864494498 }, { "content": " /// the last beat is beyond the threshold. 
Of not, we can return early\n\n /// and do not need to check if a beat is in the given sample.\n\n #[inline(always)]\n\n fn last_beat_beyond_threshold(&self, state: &AnalysisState) -> bool\n\n where\n\n Self: Sized,\n\n {\n\n // only check this if at least a single beat was recognized\n\n if state.beat_time_ms() > 0 {\n\n let threshold = state.last_beat_timestamp() + Self::min_duration_between_beats_ms();\n\n if state.beat_time_ms() < threshold {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n /// Common implementation for all strategies which checks if\n\n /// the current windows/frames max amplitude (i16) is above a value where\n\n /// a beat could happen in theory (discard noise/silence/break between songs)\n", "file_path": "src/lib.rs", "rank": 78, "score": 5.634199745634453 }, { "content": " if i == 0 {\n\n self.new_rgb_data_vec[i] = (0, 0, 0);\n\n } else {\n\n let prev = self.new_rgb_data_vec[i - 1].clone();\n\n\n\n self.new_rgb_data_vec[i] = prev;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 80, "score": 3.377950880561717 }, { "content": " self.new_rgb_data_vec[08] = darken_rgb(r, g, b, 0.9);\n\n self.new_rgb_data_vec[09] = darken_rgb(r, g, b, 0.8);\n\n self.new_rgb_data_vec[10] = darken_rgb(r, g, b, 0.7);\n\n self.new_rgb_data_vec[11] = darken_rgb(r, g, b, 0.6);\n\n self.new_rgb_data_vec[12] = darken_rgb(r, g, b, 0.4);\n\n self.new_rgb_data_vec[13] = darken_rgb(r, g, b, 0.2);\n\n self.new_rgb_data_vec[14] = darken_rgb(r, g, b, 0.1);\n\n }\n\n\n\n /// Shifts all pixel to the next position.\n\n /// Iterates backwards through `self.rgb_strip_vec_data` from both sides!\n\n /// Because our strip looks like this:\n\n ///\n\n /// ```\n\n /// [LED 0] [LED 1] ... [LED 5] [LED 6] ... [LED N]\n\n /// [RGB N/2] [RGB N/2 - 1] ... [RGB 0] [RGB 1] ... [RGB N/2] // RGB value; animated motion to the edges\n\n /// [Vec[0]] [Vec[1]] ... [Vec[x]] [Vec[y]] ... 
[Vec[N]]\n\n /// ```\n\n #[inline(always)]\n\n fn shift_all_pixels(&mut self) {\n", "file_path": "examples/ws2812_spi_light_on_beat.rs", "rank": 81, "score": 3.244918452364444 } ]
Rust
tests/defaults_encode_decode.rs
OragonEfreet/sage_mqtt
092cf0494e5c602d712591a4fb87e5cd713b7282
use sage_mqtt::{ Auth, ConnAck, Connect, Disconnect, Error, Packet, PubAck, PubComp, PubRec, PubRel, Publish, ReasonCode, SubAck, Subscribe, UnSubAck, UnSubscribe, }; use std::io::Cursor; #[tokio::test] async fn default_connect() { let mut encoded = Vec::new(); let send_packet: Packet = Connect::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Connect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Connect"); if let Packet::Connect(receive_packet) = receive_result { assert_eq!(receive_packet, Connect::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn connect_with_default_auth() { let mut encoded = Vec::new(); let send_packet: Packet = Connect { authentication: Some(Default::default()), ..Default::default() } .into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Connect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Connect"); if let Packet::Connect(receive_packet) = receive_result { assert_eq!( receive_packet, Connect { authentication: Some(Default::default()), ..Default::default() } ); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_connack() { let mut encoded = Vec::new(); let send_packet: Packet = ConnAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode ConnAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode ConnAck"); if let Packet::ConnAck(receive_packet) = receive_result { assert_eq!(receive_packet, ConnAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_publish() { let mut encoded = Vec::new(); let send_packet: 
Packet = Publish::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Publish packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Publish"); if let Packet::Publish(receive_packet) = receive_result { assert_eq!(receive_packet, Publish::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_puback() { let mut encoded = Vec::new(); let send_packet: Packet = PubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubAck"); if let Packet::PubAck(receive_packet) = receive_result { assert_eq!(receive_packet, PubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pubrec() { let mut encoded = Vec::new(); let send_packet: Packet = PubRec::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubRec packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubRec"); if let Packet::PubRec(receive_packet) = receive_result { assert_eq!(receive_packet, PubRec::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pubrel() { let mut encoded = Vec::new(); let send_packet: Packet = PubRel::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubRel packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubRel"); if let Packet::PubRel(receive_packet) = receive_result { assert_eq!(receive_packet, PubRel::default()); } else { panic!("Incorrect packet 
type"); } } #[tokio::test] async fn default_pubcomp() { let mut encoded = Vec::new(); let send_packet: Packet = PubComp::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubComp packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubComp"); if let Packet::PubComp(receive_packet) = receive_result { assert_eq!(receive_packet, PubComp::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_subscribe() { let mut encoded = Vec::new(); let send_packet: Packet = Subscribe::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Subscribe packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor).await; assert!(matches!( receive_result, Err(Error::Reason(ReasonCode::ProtocolError)) )); } #[tokio::test] async fn default_suback() { let mut encoded = Vec::new(); let send_packet: Packet = SubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode SubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode SubAck"); if let Packet::SubAck(receive_packet) = receive_result { assert_eq!(receive_packet, SubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_unsubscribe() { let mut encoded = Vec::new(); let send_packet: Packet = UnSubscribe::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode UnSubscribe packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor).await; assert!(matches!( receive_result, Err(Error::Reason(ReasonCode::ProtocolError)) )); } #[tokio::test] async fn default_unsuback() { let mut 
encoded = Vec::new(); let send_packet: Packet = UnSubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode UnSubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode UnSubAck"); if let Packet::UnSubAck(receive_packet) = receive_result { assert_eq!(receive_packet, UnSubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pingreq() { let mut encoded = Vec::new(); let send_size = Packet::PingReq .encode(&mut encoded) .await .expect("Cannot encode PingReq packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PingReq"); assert!(matches!(receive_result, Packet::PingReq)); } #[tokio::test] async fn default_pingresp() { let mut encoded = Vec::new(); let send_size = Packet::PingResp .encode(&mut encoded) .await .expect("Cannot encode PingResp packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PingResp"); assert!(matches!(receive_result, Packet::PingResp)); } #[tokio::test] async fn default_disconnect() { let mut encoded = Vec::new(); let send_packet: Packet = Disconnect::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Disconnect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Disconnect"); if let Packet::Disconnect(receive_packet) = receive_result { assert_eq!(receive_packet, Disconnect::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_auth() { let mut encoded = Vec::new(); let send_packet: Packet = Auth::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot 
encode Auth packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Auth"); if let Packet::Auth(receive_packet) = receive_result { assert_eq!(receive_packet, Auth::default()); } else { panic!("Incorrect packet type"); } }
use sage_mqtt::{ Auth, ConnAck, Connect, Disconnect, Error, Packet, PubAck, PubComp, PubRec, PubRel, Publish, ReasonCode, SubAck, Subscribe, UnSubAck, UnSubscribe, }; use std::io::Cursor; #[tokio::test] async fn default_connect() { let mut encoded = Vec::new(); let send_packet: Packet = Connect::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Connect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Connect"); if let Packet::Connect(receive_packet) = receive_result { assert_eq!(receive_packet, Connect::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn connect_with_default_auth() { let mut encoded = Vec::new(); let send_packet: Packet = Connect { authentication: Some(Default::default()), ..Default::default() } .into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Connect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Connect");
} #[tokio::test] async fn default_connack() { let mut encoded = Vec::new(); let send_packet: Packet = ConnAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode ConnAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode ConnAck"); if let Packet::ConnAck(receive_packet) = receive_result { assert_eq!(receive_packet, ConnAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_publish() { let mut encoded = Vec::new(); let send_packet: Packet = Publish::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Publish packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Publish"); if let Packet::Publish(receive_packet) = receive_result { assert_eq!(receive_packet, Publish::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_puback() { let mut encoded = Vec::new(); let send_packet: Packet = PubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubAck"); if let Packet::PubAck(receive_packet) = receive_result { assert_eq!(receive_packet, PubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pubrec() { let mut encoded = Vec::new(); let send_packet: Packet = PubRec::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubRec packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubRec"); if let Packet::PubRec(receive_packet) = 
receive_result { assert_eq!(receive_packet, PubRec::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pubrel() { let mut encoded = Vec::new(); let send_packet: Packet = PubRel::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubRel packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubRel"); if let Packet::PubRel(receive_packet) = receive_result { assert_eq!(receive_packet, PubRel::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pubcomp() { let mut encoded = Vec::new(); let send_packet: Packet = PubComp::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode PubComp packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PubComp"); if let Packet::PubComp(receive_packet) = receive_result { assert_eq!(receive_packet, PubComp::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_subscribe() { let mut encoded = Vec::new(); let send_packet: Packet = Subscribe::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Subscribe packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor).await; assert!(matches!( receive_result, Err(Error::Reason(ReasonCode::ProtocolError)) )); } #[tokio::test] async fn default_suback() { let mut encoded = Vec::new(); let send_packet: Packet = SubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode SubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode SubAck"); if let 
Packet::SubAck(receive_packet) = receive_result { assert_eq!(receive_packet, SubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_unsubscribe() { let mut encoded = Vec::new(); let send_packet: Packet = UnSubscribe::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode UnSubscribe packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor).await; assert!(matches!( receive_result, Err(Error::Reason(ReasonCode::ProtocolError)) )); } #[tokio::test] async fn default_unsuback() { let mut encoded = Vec::new(); let send_packet: Packet = UnSubAck::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode UnSubAck packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode UnSubAck"); if let Packet::UnSubAck(receive_packet) = receive_result { assert_eq!(receive_packet, UnSubAck::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_pingreq() { let mut encoded = Vec::new(); let send_size = Packet::PingReq .encode(&mut encoded) .await .expect("Cannot encode PingReq packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PingReq"); assert!(matches!(receive_result, Packet::PingReq)); } #[tokio::test] async fn default_pingresp() { let mut encoded = Vec::new(); let send_size = Packet::PingResp .encode(&mut encoded) .await .expect("Cannot encode PingResp packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode PingResp"); assert!(matches!(receive_result, Packet::PingResp)); } #[tokio::test] async fn default_disconnect() { let mut encoded = Vec::new(); let send_packet: Packet = 
Disconnect::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Disconnect packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Disconnect"); if let Packet::Disconnect(receive_packet) = receive_result { assert_eq!(receive_packet, Disconnect::default()); } else { panic!("Incorrect packet type"); } } #[tokio::test] async fn default_auth() { let mut encoded = Vec::new(); let send_packet: Packet = Auth::default().into(); let send_size = send_packet .encode(&mut encoded) .await .expect("Cannot encode Auth packet"); assert!(send_size > 0); let mut cursor = Cursor::new(encoded); let receive_result = Packet::decode(&mut cursor) .await .expect("Cannot decode Auth"); if let Packet::Auth(receive_packet) = receive_result { assert_eq!(receive_packet, Auth::default()); } else { panic!("Incorrect packet type"); } }
if let Packet::Connect(receive_packet) = receive_result { assert_eq!( receive_packet, Connect { authentication: Some(Default::default()), ..Default::default() } ); } else { panic!("Incorrect packet type"); }
if_condition
[ { "content": "use crate::QoS;\n\n\n\n/// The control packet type is present as the first element of the fixed header\n\n/// in an MQTT paquet. It is encoded in a 8bit flag set where the 4 most\n\n/// significant bits represent the type of the paquet and the 4 least are flags\n\n/// where values depend on the type.\n\n#[derive(Debug, Clone, Copy)]\n\npub enum PacketType {\n\n Reserved,\n\n Connect,\n\n ConnAck,\n\n Publish {\n\n duplicate: bool,\n\n qos: QoS,\n\n retain: bool,\n\n },\n\n PubAck,\n\n PubRec,\n\n PubRel,\n\n PubComp,\n", "file_path": "src/packet_type.rs", "rank": 0, "score": 65655.45762554191 }, { "content": " Subscribe,\n\n SubAck,\n\n UnSubscribe,\n\n UnSubAck,\n\n PingReq,\n\n PingResp,\n\n Disconnect,\n\n Auth,\n\n}\n\n\n", "file_path": "src/packet_type.rs", "rank": 1, "score": 65650.58509985305 }, { "content": " (0b1010, 0b0010) => PacketType::UnSubscribe,\n\n (0b1011, 0b0000) => PacketType::UnSubAck,\n\n (0b1100, 0b0000) => PacketType::PingReq,\n\n (0b1101, 0b0000) => PacketType::PingResp,\n\n (0b1110, 0b0000) => PacketType::Disconnect,\n\n (0b1111, 0b0000) => PacketType::Auth,\n\n _ => return Err(MalformedPacket.into()),\n\n };\n\n Ok(packet_type)\n\n}\n\n\n\n#[cfg(test)]\n\nmod unit {\n\n\n\n use crate::{Error, ReasonCode};\n\n use std::io::Cursor;\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n", "file_path": "src/codec/packet_type.rs", "rank": 19, "score": 62657.15816530182 }, { "content": "use crate::{codec, PacketType, ReasonCode::MalformedPacket, Result as SageResult};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite};\n\n\n\n/// Write the given `PacketType` in one byte according to\n\n/// MQTT5 specifications.\n\n/// In case of success, returns `1`.\n\npub async fn write_control_packet_type<W: AsyncWrite + Unpin>(\n\n cpt: PacketType,\n\n writer: &mut W,\n\n) -> SageResult<usize> {\n\n codec::write_byte(\n\n match cpt {\n\n PacketType::Reserved => 0b0000_0000,\n\n PacketType::Connect => 
0b0001_0000,\n\n PacketType::ConnAck => 0b0010_0000,\n\n PacketType::Publish {\n\n duplicate,\n\n qos,\n\n retain,\n", "file_path": "src/codec/packet_type.rs", "rank": 20, "score": 62654.21819143591 }, { "content": "/// In case of success, returns a `PacketType` instance.\n\npub async fn read_control_packet_type<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n) -> SageResult<PacketType> {\n\n let packet_type = codec::read_byte(reader).await?;\n\n let packet_type = match (packet_type >> 4, packet_type & 0b0000_1111) {\n\n (0b0000, 0b0000) => PacketType::Reserved,\n\n (0b0001, 0b0000) => PacketType::Connect,\n\n (0b0010, 0b0000) => PacketType::ConnAck,\n\n (0b0011, flags) => PacketType::Publish {\n\n duplicate: (flags & 0b0111) > 0,\n\n qos: ((flags & 0b0110) >> 1).try_into()?,\n\n retain: (flags & 0b0001) > 0,\n\n },\n\n (0b0100, 0b0000) => PacketType::PubAck,\n\n (0b0101, 0b0000) => PacketType::PubRec,\n\n (0b0110, 0b0010) => PacketType::PubRel,\n\n (0b0111, 0b0000) => PacketType::PubComp,\n\n (0b1000, 0b0010) => PacketType::Subscribe,\n\n (0b1001, 0b0000) => PacketType::SubAck,\n", "file_path": "src/codec/packet_type.rs", "rank": 21, "score": 62653.20472978833 }, { "content": " if i == *flags {\n\n continue;\n\n }\n\n let buffer = [*packet_type, *flags, 0x00];\n\n let mut test_stream = Cursor::new(buffer);\n\n assert!(matches!(\n\n read_control_packet_type(&mut test_stream).await,\n\n Err(Error::Reason(ReasonCode::MalformedPacket))\n\n ));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/codec/packet_type.rs", "rank": 22, "score": 62651.95600649661 }, { "content": " } => 0b0011_0000 | (duplicate as u8) << 3 | (qos as u8) << 2 | retain as u8,\n\n PacketType::PubAck => 0b0100_0000,\n\n PacketType::PubRec => 0b0101_0000,\n\n PacketType::PubRel => 0b0110_0010,\n\n PacketType::PubComp => 0b0111_0000,\n\n PacketType::Subscribe => 0b1000_0010,\n\n PacketType::SubAck => 0b1001_0000,\n\n PacketType::UnSubscribe => 0b1010_0010,\n\n PacketType::UnSubAck => 0b1011_0000,\n\n 
PacketType::PingReq => 0b1100_0000,\n\n PacketType::PingResp => 0b1101_0000,\n\n PacketType::Disconnect => 0b1110_0000,\n\n PacketType::Auth => 0b1111_0000,\n\n },\n\n writer,\n\n )\n\n .await\n\n}\n\n\n\n/// Read the given `reader` for a `PacketType`.\n", "file_path": "src/codec/packet_type.rs", "rank": 23, "score": 62651.012548946586 }, { "content": " async fn mqtt_2_1_3_1() {\n\n let reserved_flags_per_type = [\n\n (0b0001, 0b0000),\n\n (0b0010, 0b0000),\n\n (0b0100, 0b0000),\n\n (0b0101, 0b0000),\n\n (0b0110, 0b0010),\n\n (0b0111, 0b0000),\n\n (0b1000, 0b0010),\n\n (0b1001, 0b0000),\n\n (0b1010, 0b0010),\n\n (0b1011, 0b0000),\n\n (0b1100, 0b0000),\n\n (0b1101, 0b0000),\n\n (0b1110, 0b0000),\n\n (0b1111, 0b0000),\n\n ];\n\n\n\n for (packet_type, flags) in &reserved_flags_per_type {\n\n for i in 0b0000..=0b1111 {\n", "file_path": "src/codec/packet_type.rs", "rank": 24, "score": 62640.69252806132 }, { "content": "enum PayloadRequirements {\n\n None,\n\n Required,\n\n Optional,\n\n}\n\n\n\nimpl From<PacketType> for PayloadRequirements {\n\n fn from(value: PacketType) -> Self {\n\n match value {\n\n PacketType::Publish { .. 
} => PayloadRequirements::Optional,\n\n PacketType::Connect\n\n | PacketType::Subscribe\n\n | PacketType::SubAck\n\n | PacketType::UnSubscribe\n\n | PacketType::UnSubAck => PayloadRequirements::Required,\n\n _ => PayloadRequirements::None,\n\n }\n\n }\n\n}\n", "file_path": "src/packet_type.rs", "rank": 25, "score": 59887.06342902828 }, { "content": "fn main() {\n\n // let mut broker = Broker {};\n\n\n\n // let listener = TcpListener::bind(\"127.0.0.1:7878\").unwrap();\n\n\n\n // for stream in listener.incoming() {\n\n // }\n\n}\n", "file_path": "examples/server.rs", "rank": 26, "score": 41881.387584076365 }, { "content": "fn main() {\n\n println!(\"{}\", Topic::from(\"/pouet//haha/+/chaise/#/\"));\n\n}\n", "file_path": "examples/sandbox.rs", "rank": 27, "score": 41881.387584076365 }, { "content": "#[derive(Debug)]\n\nstruct ConnectFlags {\n\n pub clean_start: bool,\n\n pub will: bool,\n\n pub will_qos: QoS,\n\n pub will_retain: bool,\n\n pub user_name: bool,\n\n pub password: bool,\n\n}\n\n\n\nimpl Connect {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n // Variable Header (into content)\n\n let mut n_bytes = codec::write_utf8_string(\"MQTT\", writer).await?;\n\n n_bytes += codec::write_byte(0x05, writer).await?;\n\n\n\n n_bytes += ConnectFlags {\n\n clean_start: self.clean_start,\n\n will: self.will.is_some(),\n\n will_qos: if let Some(w) = &self.will {\n\n w.qos\n", "file_path": "src/control/connect.rs", "rank": 28, "score": 38841.35931951211 }, { "content": "use crate::ReasonCode;\n\nuse std::{\n\n error::Error as StdError,\n\n fmt::{Display, Formatter, Result as FmtResult},\n\n io::Error as IOError,\n\n result::Result as StdResult,\n\n};\n\n\n\n/// Standard Result type for Sage MQTT\n\npub type Result<T> = StdResult<T, Error>;\n\n\n\n/// The error type for Sage MQTT operations\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Standard Rust IO Error\n\n Io(IOError),\n\n\n\n /// Error described using a MQTT 
Reason code\n\n Reason(ReasonCode),\n\n}\n", "file_path": "src/error.rs", "rank": 29, "score": 34804.74375873399 }, { "content": "\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n\n match self {\n\n Error::Reason(rc) => write!(f, \"{:?}\", rc),\n\n Error::Io(ref e) => e.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n match *self {\n\n Error::Io(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl From<IOError> for Error {\n", "file_path": "src/error.rs", "rank": 30, "score": 34803.7107750289 }, { "content": " fn from(err: IOError) -> Self {\n\n Error::Io(err)\n\n }\n\n}\n\n\n\nimpl From<ReasonCode> for Error {\n\n fn from(rc: ReasonCode) -> Self {\n\n Error::Reason(rc)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 31, "score": 34800.75443734178 }, { "content": " /// If the server does not support the requested method, it will respond\n\n /// with a `Connack` packet with reason code `NotAuthorized` or\n\n /// `BadAuthenticationMethod` and close the connection.\n\n pub method: String,\n\n\n\n /// Authentication may contains data. 
The content depends on the\n\n /// authentication method.\n\n pub data: Vec<u8>,\n\n}\n\n\n\nimpl Authentication {\n\n ///Write authentication data into `writer`, returning the written size\n\n /// in case of success.\n\n pub async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = Property::AuthenticationMethod(self.method)\n\n .encode(writer)\n\n .await?;\n\n if !self.data.is_empty() {\n\n n_bytes += Property::AuthenticationData(self.data)\n\n .encode(writer)\n", "file_path": "src/authentication.rs", "rank": 32, "score": 34797.129958070684 }, { "content": "use crate::{Property, Result as SageResult};\n\nuse std::marker::Unpin;\n\nuse tokio::io::AsyncWrite;\n\n\n\n/// By default, `Connect` packets provide optional `user_name` and `password`\n\n/// fields which can be used to provide basic authentication.\n\n/// Enhanced authentication can be provided by using an `Authentication`\n\n/// structure which will initialize a challenge / response style authentication.\n\n/// Ii might imply the exchange of several `Auth` with reason code\n\n/// `ContinueAuthentication` until eventually one is send with either `Success`\n\n/// or any relevant error code and, in that case, close the connection.\n\n/// The authentication method which is used as an agreement on how authentication\n\n/// exchanges will perform. 
Authentication data can be sent at any moment\n\n/// according to this agreement.\n\n/// See the section 4.12 (Enhanced Authentication) of the MQTT 5 specifications\n\n/// for examples.\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct Authentication {\n\n /// Specifies the authentication method, such as \"SCRAM-SHA-1\" or \"GS2-KRB5\".\n\n /// The actual support for a given authentication method is up to the server.\n", "file_path": "src/authentication.rs", "rank": 33, "score": 34793.57463496277 }, { "content": " .await?;\n\n }\n\n Ok(n_bytes)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod unit {\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn encode_empty() {\n\n let mut result = Vec::new();\n\n let test_data: Authentication = Default::default();\n\n\n\n assert_eq!(test_data.write(&mut result).await.unwrap(), 3);\n\n assert_eq!(result, vec![0x15, 0x00, 0x00]);\n\n }\n\n\n", "file_path": "src/authentication.rs", "rank": 34, "score": 34792.66119687541 }, { "content": " #[tokio::test]\n\n async fn encode() {\n\n let mut result = Vec::new();\n\n let test_data = Authentication {\n\n method: \"Willow\".into(),\n\n data: vec![0x0D, 0x15, 0xEA, 0x5E],\n\n };\n\n\n\n assert_eq!(test_data.write(&mut result).await.unwrap(), 16);\n\n assert_eq!(\n\n result,\n\n vec![21, 0, 6, 87, 105, 108, 108, 111, 119, 22, 0, 4, 13, 21, 234, 94]\n\n );\n\n }\n\n}\n", "file_path": "src/authentication.rs", "rank": 35, "score": 34789.53122156633 }, { "content": "use crate::{\n\n codec, Auth, ConnAck, Connect, Disconnect, PacketType, PingReq, PingResp, PubAck, PubComp,\n\n PubRec, PubRel, Publish, ReasonCode::ProtocolError, Result as SageResult, SubAck, Subscribe,\n\n UnSubAck, UnSubscribe,\n\n};\n\nuse std::{fmt, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n#[derive(Debug)]\n", "file_path": "src/packet.rs", "rank": 36, "score": 34314.090635691304 }, { "content": " PingReq,\n\n\n\n /// PINGRESP MQTT packet. 
Respond to a ping request.\n\n PingResp,\n\n\n\n /// DISCONNECT MQTT packet. Disconnect a connextion and optionally a session.\n\n Disconnect(Disconnect),\n\n\n\n /// AUTH MQTT packet. Performs authentication exchanges between clients and server.\n\n Auth(Auth),\n\n}\n\n\n\nimpl fmt::Display for Packet {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Packet::Connect(_) => write!(f, \"Connect\"),\n\n Packet::ConnAck(connack) => write!(f, \"ConnAck [{:?}]\", connack.reason_code),\n\n Packet::Publish(_) => write!(f, \"Publish\"),\n\n Packet::PubAck(_) => write!(f, \"PubAck\"),\n\n Packet::PubRec(_) => write!(f, \"PubRec\"),\n", "file_path": "src/packet.rs", "rank": 37, "score": 34311.66224876552 }, { "content": "}\n\nimpl From<Disconnect> for Packet {\n\n fn from(control: Disconnect) -> Self {\n\n Packet::Disconnect(control)\n\n }\n\n}\n\nimpl From<Auth> for Packet {\n\n fn from(control: Auth) -> Self {\n\n Packet::Auth(control)\n\n }\n\n}\n\n\n\nimpl Packet {\n\n /// Write the entire `Packet` to `writer`, returning the number of\n\n /// bytes written.\n\n /// In case of failure, the operation will return any MQTT-related error, or\n\n /// `std::io::Error`.\n\n pub async fn encode<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut variable_and_payload = Vec::new();\n\n let (packet_type, remaining_size) = match self {\n", "file_path": "src/packet.rs", "rank": 38, "score": 34311.49035164002 }, { "content": " let mut fixed_header_buffer = Vec::new();\n\n\n\n let fixed_size = FixedHeader {\n\n packet_type,\n\n remaining_size,\n\n }\n\n .encode(&mut fixed_header_buffer)\n\n .await?;\n\n\n\n writer.write_all(&fixed_header_buffer).await?;\n\n writer.write_all(&variable_and_payload).await?;\n\n Ok(fixed_size + remaining_size)\n\n }\n\n\n\n /// Read a control packet from `reader`, returning a new `Packet`.\n\n /// In case of failure, the operation will return any MQTT-related error, or\n\n /// 
`std::io::Error`.\n\n pub async fn decode<R: AsyncRead + Unpin>(reader: &mut R) -> SageResult<Self> {\n\n let fixed_header = FixedHeader::decode(reader).await?;\n\n\n", "file_path": "src/packet.rs", "rank": 39, "score": 34309.118695900856 }, { "content": " }\n\n PacketType::Disconnect => Packet::Disconnect(Disconnect::read(reader).await?),\n\n PacketType::PubComp => {\n\n Packet::PubComp(PubComp::read(reader, fixed_header.remaining_size == 2).await?)\n\n }\n\n\n\n PacketType::Subscribe => {\n\n Packet::Subscribe(Subscribe::read(reader, fixed_header.remaining_size).await?)\n\n }\n\n\n\n PacketType::UnSubAck => {\n\n Packet::UnSubAck(UnSubAck::read(reader, fixed_header.remaining_size).await?)\n\n }\n\n\n\n PacketType::Publish {\n\n duplicate,\n\n qos,\n\n retain,\n\n } => Packet::Publish(\n\n Publish::read(\n", "file_path": "src/packet.rs", "rank": 40, "score": 34308.721058963914 }, { "content": " packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::PubComp(packet) => (\n\n PacketType::PubComp,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::Subscribe(packet) => (\n\n PacketType::Subscribe,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::Publish(packet) => (\n\n PacketType::Publish {\n\n duplicate: packet.duplicate,\n\n qos: packet.qos,\n\n retain: packet.retain,\n\n },\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n };\n\n\n", "file_path": "src/packet.rs", "rank": 41, "score": 34308.21547774441 }, { "content": " packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::UnSubscribe(packet) => (\n\n PacketType::UnSubscribe,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::PubRec(packet) => (\n\n PacketType::PubRec,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::Disconnect(packet) => (\n\n PacketType::Disconnect,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::PubRel(packet) => (\n\n PacketType::PubRel,\n\n 
packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::SubAck(packet) => (\n\n PacketType::SubAck,\n", "file_path": "src/packet.rs", "rank": 42, "score": 34308.1538534773 }, { "content": "}\n\n\n\n/// The standard type to manipulate a AsyncRead/AsyncWrite-able MQTT packet. Each packet\n\n/// is an enum value with its own type.\n\n#[derive(Debug, Clone)]\n\npub enum Packet {\n\n /// CONNECT MQTT packet. Opens a connection request.\n\n Connect(Connect),\n\n\n\n /// CONNACK MQTT packet. Aknowledge a connectio request.\n\n ConnAck(ConnAck),\n\n\n\n /// PUBLISH MQTT packet. Delivery a message to or from a server.\n\n Publish(Publish),\n\n\n\n /// PUBACK MQTT packet. Ackowledge a QoS 1 or QoS 2 message.\n\n PubAck(PubAck),\n\n\n\n /// PUBREC MQTT packet. Ackowledge a QoS 2 message.\n\n PubRec(PubRec),\n", "file_path": "src/packet.rs", "rank": 43, "score": 34307.633527975144 }, { "content": " Packet::Connect(packet) => (\n\n PacketType::Connect,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::ConnAck(packet) => (\n\n PacketType::ConnAck,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::PingReq => (PacketType::PingReq, 0),\n\n Packet::PingResp => (PacketType::PingResp, 0),\n\n Packet::UnSubAck(packet) => (\n\n PacketType::UnSubAck,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::Auth(packet) => (\n\n PacketType::Auth,\n\n packet.write(&mut variable_and_payload).await?,\n\n ),\n\n Packet::PubAck(packet) => (\n\n PacketType::PubAck,\n", "file_path": "src/packet.rs", "rank": 44, "score": 34307.62819489655 }, { "content": "\n\n /// PUBREL MQTT packet. Ackowledge a QoS 2 message.\n\n PubRel(PubRel),\n\n\n\n /// PUBCOMP MQTT packet. Ackowledge a QoS 2 message.\n\n PubComp(PubComp),\n\n\n\n /// SUBSCRIBE MQTT packet. Subscribe a client to topics.\n\n Subscribe(Subscribe),\n\n\n\n /// SUBACK MQTT packet. Acknowledge a client SUBSCRIBE packet.\n\n SubAck(SubAck),\n\n\n\n /// UNSUBSCRIBE MQTT packet. 
Unsubscribe a client from topics.\n\n UnSubscribe(UnSubscribe),\n\n\n\n /// UNSUBACK MQTT packet. Acknowledge a client UNSUBSCRIBE packet.\n\n UnSubAck(UnSubAck),\n\n\n\n /// PINGREQ MQTT packet. Send a ping request.\n", "file_path": "src/packet.rs", "rank": 45, "score": 34307.314626650186 }, { "content": " let packet = match fixed_header.packet_type {\n\n PacketType::Connect => Packet::Connect(Connect::read(reader).await?),\n\n PacketType::ConnAck => Packet::ConnAck(ConnAck::read(reader).await?),\n\n PacketType::PubAck => {\n\n Packet::PubAck(PubAck::read(reader, fixed_header.remaining_size == 2).await?)\n\n }\n\n PacketType::PubRec => {\n\n Packet::PubRec(PubRec::read(reader, fixed_header.remaining_size == 2).await?)\n\n }\n\n PacketType::PingReq => Packet::PingReq,\n\n PacketType::PingResp => Packet::PingResp,\n\n PacketType::SubAck => {\n\n Packet::SubAck(SubAck::read(reader, fixed_header.remaining_size).await?)\n\n }\n\n PacketType::UnSubscribe => {\n\n Packet::UnSubscribe(UnSubscribe::read(reader, fixed_header.remaining_size).await?)\n\n }\n\n PacketType::Auth => Packet::Auth(Auth::read(reader).await?),\n\n PacketType::PubRel => {\n\n Packet::PubRel(PubRel::read(reader, fixed_header.remaining_size == 2).await?)\n", "file_path": "src/packet.rs", "rank": 46, "score": 34307.152194169605 }, { "content": " Packet::PubRel(_) => write!(f, \"PubRel\"),\n\n Packet::PubComp(_) => write!(f, \"PubComp\"),\n\n Packet::Subscribe(_) => write!(f, \"Subscribe\"),\n\n Packet::SubAck(_) => write!(f, \"SubAck\"),\n\n Packet::UnSubscribe(_) => write!(f, \"UnSubscribe\"),\n\n Packet::UnSubAck(_) => write!(f, \"UnSubAck\"),\n\n Packet::PingReq => write!(f, \"PingReq\"),\n\n Packet::PingResp => write!(f, \"PingResp\"),\n\n Packet::Disconnect(disconnect) => {\n\n write!(f, \"Disconnect [{:?}]\", disconnect.reason_code)\n\n }\n\n Packet::Auth(_) => write!(f, \"Auth\"),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Connect> for Packet {\n\n fn from(control: Connect) -> Self {\n\n 
Packet::Connect(control)\n\n }\n", "file_path": "src/packet.rs", "rank": 47, "score": 34304.51537819326 }, { "content": " reader,\n\n duplicate,\n\n qos,\n\n retain,\n\n fixed_header.remaining_size as u64,\n\n )\n\n .await?,\n\n ),\n\n _ => return Err(ProtocolError.into()),\n\n };\n\n\n\n Ok(packet)\n\n }\n\n}\n", "file_path": "src/packet.rs", "rank": 48, "score": 34294.3205037104 }, { "content": "}\n\nimpl From<ConnAck> for Packet {\n\n fn from(control: ConnAck) -> Self {\n\n Packet::ConnAck(control)\n\n }\n\n}\n\nimpl From<Publish> for Packet {\n\n fn from(control: Publish) -> Self {\n\n Packet::Publish(control)\n\n }\n\n}\n\nimpl From<PubAck> for Packet {\n\n fn from(control: PubAck) -> Self {\n\n Packet::PubAck(control)\n\n }\n\n}\n\nimpl From<PubRec> for Packet {\n\n fn from(control: PubRec) -> Self {\n\n Packet::PubRec(control)\n\n }\n", "file_path": "src/packet.rs", "rank": 49, "score": 34292.94900151311 }, { "content": "}\n\nimpl From<PubRel> for Packet {\n\n fn from(control: PubRel) -> Self {\n\n Packet::PubRel(control)\n\n }\n\n}\n\nimpl From<PubComp> for Packet {\n\n fn from(control: PubComp) -> Self {\n\n Packet::PubComp(control)\n\n }\n\n}\n\nimpl From<Subscribe> for Packet {\n\n fn from(control: Subscribe) -> Self {\n\n Packet::Subscribe(control)\n\n }\n\n}\n\nimpl From<SubAck> for Packet {\n\n fn from(control: SubAck) -> Self {\n\n Packet::SubAck(control)\n\n }\n", "file_path": "src/packet.rs", "rank": 50, "score": 34292.918164872186 }, { "content": "}\n\nimpl From<UnSubscribe> for Packet {\n\n fn from(control: UnSubscribe) -> Self {\n\n Packet::UnSubscribe(control)\n\n }\n\n}\n\nimpl From<UnSubAck> for Packet {\n\n fn from(control: UnSubAck) -> Self {\n\n Packet::UnSubAck(control)\n\n }\n\n}\n\nimpl From<PingReq> for Packet {\n\n fn from(_: PingReq) -> Self {\n\n Packet::PingReq\n\n }\n\n}\n\nimpl From<PingResp> for Packet {\n\n fn from(_: PingResp) -> Self {\n\n Packet::PingResp\n\n }\n", "file_path": "src/packet.rs", "rank": 51, "score": 
34292.875870546915 }, { "content": " reason_codes: vec![\n\n ReasonCode::PacketIdentifierInUse,\n\n ReasonCode::TopicFilterInvalid,\n\n ],\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 20);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = SubAck::read(&mut test_data, 20).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/suback.rs", "rank": 52, "score": 33087.44551000834 }, { "content": " \"harder\".into(),\n\n \"better\".into(),\n\n \"faster\".into(),\n\n \"stronger\".into(),\n\n ],\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 52);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = UnSubscribe::read(&mut test_data, 52).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/unsubscribe.rs", "rank": 53, "score": 33086.193444029086 }, { "content": "use crate::{codec, PropertiesDecoder, Property, ReasonCode::ProtocolError, Result as SageResult};\n\nuse std::marker::Unpin;\n\nuse tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};\n\n\n\n/// An `Unsubscribe` packet is sent from the client to unsubsribe to a topic.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct UnSubscribe {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// General purpose user-properties\n\n pub user_properties: 
Vec<(String, String)>,\n\n\n\n /// The list of topics to unsubsribe to. They can contains wildcards.\n\n pub subscriptions: Vec<String>,\n\n}\n\n\n\nimpl Default for UnSubscribe {\n\n fn default() -> Self {\n", "file_path": "src/control/unsubscribe.rs", "rank": 54, "score": 33083.85239019033 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod unit {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 15, 38, 0, 7, 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116, 0, 6, 104, 97,\n\n 114, 100, 101, 114, 0, 6, 98, 101, 116, 116, 101, 114, 0, 6, 102, 97, 115, 116, 101,\n\n 114, 0, 8, 115, 116, 114, 111, 110, 103, 101, 114,\n\n ]\n\n }\n\n\n\n fn decoded() -> UnSubscribe {\n\n UnSubscribe {\n\n packet_identifier: 1337,\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n subscriptions: vec![\n", "file_path": "src/control/unsubscribe.rs", "rank": 55, "score": 33083.284343986015 }, { "content": " UnSubscribe {\n\n packet_identifier: 0,\n\n user_properties: Default::default(),\n\n subscriptions: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl UnSubscribe {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n\n\n for option in self.subscriptions {\n", "file_path": "src/control/unsubscribe.rs", "rank": 56, "score": 33082.24942480359 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, 
AsyncWriteExt};\n\n\n\n/// The `SubAck` packet is sent by a server to confirm a `Subscribe` has been\n\n/// received and processed.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct SubAck {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// User defined properties\n\n pub user_properties: Vec<(String, String)>,\n\n\n\n /// The reason codes. The array contains one `ReasonCode` per subscription.\n", "file_path": "src/control/suback.rs", "rank": 57, "score": 33080.756876516236 }, { "content": " _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n\n\n let mut subscriptions = Vec::new();\n\n\n\n while reader.limit() > 0 {\n\n subscriptions.push(codec::read_utf8_string(&mut reader).await?);\n\n }\n\n\n\n if subscriptions.is_empty() {\n\n Err(ProtocolError.into())\n\n } else {\n\n Ok(UnSubscribe {\n\n packet_identifier,\n\n user_properties,\n\n subscriptions,\n\n })\n\n }\n\n }\n", "file_path": "src/control/unsubscribe.rs", "rank": 58, "score": 33079.28183446126 }, { "content": " reason_codes,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod unit {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 15, 38, 0, 7, 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116, 145, 143,\n\n ]\n\n }\n\n\n\n fn decoded() -> SubAck {\n\n SubAck {\n\n packet_identifier: 1337,\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n", "file_path": "src/control/suback.rs", "rank": 59, "score": 33079.17997054196 }, { "content": "\n\n let packet_identifier = codec::read_two_byte_integer(&mut reader).await?;\n\n let mut user_properties = Vec::new();\n\n let mut properties = PropertiesDecoder::take(&mut reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? 
{\n\n Property::UserProperty(k, v) => user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n\n\n let mut reason_codes = Vec::new();\n\n\n\n while reader.limit() > 0 {\n\n reason_codes.push(codec::read_byte(&mut reader).await?.try_into()?);\n\n }\n\n\n\n Ok(SubAck {\n\n packet_identifier,\n\n user_properties,\n", "file_path": "src/control/suback.rs", "rank": 60, "score": 33078.95235564624 }, { "content": " n_bytes += codec::write_utf8_string(&option, writer).await?;\n\n }\n\n\n\n Ok(n_bytes)\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n remaining_size: usize,\n\n ) -> SageResult<Self> {\n\n let mut reader = reader.take(remaining_size as u64);\n\n\n\n let packet_identifier = codec::read_two_byte_integer(&mut reader).await?;\n\n\n\n let mut user_properties = Vec::new();\n\n\n\n let mut properties = PropertiesDecoder::take(&mut reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? {\n\n Property::UserProperty(k, v) => user_properties.push((k, v)),\n", "file_path": "src/control/unsubscribe.rs", "rank": 61, "score": 33078.41522378669 }, { "content": " /// The indices in this array match the incides in the `Subscribe`'s\n\n /// subscriptions array.\n\n pub reason_codes: Vec<ReasonCode>,\n\n}\n\n\n\nimpl Default for SubAck {\n\n fn default() -> Self {\n\n SubAck {\n\n packet_identifier: 0,\n\n user_properties: Default::default(),\n\n reason_codes: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl SubAck {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n", "file_path": "src/control/suback.rs", "rank": 62, "score": 33077.61064291559 }, { "content": "\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n n_bytes 
+= codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n\n\n for reason_code in self.reason_codes {\n\n n_bytes += codec::write_reason_code(reason_code, writer).await?;\n\n }\n\n\n\n Ok(n_bytes)\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n remaining_size: usize,\n\n ) -> SageResult<Self> {\n\n let mut reader = reader.take(remaining_size as u64);\n", "file_path": "src/control/suback.rs", "rank": 63, "score": 33075.867732894774 }, { "content": "use crate::{\n\n codec, Authentication, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryFrom, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n/// The `Auth` packet is used for enhanced authentication upon connection.\n\n/// When a client connects to a server, it can initiates an authentication using\n\n/// the `Authentication` structure. Then the client and server exchange `Auth`\n\n/// packets until either the the client sends a `Disconnect` packet or the\n\n/// server respond with a `Connack` packet.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Auth {\n\n /// The packet reason code. 
Can be any of:\n\n /// - Success: The authentication is successful\n\n /// - ReAuthenticate (client only): Ask for a new authentication\n\n /// - ContinueAuthentication: Continue the authentication with another step\n\n pub reason_code: ReasonCode,\n", "file_path": "src/control/auth.rs", "rank": 64, "score": 33073.36573255696 }, { "content": " user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 40);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = Auth::read(&mut test_data).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/auth.rs", "rank": 65, "score": 33065.552330069724 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n/// The `PubRec` packet is sent during an `ExactlyOnce` quality of service\n\n/// publish.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct PubRec {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// The reason code for the acknowledgement. 
Can be any of:\n\n /// - `Success`\n\n /// - `NoMatchingSubscribers`\n\n /// - `UnspecifiedError`\n", "file_path": "src/control/pubrec.rs", "rank": 66, "score": 33064.191501928435 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n/// The `PubRel` packet is sent during an `ExactlyOnce` quality of service\n\n/// publish.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct PubRel {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// The reason code for the acknowledgement. Can be any of:\n\n /// - `Success`\n\n /// - `NoMatchingSubscribers`\n\n /// - `UnspecifiedError`\n", "file_path": "src/control/pubrel.rs", "rank": 67, "score": 33064.191501928435 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n/// The `PubComp` packet is sent during an `ExactlyOnce` quality of service\n\n/// publish.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct PubComp {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// The reason code for the acknowledgement. 
Can be any of:\n\n /// - `Success`\n\n /// - `NoMatchingSubscribers`\n\n /// - `UnspecifiedError`\n", "file_path": "src/control/pubcomp.rs", "rank": 68, "score": 33064.191501928435 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};\n\n\n\n/// A `PubAck` is the response for a `Publish` message with `AtLeastOnce` as\n\n/// quality of service.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct PubAck {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication.\n\n pub packet_identifier: u16,\n\n\n\n /// The reason code for the acknowledgement. Can be any of:\n\n /// - `Success`\n\n /// - `NoMatchingSubscribers`\n\n /// - `UnspecifiedError`\n", "file_path": "src/control/puback.rs", "rank": 69, "score": 33063.75747980158 }, { "content": "mod unit {\n\n\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 24, 38, 21, 0, 6, 87, 105, 108, 108, 111, 119, 22, 0, 4, 13, 21, 234, 94, 31, 0, 4, 66,\n\n 105, 119, 105, 38, 0, 7, 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116,\n\n ]\n\n }\n\n\n\n fn decoded() -> Auth {\n\n Auth {\n\n reason_code: ReasonCode::ContinueAuthentication,\n\n authentication: Authentication {\n\n method: \"Willow\".into(),\n\n data: vec![0x0D, 0x15, 0xEA, 0x5E],\n\n },\n\n reason_string: Some(\"Biwi\".into()),\n", "file_path": "src/control/auth.rs", "rank": 70, "score": 33063.20843371833 }, { "content": "\n\n #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 32);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = 
PubComp::read(&mut test_data, false).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/pubcomp.rs", "rank": 71, "score": 33063.12384810857 }, { "content": " #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 33);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = PubRec::read(&mut test_data, false).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/pubrec.rs", "rank": 72, "score": 33063.12384810857 }, { "content": " #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 33);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = PubRel::read(&mut test_data, false).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/pubrel.rs", "rank": 73, "score": 33063.12384810857 }, { "content": "\n\n #[tokio::test]\n\n async fn encode() {\n\n let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 33);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = PubAck::read(&mut test_data, false).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/puback.rs", "rank": 74, "score": 33063.12384810857 }, { "content": "\n\n #[tokio::test]\n\n async fn encode() {\n\n 
let test_data = decoded();\n\n let mut tested_result = Vec::new();\n\n let n_bytes = test_data.write(&mut tested_result).await.unwrap();\n\n assert_eq!(tested_result, encoded());\n\n assert_eq!(n_bytes, 41);\n\n }\n\n\n\n #[tokio::test]\n\n async fn decode() {\n\n let mut test_data = Cursor::new(encoded());\n\n let tested_result = UnSubAck::read(&mut test_data, 41).await.unwrap();\n\n assert_eq!(tested_result, decoded());\n\n }\n\n}\n", "file_path": "src/control/unsuback.rs", "rank": 75, "score": 33063.12384810857 }, { "content": " }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(reader: &mut R) -> SageResult<Self> {\n\n let reason_code = ReasonCode::try_from(codec::read_byte(reader).await?)?;\n\n\n\n let mut user_properties = Vec::new();\n\n let mut properties = PropertiesDecoder::take(reader).await?;\n\n let mut reason_string = None;\n\n let mut authentication_method = None;\n\n let mut authentication_data = Default::default();\n\n\n\n while properties.has_properties() {\n\n match properties.read().await? 
{\n\n Property::ReasonString(v) => reason_string = Some(v),\n\n Property::UserProperty(k, v) => user_properties.push((k, v)),\n\n Property::AuthenticationMethod(v) => authentication_method = Some(v),\n\n Property::AuthenticationData(v) => authentication_data = v,\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n", "file_path": "src/control/auth.rs", "rank": 76, "score": 33061.58470894531 }, { "content": " }\n\n}\n\n\n\nimpl Auth {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_reason_code(self.reason_code, writer).await?;\n\n let mut properties = Vec::new();\n\n\n\n n_bytes += self.authentication.write(&mut properties).await?;\n\n if let Some(v) = self.reason_string {\n\n n_bytes += Property::ReasonString(v).encode(&mut properties).await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n\n\n Ok(n_bytes)\n", "file_path": "src/control/auth.rs", "rank": 77, "score": 33061.42734118125 }, { "content": "mod unit {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 131, 29, 31, 0, 11, 66, 108, 97, 99, 107, 32, 66, 101, 116, 116, 121, 38, 0, 7,\n\n 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116,\n\n ]\n\n }\n\n\n\n fn decoded() -> PubRec {\n\n PubRec {\n\n packet_identifier: 1337,\n\n reason_code: ReasonCode::ImplementationSpecificError,\n\n reason_string: Some(\"Black Betty\".into()),\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n }\n\n }\n\n\n", "file_path": "src/control/pubrec.rs", "rank": 78, "score": 33060.67620643848 }, { "content": " Ok(n_bytes)\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n remaining_size: usize,\n\n ) -> SageResult<Self> {\n\n let 
mut reader = reader.take(remaining_size as u64);\n\n\n\n let packet_identifier = codec::read_two_byte_integer(&mut reader).await?;\n\n let mut user_properties = Vec::new();\n\n let mut properties = PropertiesDecoder::take(&mut reader).await?;\n\n let mut reason_string = None;\n\n while properties.has_properties() {\n\n match properties.read().await? {\n\n Property::ReasonString(v) => reason_string = Some(v),\n\n Property::UserProperty(k, v) => user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n", "file_path": "src/control/unsuback.rs", "rank": 79, "score": 33059.639347605604 }, { "content": "use crate::{\n\n codec, PropertiesDecoder, Property,\n\n ReasonCode::{self, ProtocolError},\n\n Result as SageResult,\n\n};\n\nuse std::{convert::TryInto, marker::Unpin};\n\nuse tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};\n\n\n\n/// An `UnSubAck` is sent by the server to acknowledge an unsubscribe request.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct UnSubAck {\n\n /// The packet identifier is used to identify the message throughout the\n\n /// communication\n\n pub packet_identifier: u16,\n\n\n\n /// An optional description of the acknowledgement.\n\n pub reason_string: Option<String>,\n\n\n\n /// General purpose user-defined properties\n\n pub user_properties: Vec<(String, String)>,\n", "file_path": "src/control/unsuback.rs", "rank": 80, "score": 33059.617133962915 }, { "content": " if shortened {\n\n pubcomp.reason_code = ReasonCode::Success;\n\n } else {\n\n pubcomp.reason_code = codec::read_byte(reader).await?.try_into()?;\n\n\n\n let mut properties = PropertiesDecoder::take(reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? 
{\n\n Property::ReasonString(v) => pubcomp.reason_string = Some(v),\n\n Property::UserProperty(k, v) => pubcomp.user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n }\n\n\n\n Ok(pubcomp)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/control/pubcomp.rs", "rank": 81, "score": 33058.600531225726 }, { "content": " if shortened {\n\n pubrec.reason_code = ReasonCode::Success;\n\n } else {\n\n pubrec.reason_code = codec::read_byte(reader).await?.try_into()?;\n\n\n\n let mut properties = PropertiesDecoder::take(reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? {\n\n Property::ReasonString(v) => pubrec.reason_string = Some(v),\n\n Property::UserProperty(k, v) => pubrec.user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n }\n\n\n\n Ok(pubrec)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/control/pubrec.rs", "rank": 82, "score": 33058.600531225726 }, { "content": " if shortened {\n\n pubrel.reason_code = ReasonCode::Success;\n\n } else {\n\n pubrel.reason_code = codec::read_byte(reader).await?.try_into()?;\n\n\n\n let mut properties = PropertiesDecoder::take(reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? {\n\n Property::ReasonString(v) => pubrel.reason_string = Some(v),\n\n Property::UserProperty(k, v) => pubrel.user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n }\n\n\n\n Ok(pubrel)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/control/pubrel.rs", "rank": 83, "score": 33058.600531225726 }, { "content": " if shortened {\n\n puback.reason_code = ReasonCode::Success;\n\n } else {\n\n puback.reason_code = codec::read_byte(reader).await?.try_into()?;\n\n\n\n let mut properties = PropertiesDecoder::take(reader).await?;\n\n while properties.has_properties() {\n\n match properties.read().await? 
{\n\n Property::ReasonString(v) => puback.reason_string = Some(v),\n\n Property::UserProperty(k, v) => puback.user_properties.push((k, v)),\n\n _ => return Err(ProtocolError.into()),\n\n }\n\n }\n\n }\n\n\n\n Ok(puback)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/control/puback.rs", "rank": 84, "score": 33058.59752644046 }, { "content": "mod unit {\n\n\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 146, 28, 31, 0, 11, 66, 108, 97, 99, 107, 32, 66, 101, 116, 116, 121, 38, 0, 6,\n\n 72, 195, 166, 114, 121, 97, 0, 3, 67, 97, 116,\n\n ]\n\n }\n\n\n\n fn decoded() -> PubComp {\n\n PubComp {\n\n packet_identifier: 1337,\n\n reason_code: ReasonCode::PacketIdentifierNotFound,\n\n reason_string: Some(\"Black Betty\".into()),\n\n user_properties: vec![(\"Hærya\".into(), \"Cat\".into())],\n\n }\n\n }\n", "file_path": "src/control/pubcomp.rs", "rank": 85, "score": 33058.47468298715 }, { "content": "mod unit {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 146, 29, 31, 0, 11, 66, 108, 97, 99, 107, 32, 66, 101, 116, 116, 121, 38, 0, 7,\n\n 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116,\n\n ]\n\n }\n\n\n\n fn decoded() -> PubRel {\n\n PubRel {\n\n packet_identifier: 1337,\n\n reason_code: ReasonCode::PacketIdentifierNotFound,\n\n reason_string: Some(\"Black Betty\".into()),\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n }\n\n }\n\n\n", "file_path": "src/control/pubrel.rs", "rank": 86, "score": 33058.47468298715 }, { "content": " reason_string: None,\n\n user_properties: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl PubRec {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n\n\n if let Some(v) = self.reason_string {\n\n n_bytes += 
Property::ReasonString(v).encode(&mut properties).await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n if n_bytes == 2 && self.reason_code != ReasonCode::Success {\n", "file_path": "src/control/pubrec.rs", "rank": 87, "score": 33058.463581874195 }, { "content": " reason_string: None,\n\n user_properties: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl PubAck {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n\n\n if let Some(v) = self.reason_string {\n\n n_bytes += Property::ReasonString(v).encode(&mut properties).await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n if n_bytes == 2 && self.reason_code != ReasonCode::Success {\n", "file_path": "src/control/puback.rs", "rank": 88, "score": 33058.463581874195 }, { "content": " reason_string: None,\n\n user_properties: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl PubComp {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n\n\n if let Some(v) = self.reason_string {\n\n n_bytes += Property::ReasonString(v).encode(&mut properties).await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n if n_bytes == 2 && self.reason_code != ReasonCode::Success {\n", "file_path": "src/control/pubcomp.rs", "rank": 89, "score": 33058.463581874195 }, { "content": " reason_string: None,\n\n user_properties: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl PubRel {\n\n pub(crate) async fn write<W: AsyncWrite + 
Unpin>(self, writer: &mut W) -> SageResult<usize> {\n\n let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n\n\n if let Some(v) = self.reason_string {\n\n n_bytes += Property::ReasonString(v).encode(&mut properties).await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n if n_bytes == 2 && self.reason_code != ReasonCode::Success {\n", "file_path": "src/control/pubrel.rs", "rank": 90, "score": 33058.463581874195 }, { "content": " Ok(2)\n\n } else {\n\n n_bytes += codec::write_reason_code(self.reason_code, writer).await?;\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n Ok(n_bytes)\n\n }\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n shortened: bool,\n\n ) -> SageResult<Self> {\n\n let packet_identifier = codec::read_two_byte_integer(reader).await?;\n\n\n\n let mut pubrel = PubRel {\n\n packet_identifier,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/control/pubrel.rs", "rank": 91, "score": 33058.29240383938 }, { "content": " Ok(2)\n\n } else {\n\n n_bytes += codec::write_reason_code(self.reason_code, writer).await?;\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n Ok(n_bytes)\n\n }\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n shortened: bool,\n\n ) -> SageResult<Self> {\n\n let packet_identifier = codec::read_two_byte_integer(reader).await?;\n\n\n\n let mut pubcomp = PubComp {\n\n packet_identifier,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/control/pubcomp.rs", "rank": 92, "score": 33058.29240383938 }, { "content": " Ok(2)\n\n } else {\n\n n_bytes += codec::write_reason_code(self.reason_code, writer).await?;\n\n n_bytes 
+= codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n Ok(n_bytes)\n\n }\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n shortened: bool,\n\n ) -> SageResult<Self> {\n\n let packet_identifier = codec::read_two_byte_integer(reader).await?;\n\n\n\n let mut pubrec = PubRec {\n\n packet_identifier,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/control/pubrec.rs", "rank": 93, "score": 33058.29240383938 }, { "content": " Ok(2)\n\n } else {\n\n n_bytes += codec::write_reason_code(self.reason_code, writer).await?;\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n Ok(n_bytes)\n\n }\n\n }\n\n\n\n pub(crate) async fn read<R: AsyncRead + Unpin>(\n\n reader: &mut R,\n\n shortened: bool,\n\n ) -> SageResult<Self> {\n\n let packet_identifier = codec::read_two_byte_integer(reader).await?;\n\n\n\n let mut puback = PubAck {\n\n packet_identifier,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/control/puback.rs", "rank": 94, "score": 33058.29071064141 }, { "content": "\n\n let mut reason_codes = Vec::new();\n\n\n\n while reader.limit() > 0 {\n\n reason_codes.push(codec::read_byte(&mut reader).await?.try_into()?);\n\n }\n\n\n\n Ok(UnSubAck {\n\n packet_identifier,\n\n user_properties,\n\n reason_string,\n\n reason_codes,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod unit {\n\n use super::*;\n\n use std::io::Cursor;\n", "file_path": "src/control/unsuback.rs", "rank": 95, "score": 33057.81528192148 }, { "content": "mod unit {\n\n\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 151, 29, 31, 0, 11, 66, 108, 97, 99, 107, 32, 66, 101, 116, 116, 121, 38, 0, 7,\n\n 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116,\n\n ]\n\n }\n\n\n\n fn decoded() -> PubAck {\n\n PubAck {\n\n packet_identifier: 1337,\n\n reason_code: 
ReasonCode::QuotaExceeded,\n\n reason_string: Some(\"Black Betty\".into()),\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n }\n\n }\n", "file_path": "src/control/puback.rs", "rank": 96, "score": 33057.814388112005 }, { "content": " let mut n_bytes = codec::write_two_byte_integer(self.packet_identifier, writer).await?;\n\n\n\n let mut properties = Vec::new();\n\n\n\n if let Some(reason_string) = self.reason_string {\n\n n_bytes += Property::ReasonString(reason_string)\n\n .encode(&mut properties)\n\n .await?;\n\n }\n\n for (k, v) in self.user_properties {\n\n n_bytes += Property::UserProperty(k, v).encode(&mut properties).await?;\n\n }\n\n\n\n n_bytes += codec::write_variable_byte_integer(properties.len() as u32, writer).await?;\n\n writer.write_all(&properties).await?;\n\n\n\n for reason_code in self.reason_codes {\n\n n_bytes += codec::write_reason_code(reason_code, writer).await?;\n\n }\n\n\n", "file_path": "src/control/unsuback.rs", "rank": 97, "score": 33055.56994393603 }, { "content": "\n\n fn encoded() -> Vec<u8> {\n\n vec![\n\n 5, 57, 36, 31, 0, 18, 71, 105, 111, 114, 103, 105, 111, 32, 98, 121, 32, 77, 111, 114,\n\n 111, 100, 101, 114, 38, 0, 7, 77, 111, 103, 119, 97, 195, 175, 0, 3, 67, 97, 116, 145,\n\n 143,\n\n ]\n\n }\n\n\n\n fn decoded() -> UnSubAck {\n\n UnSubAck {\n\n packet_identifier: 1337,\n\n reason_string: Some(\"Giorgio by Moroder\".into()),\n\n user_properties: vec![(\"Mogwaï\".into(), \"Cat\".into())],\n\n reason_codes: vec![\n\n ReasonCode::PacketIdentifierInUse,\n\n ReasonCode::TopicFilterInvalid,\n\n ],\n\n }\n\n }\n", "file_path": "src/control/unsuback.rs", "rank": 98, "score": 33054.60388055022 }, { "content": "\n\n /// A list of reason codes ackowledging the unsubscribtion.\n\n /// Each `ReasonCode` at a given index correspond to a unsubscribe request\n\n /// from the `Unsubscribe` packet at the same index.\n\n pub reason_codes: Vec<ReasonCode>,\n\n}\n\n\n\nimpl Default for UnSubAck {\n\n fn default() -> Self {\n\n 
UnSubAck {\n\n packet_identifier: 0,\n\n reason_string: None,\n\n user_properties: Default::default(),\n\n reason_codes: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl UnSubAck {\n\n pub(crate) async fn write<W: AsyncWrite + Unpin>(self, writer: &mut W) -> SageResult<usize> {\n", "file_path": "src/control/unsuback.rs", "rank": 99, "score": 33054.46473768194 } ]
Rust
exchanges/binance/src/exchange.rs
rcjmurillo/crypto-portfolio
3032bb100159875b5730da440b008d1e46df7539
use chrono::{DateTime, Utc}; use std::collections::HashSet; use anyhow::Result; use async_trait::async_trait; use futures::future::join_all; use crate::{ api_model::{Deposit, FiatOrder, MarginLoan, MarginRepay, Trade, Withdraw}, client::{BinanceFetcher, EndpointsGlobal, EndpointsUs, RegionGlobal, RegionUs}, }; use exchange::{self, AssetPair, AssetsInfo, ExchangeDataFetcher, ExchangeClient, Candle}; impl From<FiatOrder> for exchange::Deposit { fn from(d: FiatOrder) -> Self { Self { source_id: d.id, source: "binance".to_string(), asset: d.fiat_currency, amount: d.amount, fee: Some(d.platform_fee), time: d.create_time, is_fiat: true, } } } impl From<FiatOrder> for exchange::Withdraw { fn from(d: FiatOrder) -> Self { Self { source_id: d.id, source: "binance".to_string(), asset: d.fiat_currency, amount: d.amount, fee: d.transaction_fee + d.platform_fee, time: d.create_time, } } } impl From<Deposit> for exchange::Deposit { fn from(d: Deposit) -> Self { Self { source_id: d.id, source: "binance".to_string(), asset: d.coin, amount: d.amount, fee: None, time: d.insert_time, is_fiat: false, } } } impl From<Withdraw> for exchange::Withdraw { fn from(w: Withdraw) -> Self { Self { source_id: w.id, source: "binance".to_string(), asset: w.coin, amount: w.amount, time: w.apply_time, fee: w.transaction_fee, } } } impl From<Trade> for exchange::Trade { fn from(t: Trade) -> Self { Self { source_id: t.id.to_string(), source: "binance".to_string(), symbol: t.symbol, base_asset: t.base_asset, quote_asset: t.quote_asset, price: t.price, amount: t.qty, fee: t.commission, fee_asset: t.commission_asset, time: t.time, side: if t.is_buyer { exchange::TradeSide::Buy } else { exchange::TradeSide::Sell }, } } } impl From<MarginLoan> for exchange::Loan { fn from(m: MarginLoan) -> Self { Self { source_id: m.tx_id.to_string(), source: "binance".to_string(), asset: m.asset, amount: m.principal, time: m.timestamp, status: match m.status.as_str() { "CONFIRMED" => exchange::Status::Success, _ => 
exchange::Status::Failure, }, } } } impl From<MarginRepay> for exchange::Repay { fn from(r: MarginRepay) -> Self { Self { source_id: r.tx_id.to_string(), source: "binance".to_string(), asset: r.asset, amount: r.principal, interest: r.interest, time: r.timestamp, status: match r.status.as_str() { "CONFIRMED" => exchange::Status::Success, _ => exchange::Status::Failure, }, } } } #[async_trait] impl ExchangeDataFetcher for BinanceFetcher<RegionGlobal> { async fn trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await? .into_iter() .map(|x| x.symbol) .collect(); let mut trades: Vec<Result<Vec<Trade>>> = Vec::new(); let endpoint = EndpointsGlobal::Trades.to_string(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_trades(&endpoint, symbol)); if handles.len() >= 10 { trades.extend(join_all(handles).await); handles = Vec::new(); } } } if handles.len() > 0 { trades.extend(join_all(handles).await); } flatten_results(trades) } async fn margin_trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await? 
.into_iter() .map(|x| x.symbol) .collect(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_margin_trades(symbol)); } } flatten_results(join_all(handles).await) } async fn loans(&self) -> Result<Vec<exchange::Loan>> { let mut handles = Vec::new(); let exchange_symbols = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await?; let all_symbols: Vec<String> = exchange_symbols.iter().map(|x| x.symbol.clone()).collect(); let mut processed_assets = HashSet::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { if !processed_assets.contains(&symbol.base) { handles.push(self.fetch_margin_loans(&symbol.base, None)); processed_assets.insert(&symbol.base); } handles.push(self.fetch_margin_loans(&symbol.base, Some(symbol))); } } flatten_results(join_all(handles).await) } async fn repays(&self) -> Result<Vec<exchange::Repay>> { let mut handles = Vec::new(); let exchange_symbols = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await?; let all_symbols: Vec<String> = exchange_symbols.iter().map(|x| x.symbol.clone()).collect(); let mut processed_assets = HashSet::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { if !processed_assets.contains(&symbol.base) { handles.push(self.fetch_margin_repays(&symbol.base, None)); processed_assets.insert(symbol.base.clone()); } handles.push(self.fetch_margin_repays(&symbol.base, Some(symbol))); } } flatten_results(join_all(handles).await) } async fn deposits(&self) -> Result<Vec<exchange::Deposit>> { let mut deposits = Vec::new(); deposits.extend( self.fetch_fiat_deposits() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); deposits.extend( self.fetch_deposits() .await? 
.into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); Ok(deposits) } async fn withdraws(&self) -> Result<Vec<exchange::Withdraw>> { let mut withdraws = Vec::new(); withdraws.extend( self.fetch_fiat_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); withdraws.extend( self.fetch_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); Ok(withdraws) } } #[async_trait] impl ExchangeClient for BinanceFetcher<RegionGlobal> { async fn prices( &self, asset_pair: &AssetPair, start: DateTime<Utc>, end: DateTime<Utc>, ) -> Result<Vec<Candle>> { self.fetch_prices_in_range( &EndpointsGlobal::Klines.to_string(), &asset_pair.join(""), start.timestamp_millis().try_into()?, end.timestamp_millis().try_into()?, ).await } } #[async_trait] impl ExchangeDataFetcher for BinanceFetcher<RegionUs> { async fn trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsUs::ExchangeInfo.to_string()) .await? .into_iter() .map(|x| x.symbol) .collect(); let endpoint = EndpointsUs::Trades.to_string(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_trades(&endpoint, &symbol)); } } flatten_results(join_all(handles).await) } async fn margin_trades(&self) -> Result<Vec<exchange::Trade>> { Ok(Vec::new()) } async fn loans(&self) -> Result<Vec<exchange::Loan>> { Ok(Vec::new()) } async fn repays(&self) -> Result<Vec<exchange::Repay>> { Ok(Vec::new()) } async fn deposits(&self) -> Result<Vec<exchange::Deposit>> { let mut deposits = Vec::new(); deposits.extend( self.fetch_fiat_deposits() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); deposits.extend( self.fetch_deposits() .await? 
.into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); Ok(deposits) } async fn withdraws(&self) -> Result<Vec<exchange::Withdraw>> { let mut withdraws = Vec::new(); withdraws.extend( self.fetch_fiat_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); withdraws.extend( self.fetch_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); Ok(withdraws) } } #[async_trait] impl AssetsInfo for BinanceFetcher<RegionGlobal> { async fn price_at(&self, asset_pair: &AssetPair, time: &DateTime<Utc>) -> Result<f64> { self.fetch_price_at( &EndpointsGlobal::Prices.to_string(), &asset_pair.join(""), time, ) .await } } fn flatten_results<T, U>(results: Vec<Result<Vec<T>>>) -> Result<Vec<U>> where T: Into<U>, { Ok(results .into_iter() .collect::<Result<Vec<Vec<T>>>>()? .into_iter() .flatten() .map(|x| x.into()) .collect()) }
use chrono::{DateTime, Utc}; use std::collections::HashSet; use anyhow::Result; use async_trait::async_trait; use futures::future::join_all; use crate::{ api_model::{Deposit, FiatOrder, MarginLoan, MarginRepay, Trade, Withdraw}, client::{BinanceFetcher, EndpointsGlobal, EndpointsUs, RegionGlobal, RegionUs}, }; use exchange::{self, AssetPair, AssetsInfo, ExchangeDataFetcher, ExchangeClient, Candle}; impl From<FiatOrder> for exchange::Deposit { fn from(d: FiatOrder) -> Self { Self { source_id: d.id, source: "binance".to_string(), as
ance".to_string(), asset: r.asset, amount: r.principal, interest: r.interest, time: r.timestamp, status: match r.status.as_str() { "CONFIRMED" => exchange::Status::Success, _ => exchange::Status::Failure, }, } } } #[async_trait] impl ExchangeDataFetcher for BinanceFetcher<RegionGlobal> { async fn trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await? .into_iter() .map(|x| x.symbol) .collect(); let mut trades: Vec<Result<Vec<Trade>>> = Vec::new(); let endpoint = EndpointsGlobal::Trades.to_string(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_trades(&endpoint, symbol)); if handles.len() >= 10 { trades.extend(join_all(handles).await); handles = Vec::new(); } } } if handles.len() > 0 { trades.extend(join_all(handles).await); } flatten_results(trades) } async fn margin_trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await? 
.into_iter() .map(|x| x.symbol) .collect(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_margin_trades(symbol)); } } flatten_results(join_all(handles).await) } async fn loans(&self) -> Result<Vec<exchange::Loan>> { let mut handles = Vec::new(); let exchange_symbols = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await?; let all_symbols: Vec<String> = exchange_symbols.iter().map(|x| x.symbol.clone()).collect(); let mut processed_assets = HashSet::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { if !processed_assets.contains(&symbol.base) { handles.push(self.fetch_margin_loans(&symbol.base, None)); processed_assets.insert(&symbol.base); } handles.push(self.fetch_margin_loans(&symbol.base, Some(symbol))); } } flatten_results(join_all(handles).await) } async fn repays(&self) -> Result<Vec<exchange::Repay>> { let mut handles = Vec::new(); let exchange_symbols = self .fetch_exchange_symbols(&EndpointsGlobal::ExchangeInfo.to_string()) .await?; let all_symbols: Vec<String> = exchange_symbols.iter().map(|x| x.symbol.clone()).collect(); let mut processed_assets = HashSet::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { if !processed_assets.contains(&symbol.base) { handles.push(self.fetch_margin_repays(&symbol.base, None)); processed_assets.insert(symbol.base.clone()); } handles.push(self.fetch_margin_repays(&symbol.base, Some(symbol))); } } flatten_results(join_all(handles).await) } async fn deposits(&self) -> Result<Vec<exchange::Deposit>> { let mut deposits = Vec::new(); deposits.extend( self.fetch_fiat_deposits() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); deposits.extend( self.fetch_deposits() .await? 
.into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); Ok(deposits) } async fn withdraws(&self) -> Result<Vec<exchange::Withdraw>> { let mut withdraws = Vec::new(); withdraws.extend( self.fetch_fiat_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); withdraws.extend( self.fetch_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); Ok(withdraws) } } #[async_trait] impl ExchangeClient for BinanceFetcher<RegionGlobal> { async fn prices( &self, asset_pair: &AssetPair, start: DateTime<Utc>, end: DateTime<Utc>, ) -> Result<Vec<Candle>> { self.fetch_prices_in_range( &EndpointsGlobal::Klines.to_string(), &asset_pair.join(""), start.timestamp_millis().try_into()?, end.timestamp_millis().try_into()?, ).await } } #[async_trait] impl ExchangeDataFetcher for BinanceFetcher<RegionUs> { async fn trades(&self) -> Result<Vec<exchange::Trade>> { let all_symbols: Vec<String> = self .fetch_exchange_symbols(&EndpointsUs::ExchangeInfo.to_string()) .await? .into_iter() .map(|x| x.symbol) .collect(); let endpoint = EndpointsUs::Trades.to_string(); let mut handles = Vec::new(); for symbol in self.symbols().iter() { if all_symbols.contains(&symbol.join("")) { handles.push(self.fetch_trades(&endpoint, &symbol)); } } flatten_results(join_all(handles).await) } async fn margin_trades(&self) -> Result<Vec<exchange::Trade>> { Ok(Vec::new()) } async fn loans(&self) -> Result<Vec<exchange::Loan>> { Ok(Vec::new()) } async fn repays(&self) -> Result<Vec<exchange::Repay>> { Ok(Vec::new()) } async fn deposits(&self) -> Result<Vec<exchange::Deposit>> { let mut deposits = Vec::new(); deposits.extend( self.fetch_fiat_deposits() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); deposits.extend( self.fetch_deposits() .await? 
.into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Deposit>>(), ); Ok(deposits) } async fn withdraws(&self) -> Result<Vec<exchange::Withdraw>> { let mut withdraws = Vec::new(); withdraws.extend( self.fetch_fiat_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); withdraws.extend( self.fetch_withdraws() .await? .into_iter() .map(|x| x.into()) .collect::<Vec<exchange::Withdraw>>(), ); Ok(withdraws) } } #[async_trait] impl AssetsInfo for BinanceFetcher<RegionGlobal> { async fn price_at(&self, asset_pair: &AssetPair, time: &DateTime<Utc>) -> Result<f64> { self.fetch_price_at( &EndpointsGlobal::Prices.to_string(), &asset_pair.join(""), time, ) .await } } fn flatten_results<T, U>(results: Vec<Result<Vec<T>>>) -> Result<Vec<U>> where T: Into<U>, { Ok(results .into_iter() .collect::<Result<Vec<Vec<T>>>>()? .into_iter() .flatten() .map(|x| x.into()) .collect()) }
set: d.fiat_currency, amount: d.amount, fee: Some(d.platform_fee), time: d.create_time, is_fiat: true, } } } impl From<FiatOrder> for exchange::Withdraw { fn from(d: FiatOrder) -> Self { Self { source_id: d.id, source: "binance".to_string(), asset: d.fiat_currency, amount: d.amount, fee: d.transaction_fee + d.platform_fee, time: d.create_time, } } } impl From<Deposit> for exchange::Deposit { fn from(d: Deposit) -> Self { Self { source_id: d.id, source: "binance".to_string(), asset: d.coin, amount: d.amount, fee: None, time: d.insert_time, is_fiat: false, } } } impl From<Withdraw> for exchange::Withdraw { fn from(w: Withdraw) -> Self { Self { source_id: w.id, source: "binance".to_string(), asset: w.coin, amount: w.amount, time: w.apply_time, fee: w.transaction_fee, } } } impl From<Trade> for exchange::Trade { fn from(t: Trade) -> Self { Self { source_id: t.id.to_string(), source: "binance".to_string(), symbol: t.symbol, base_asset: t.base_asset, quote_asset: t.quote_asset, price: t.price, amount: t.qty, fee: t.commission, fee_asset: t.commission_asset, time: t.time, side: if t.is_buyer { exchange::TradeSide::Buy } else { exchange::TradeSide::Sell }, } } } impl From<MarginLoan> for exchange::Loan { fn from(m: MarginLoan) -> Self { Self { source_id: m.tx_id.to_string(), source: "binance".to_string(), asset: m.asset, amount: m.principal, time: m.timestamp, status: match m.status.as_str() { "CONFIRMED" => exchange::Status::Success, _ => exchange::Status::Failure, }, } } } impl From<MarginRepay> for exchange::Repay { fn from(r: MarginRepay) -> Self { Self { source_id: r.tx_id.to_string(), source: "bin
random
[ { "content": "pub fn get_asset_price_bucket(bucket: u16, asset: &AssetPair) -> Result<Option<Vec<Candle>>> {\n\n let conn = Connection::open(DB_NAME)?;\n\n\n\n let mut stmt =\n\n conn.prepare(\"SELECT prices FROM asset_price_buckets WHERE bucket = ?1 AND asset = ?2\")?;\n\n let mut iter = stmt.query_map(params![bucket, asset.join(\"-\")], |row| {\n\n let prices: String = row.get(0)?;\n\n Ok(prices)\n\n })?;\n\n\n\n if let Some(s) = iter.next() {\n\n Ok(Some(serde_json::from_str(&s?).map_err(|e| {\n\n anyhow!(e).context(\"couldn't fetch asset prices bucket from db\")\n\n })?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\npub struct Db;\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 0, "score": 52641.349472230264 }, { "content": "fn mk_fetchers(\n\n config: &cli::Config,\n\n file_fetcher: Option<FileDataFetcher>,\n\n) -> Vec<(&'static str, Box<dyn ExchangeDataFetcher + Send + Sync>)> {\n\n let mut fetchers: Vec<(&'static str, Box<dyn ExchangeDataFetcher + Send + Sync>)> = Vec::new();\n\n\n\n // coinbase exchange disabled because it doesn't provide the full set of\n\n // operations and fees when converting coins.\n\n\n\n // let coinbase_config: Option<CoinbaseConfig> = config\n\n // .coinbase\n\n // .as_ref()\n\n // .and_then(|c| Some(c.try_into().unwrap()));\n\n // if let Some(config) = coinbase_config {\n\n // let coinbase_fetcher = CoinbaseFetcher::<Std>::new(config.clone());\n\n // fetchers.push((\n\n // \"Coinbase\",\n\n // Box::new(coinbase_fetcher) as Box<dyn ExchangeDataFetcher + Send + Sync>,\n\n // ));\n\n // }\n", "file_path": "portfolio/src/main.rs", "rank": 1, "score": 48114.76926557713 }, { "content": "fn default_zero() -> f64 {\n\n 0.0\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct FiatOrder {\n\n #[serde(alias=\"orderId\", alias=\"orderNo\")]\n\n pub id: String,\n\n #[serde(default = \"default_currency_usd\")]\n\n pub fiat_currency: String,\n\n #[serde(with = 
\"float_from_str\")]\n\n pub amount: f64,\n\n #[serde(alias = \"totalFee\", with = \"float_from_str\")]\n\n pub transaction_fee: f64,\n\n #[serde(with = \"float_from_str\", default = \"default_zero\")]\n\n pub platform_fee: f64,\n\n #[serde(alias = \"orderStatus\")]\n\n pub status: String,\n\n #[serde(with = \"datetime_from_str\")]\n", "file_path": "exchanges/binance/src/api_model.rs", "rank": 2, "score": 42071.253411866004 }, { "content": "fn default_currency_usd() -> String {\n\n \"USD\".into()\n\n}\n\n\n", "file_path": "exchanges/binance/src/api_model.rs", "rank": 3, "score": 41065.70105583611 }, { "content": "fn default_buy() -> TransactionSide {\n\n TransactionSide::Buy\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct Pagination {\n\n pub next_uri: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct AccountCurrency {\n\n pub code: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct Account {\n\n pub id: String,\n\n pub currency: AccountCurrency,\n\n}\n\n\n", "file_path": "exchanges/coinbase/src/api_model.rs", "rank": 4, "score": 41065.70105583611 }, { "content": "pub fn insert_asset_price_bucket(\n\n bucket: u16,\n\n asset_pair: &AssetPair,\n\n prices: Vec<Candle>,\n\n) -> Result<()> {\n\n let conn = Connection::open(DB_NAME)?;\n\n\n\n let mut stmt =\n\n conn.prepare(\"INSERT INTO asset_price_buckets (bucket, asset, prices) VALUES (?, ?, ?)\")?;\n\n\n\n match stmt.execute(params![\n\n bucket,\n\n asset_pair.join(\"-\"),\n\n serde_json::to_string(&prices).context(\"error while converting prices into JSON\")?\n\n ]) {\n\n Ok(_) => (),\n\n Err(err) => match err {\n\n Error::SqliteFailure(FfiError { code, .. }, ..) 
=> {\n\n match code {\n\n ErrorCode::ConstraintViolation => (), // already exists, skip it\n\n _ => return Err(anyhow::Error::new(err)),\n\n }\n\n }\n\n err => return Err(anyhow::Error::new(err)),\n\n },\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 5, "score": 40133.09828885105 }, { "content": "pub fn create_tables() -> Result<()> {\n\n let conn = Connection::open(DB_NAME)?;\n\n\n\n conn.execute(\n\n \"CREATE TABLE IF NOT EXISTS operations (\n\n op_id INTEGER, \n\n source_id VARCHAR(100), \n\n source VARCHAR(25),\n\n type VARCHAR(20),\n\n for_asset VARCHAR(15),\n\n for_amount FLOAT,\n\n asset VARCHAR(15),\n\n amount FLOAT,\n\n timestamp TIMESTAMP NULL,\n\n PRIMARY KEY (op_id, source_id, source, type, asset)\n\n )\",\n\n [],\n\n )?;\n\n\n\n conn.execute(\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 6, "score": 39184.23897822451 }, { "content": "pub fn get_operations() -> Result<Vec<Operation>> {\n\n let conn = Connection::open(DB_NAME)?;\n\n\n\n let mut stmt = conn.prepare(\n\n \"SELECT op_id, source_id, source,type, for_asset, for_amount, asset, amount, timestamp \n\n FROM operations\n\n ORDER BY timestamp ASC\",\n\n )?;\n\n let op_iter = stmt.query_map([], |row| {\n\n Ok(Operation {\n\n op_id: row.get(0)?,\n\n source_id: row.get(1)?,\n\n source: row.get(2)?,\n\n op_type: row.get(3)?,\n\n for_asset: row.get(4)?,\n\n for_amount: row.get(5)?,\n\n asset: row.get(6)?,\n\n amount: row.get(7)?,\n\n timestamp: row.get(8)?,\n\n })\n\n })?;\n\n\n\n op_iter\n\n .map(|o| o.map_err(|e| anyhow!(\"couldn't fetch operation from db\").context(e)))\n\n .collect()\n\n}\n\n\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 7, "score": 34732.347593742124 }, { "content": "pub fn insert_operations(ops: Vec<Operation>) -> Result<(usize, usize)> {\n\n let conn = Connection::open(DB_NAME)?;\n\n\n\n let mut stmt = conn.prepare_cached(\n\n \"INSERT INTO \n\n operations (op_id, source_id, source, 
type, for_asset, for_amount, asset, amount, timestamp) \n\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\",\n\n )?;\n\n\n\n let mut inserted = 0;\n\n let mut skipped = 0;\n\n for op in ops {\n\n inserted += match stmt.execute(params![\n\n op.op_id,\n\n op.source_id,\n\n op.source,\n\n op.op_type,\n\n op.for_asset,\n\n op.for_amount,\n\n op.asset,\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 8, "score": 29993.359180132808 }, { "content": "fn read_file(path: &OsStr) -> std::result::Result<File, OsString> {\n\n match File::open(path) {\n\n Ok(file) => Ok(file),\n\n Err(err) => Err(err.to_string().into()),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct ExchangeConfig {\n\n // all the asset pairs to work with, only asset pairs included here will\n\n // appear in the report.\n\n pub symbols: Option<Vec<String>>,\n\n pub assets: Option<Vec<Asset>>,\n\n // How far back to look for transactions\n\n start_date: toml::Value,\n\n}\n\n\n\nimpl ExchangeConfig {\n\n pub fn start_date(&self) -> Result<NaiveDate> {\n\n if let Some(start_date) = self.start_date.as_datetime() {\n", "file_path": "portfolio/src/cli.rs", "rank": 9, "score": 29993.359180132808 }, { "content": "fn read_config_file(path: &OsStr) -> std::result::Result<Config, OsString> {\n\n match Config::from_file_path(path.into()) {\n\n Ok(config) => Ok(config),\n\n Err(err) => Err(err.to_string().into()),\n\n }\n\n}\n\n\n", "file_path": "portfolio/src/cli.rs", "rank": 10, "score": 29328.96662900284 }, { "content": "#[async_trait]\n\npub trait AssetsInfo {\n\n async fn price_at(&self, asset_pair: &AssetPair, time: &DateTime<Utc>) -> Result<f64>;\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Candle {\n\n pub open_time: u64,\n\n pub close_time: u64,\n\n pub open_price: f64,\n\n pub close_price: f64,\n\n}\n\n\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 12, "score": 16742.3180839545 }, { "content": "use crate::{\n\n operations::Operation,\n\n 
{Deposit, Loan, Repay, Status, Trade, TradeSide, Withdraw},\n\n};\n\n\n\n// pub struct Vec<Operation>(Vec<Operation>);\n\n\n\n// impl Vec<Operation> {\n\n// pub fn new() -> Self {\n\n// Self(vec![])\n\n// }\n\n// }\n\n\n\n// impl From<Vec<Operation>> for Vec<Operation> {\n\n// fn from(ops: Vec<Operation>) -> Self {\n\n// ops.0\n\n// }\n\n// }\n\n\n\n// impl Deref for Vec<Operation> {\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 14, "score": 18.633267652713144 }, { "content": "use std::convert::TryFrom;\n\n\n\nuse anyhow::{Error, Result};\n\nuse async_trait::async_trait;\n\nuse chrono::{DateTime, Utc};\n\nuse futures::future::join_all;\n\n\n\nuse crate::{\n\n api_model::{Amount, Fill, Transaction, TransactionSide},\n\n client::{CoinbaseFetcher, Config, Pro, Std},\n\n};\n\n\n\nuse exchange::{Deposit, ExchangeDataFetcher, Loan, Repay, Trade, TradeSide, Withdraw};\n\n\n\nimpl Into<Trade> for Fill {\n\n fn into(self) -> Trade {\n\n let assets: Vec<_> = self.product_id.split(\"-\").collect();\n\n let base_asset = assets[0].to_string();\n\n let quote_asset = assets[1].to_string();\n\n Trade {\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 15, "score": 15.847650884915609 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\nuse anyhow::{anyhow, Error, Result};\n\nuse async_trait::async_trait;\n\nuse chrono::{DateTime, TimeZone, Utc};\n\nuse serde::Deserialize;\n\nuse tokio::sync::{mpsc, RwLock};\n\nuse tracing::{debug, error, span, Level};\n\n\n\nuse crate::operations::{\n\n db::{self, get_asset_price_bucket, insert_asset_price_bucket},\n\n storage::Storage,\n\n};\n\n\n\nuse crate::{\n\n AssetPair, AssetsInfo, Candle, Deposit, ExchangeClient, ExchangeDataFetcher, Loan, Repay,\n\n Trade, TradeSide, Withdraw,\n\n};\n\n\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 17, "score": 15.229407890090195 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use 
super::*;\n\n use quickcheck::{quickcheck, Arbitrary, Gen, TestResult};\n\n\n\n impl Arbitrary for Status {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n g.choose(&[Self::Success, Self::Failure]).cloned().unwrap()\n\n }\n\n }\n\n\n\n impl Arbitrary for Trade {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let assets = [\"ADA\", \"SOL\", \"MATIC\"];\n\n let quote_assets = [\"BTC\", \"ETH\", \"AVAX\"];\n\n let base_asset = g.choose(&assets).take().unwrap();\n\n let quote_asset = g.choose(&quote_assets).take().unwrap();\n\n let sides = [TradeSide::Buy, TradeSide::Sell];\n\n Self {\n\n source_id: \"1\".to_string(),\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 18, "score": 14.86472195765656 }, { "content": "// type Target = Vec<Operation>;\n\n\n\n// fn deref(&self) -> &Self::Target {\n\n// &self.0\n\n// }\n\n// }\n\n\n\nimpl From<Trade> for Vec<Operation> {\n\n fn from(trade: Trade) -> Self {\n\n let mut ops = match trade.side {\n\n TradeSide::Buy => vec![\n\n Operation::BalanceIncrease {\n\n id: 1,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.base_asset.clone(),\n\n amount: trade.amount,\n\n },\n\n Operation::Cost {\n\n id: 2,\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 19, "score": 14.248978062706795 }, { "content": "\n\n rx\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::Status;\n\n use quickcheck::{quickcheck, Arbitrary, Gen, TestResult};\n\n use tokio::sync::Mutex;\n\n use Operation::*;\n\n\n\n impl Arbitrary for OperationStatus {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n g.choose(&[Self::Success, Self::Failed]).cloned().unwrap()\n\n }\n\n }\n\n\n\n impl Arbitrary for Operation {\n\n fn arbitrary(g: &mut Gen) -> Self {\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 20, "score": 14.001724057295148 }, { "content": " source_id: withdraw.source_id,\n\n source: withdraw.source,\n\n for_asset: withdraw.asset.clone(),\n\n 
for_amount: 0.0,\n\n asset: withdraw.asset,\n\n amount: withdraw.fee,\n\n time: withdraw.time,\n\n },\n\n ]);\n\n }\n\n ops\n\n }\n\n}\n\n\n\nimpl From<Loan> for Vec<Operation> {\n\n fn from(loan: Loan) -> Self {\n\n match loan.status {\n\n Status::Success => vec![Operation::BalanceIncrease {\n\n id: 1,\n\n source_id: loan.source_id,\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 21, "score": 13.560413417227652 }, { "content": "impl From<Withdraw> for Vec<Operation> {\n\n fn from(withdraw: Withdraw) -> Self {\n\n let mut ops = vec![Operation::BalanceDecrease {\n\n id: 1,\n\n source_id: withdraw.source_id.clone(),\n\n source: withdraw.source.clone(),\n\n asset: withdraw.asset.clone(),\n\n amount: withdraw.amount,\n\n }];\n\n if withdraw.fee > 0.0 {\n\n ops.extend(vec![\n\n Operation::BalanceDecrease {\n\n id: 2,\n\n source_id: format!(\"{}-fee\", &withdraw.source_id),\n\n source: withdraw.source.clone(),\n\n asset: withdraw.asset.clone(),\n\n amount: withdraw.fee,\n\n },\n\n Operation::Cost {\n\n id: 3,\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 22, "score": 13.282996242448155 }, { "content": "\n\npub enum EndpointsUs {\n\n Trades,\n\n Klines,\n\n Prices,\n\n ExchangeInfo,\n\n Deposits,\n\n Withdraws,\n\n FiatDeposits,\n\n FiatWithdraws,\n\n}\n\n\n\nimpl ToString for EndpointsUs {\n\n fn to_string(&self) -> String {\n\n match self {\n\n Self::Trades => \"/api/v3/myTrades\",\n\n Self::Klines => \"/api/v3/klines\",\n\n Self::Prices => \"/api/v3/ticker/price\",\n\n Self::ExchangeInfo => \"/api/v3/exchangeInfo\",\n\n Self::Deposits => \"/wapi/v3/depositHistory.html\",\n", "file_path": "exchanges/binance/src/client.rs", "rank": 23, "score": 13.192749893308541 }, { "content": " MarginRepays,\n\n}\n\n\n\nimpl ToString for EndpointsGlobal {\n\n fn to_string(&self) -> String {\n\n match self {\n\n Self::Trades => \"/api/v3/myTrades\",\n\n Self::Klines => \"/api/v3/klines\",\n\n Self::Prices => 
\"/api/v3/ticker/price\",\n\n Self::ExchangeInfo => \"/api/v3/exchangeInfo\",\n\n Self::Deposits => \"/sapi/v1/capital/deposit/hisrec\",\n\n Self::Withdraws => \"/sapi/v1/capital/withdraw/history\",\n\n Self::FiatOrders => \"/sapi/v1/fiat/orders\",\n\n Self::FiatDeposits => \"\",\n\n Self::FiatWithdraws => \"\",\n\n Self::MarginTrades => \"/sapi/v1/margin/myTrades\",\n\n Self::MarginLoans => \"/sapi/v1/margin/loan\",\n\n Self::MarginRepays => \"/sapi/v1/margin/repay\",\n\n }\n\n .to_string()\n", "file_path": "exchanges/binance/src/client.rs", "rank": 24, "score": 13.084667522298044 }, { "content": "use std::fmt;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse async_trait::async_trait;\n\nuse chrono::{DateTime, Utc};\n\n\n\nuse crate::operations::Operation;\n\nuse crate::{AssetPair, AssetsInfo};\n\n\n\n#[derive(Debug)]\n\npub struct Sale {\n\n pub asset: String,\n\n pub amount: f64,\n\n pub datetime: DateTime<Utc>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Purchase {\n\n source: String,\n\n amount: f64,\n", "file_path": "exchanges/exchange/src/operations/profit_loss.rs", "rank": 25, "score": 12.940791289623068 }, { "content": " time: DateTime<Utc>,\n\n },\n\n // Revenue generated by selling an asset\n\n Revenue {\n\n id: u8,\n\n source_id: String,\n\n source: String,\n\n asset: String,\n\n amount: f64,\n\n time: DateTime<Utc>,\n\n },\n\n}\n\n\n\nimpl Operation {\n\n fn id(&self) -> String {\n\n match self {\n\n Self::BalanceIncrease { id, source_id, .. } => {\n\n format!(\"balance_increase-{}-{}\", source_id, id)\n\n }\n\n Self::BalanceDecrease { id, source_id, .. } => {\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 26, "score": 12.894685344669819 }, { "content": " format!(\"balance_decrease-{}-{}\", source_id, id,)\n\n }\n\n Self::Cost { id, source_id, .. } => format!(\"cost-{}-{}\", source_id, id),\n\n Self::Revenue { id, source_id, .. 
} => format!(\"revenue-{}-{}\", source_id, id),\n\n }\n\n }\n\n\n\n pub fn time(&self) -> Option<&DateTime<Utc>> {\n\n use Operation::*;\n\n match self {\n\n Cost { time, .. } => Some(time),\n\n Revenue { time, .. } => Some(time),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn amount(&self) -> f64 {\n\n use Operation::*;\n\n match self {\n\n Cost { amount, .. } => *amount,\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 28, "score": 12.628539782271398 }, { "content": "use std::fs::File;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse async_trait::async_trait;\n\nuse serde::Deserialize;\n\nuse serde_json;\n\n\n\nuse crate::errors::Error;\n\nuse exchange::{Deposit, ExchangeDataFetcher, Loan, Repay, Trade, Withdraw};\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n", "file_path": "portfolio/src/custom_ops.rs", "rank": 29, "score": 12.35742791064057 }, { "content": " source: \"test\".to_string(),\n\n asset: g.choose(&assets).take().unwrap().to_string(),\n\n // non-zero amount\n\n amount: 0.1 + u16::arbitrary(g) as f64,\n\n fee: Option::arbitrary(g),\n\n time: Utc::now(),\n\n is_fiat: *g.choose(&[true, false]).take().unwrap(),\n\n }\n\n }\n\n }\n\n\n\n impl Arbitrary for Withdraw {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let assets = [\"ADA\", \"SOL\", \"MATIC\", \"BTC\", \"ETH\", \"AVAX\"];\n\n Self {\n\n source_id: \"1\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: g.choose(&assets).take().unwrap().to_string(),\n\n // non-zero amount\n\n amount: 0.1 + u16::arbitrary(g) as f64,\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 30, "score": 12.248712271641127 }, { "content": " fee: u16::arbitrary(g) as f64,\n\n time: Utc::now(),\n\n }\n\n }\n\n }\n\n\n\n impl Arbitrary for Loan {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let assets = [\"ADA\", \"SOL\", \"MATIC\", \"BTC\", \"ETH\", \"AVAX\"];\n\n Self {\n\n source_id: \"1\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: g.choose(&assets).unwrap().to_string(),\n\n // 
non-zero amount\n\n amount: 0.1 + u16::arbitrary(g) as f64,\n\n time: Utc::now(),\n\n status: Status::arbitrary(g),\n\n }\n\n }\n\n }\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 33, "score": 11.923946934702586 }, { "content": "use crate::client::Identifiable;\n\nuse chrono::{DateTime, Utc};\n\nuse serde::Deserialize;\n\n\n", "file_path": "exchanges/coinbase/src/api_model.rs", "rank": 34, "score": 11.584556905515182 }, { "content": " Self::Withdraws => \"/wapi/v3/withdrawHistory.html\",\n\n Self::FiatDeposits => \"/sapi/v1/fiatpayment/query/deposit/history\",\n\n Self::FiatWithdraws => \"/sapi/v1/fiatpayment/query/withdraw/history\",\n\n }\n\n .to_string()\n\n }\n\n}\n\n\n\npub enum EndpointsGlobal {\n\n Trades,\n\n Klines,\n\n Prices,\n\n ExchangeInfo,\n\n Deposits,\n\n Withdraws,\n\n FiatDeposits,\n\n FiatWithdraws,\n\n FiatOrders,\n\n MarginTrades,\n\n MarginLoans,\n", "file_path": "exchanges/binance/src/client.rs", "rank": 35, "score": 11.499875651707324 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\npub struct Repay {\n\n pub source_id: String,\n\n pub source: String,\n\n pub asset: String,\n\n pub amount: f64,\n\n pub interest: f64,\n\n #[serde(with = \"datetime_from_str\")]\n\n pub time: DateTime<Utc>,\n\n pub status: Status,\n\n}\n\n\n\npub(crate) mod datetime_from_str {\n\n use chrono::{DateTime, TimeZone, Utc};\n\n use serde::{de, Deserialize, Deserializer};\n\n use std::convert::TryInto;\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 36, "score": 11.324465719404028 }, { "content": " }\n\n}\n\n\n\npub struct Config {\n\n pub start_date: NaiveDate,\n\n pub symbols: Vec<AssetPair>,\n\n}\n\n\n\nimpl Config {\n\n pub fn empty() -> Self {\n\n Self {\n\n start_date: Utc::now().naive_utc().date(),\n\n symbols: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Clone for Config {\n\n fn clone(&self) -> Self 
{\n\n Self {\n", "file_path": "exchanges/binance/src/client.rs", "rank": 37, "score": 11.292363828023522 }, { "content": "use anyhow::{anyhow, Context, Result};\n\nuse async_trait::async_trait;\n\nuse rusqlite::{ffi::Error as FfiError, params, Connection, Error, ErrorCode};\n\nuse serde_json;\n\n\n\nuse crate::operations::{storage::Storage, Operation as OperationType};\n\nuse crate::{AssetPair, Candle};\n\n\n\nconst DB_NAME: &'static str = \"operations.db\";\n\n\n\n#[derive(Debug)]\n\npub struct Operation {\n\n pub op_id: u8,\n\n pub source_id: String,\n\n pub source: String,\n\n pub op_type: String,\n\n pub for_asset: Option<String>,\n\n pub for_amount: Option<f64>,\n\n pub asset: String,\n\n pub amount: f64,\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 38, "score": 11.27988422198376 }, { "content": "\n\n impl Arbitrary for Repay {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let assets = [\"ADA\", \"SOL\", \"MATIC\", \"BTC\", \"ETH\", \"AVAX\"];\n\n Self {\n\n source_id: \"1\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: g.choose(&assets).unwrap().to_string(),\n\n // non-zero amount\n\n amount: 0.1 + u16::arbitrary(g) as f64,\n\n interest: 0.1 + u16::arbitrary(g) as f64,\n\n time: Utc::now(),\n\n status: Status::arbitrary(g),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 39, "score": 11.267837043591046 }, { "content": " pub fee_asset: String,\n\n #[serde(with = \"datetime_from_str\")]\n\n pub time: DateTime<Utc>,\n\n pub side: TradeSide,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Deposit {\n\n pub source_id: String,\n\n pub source: String,\n\n pub asset: String,\n\n pub amount: f64,\n\n #[serde(with = \"datetime_from_str\")]\n\n pub time: DateTime<Utc>,\n\n pub fee: Option<f64>,\n\n pub is_fiat: bool,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Withdraw {\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 41, "score": 10.197770625529067 }, 
{ "content": " .created_at\n\n .parse::<DateTime<Utc>>()\n\n .expect(&format!(\"couldn't parse time '{}'\", self.created_at)),\n\n side: match self.side.as_str() {\n\n \"buy\" => TradeSide::Buy,\n\n \"sell\" => TradeSide::Sell,\n\n _ => panic!(\"invalid transaction side {}\", self.side),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Deposit> for Transaction {\n\n fn into(self) -> Deposit {\n\n let subtotal: Amount = self.subtotal.expect(\"missing subtotal in transaction\");\n\n let fee: Amount = self.fee.expect(\"missing fee in transaction\");\n\n let payout_at = self.payout_at.expect(\"missing payout_at in transaction\");\n\n Deposit {\n\n source_id: self.id,\n\n source: \"coinbase\".to_string(),\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 42, "score": 9.934688064238838 }, { "content": "use std::{env, marker::PhantomData, sync::Arc};\n\n\n\nuse anyhow::{anyhow, Result};\n\n\n\nuse bytes::Bytes;\n\nuse chrono::{DateTime, Duration, NaiveDate, Utc};\n\nuse futures::future::join_all;\n\nuse hex::encode as hex_encode;\n\nuse hmac::{Hmac, Mac};\n\nuse reqwest::header::{HeaderMap, HeaderName, HeaderValue};\n\nuse serde::{de::DeserializeOwned, Deserialize};\n\nuse serde_json::Value;\n\nuse sha2::Sha256;\n\n\n\nuse api_client::{errors::Error as ClientError, ApiClient, QueryParams};\n\nuse exchange::{Candle, AssetPair};\n\n\n\nuse crate::{\n\n api_model::*,\n\n errors::{ApiErrorKind, Error as ApiError},\n", "file_path": "exchanges/binance/src/client.rs", "rank": 43, "score": 9.752875834259193 }, { "content": " source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n for_asset: trade.base_asset.clone(),\n\n for_amount: trade.amount,\n\n asset: trade.quote_asset.clone(),\n\n amount: trade.amount * trade.price,\n\n time: trade.time,\n\n },\n\n Operation::BalanceDecrease {\n\n id: 3,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.quote_asset.clone(),\n\n amount: trade.amount * trade.price,\n\n },\n\n 
Operation::Revenue {\n\n id: 4,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.quote_asset.clone(),\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 44, "score": 9.559278882895082 }, { "content": " }\n\n }\n\n Ok(trades)\n\n }\n\n\n\n pub async fn fetch_trades(&self, endpoint: &str, symbol: &AssetPair) -> Result<Vec<Trade>> {\n\n self.fetch_trades_from_endpoint(symbol, endpoint, None)\n\n .await\n\n }\n\n}\n\n\n\nimpl BinanceFetcher<RegionGlobal> {\n\n pub fn new() -> Self {\n\n Self {\n\n api_client: ApiClient::new(ENDPOINT_CONCURRENCY),\n\n config: None,\n\n credentials: Credentials::<RegionGlobal>::new(),\n\n domain: API_DOMAIN_GLOBAL,\n\n }\n\n }\n", "file_path": "exchanges/binance/src/client.rs", "rank": 45, "score": 9.461995302065686 }, { "content": " amount: trade.amount * trade.price,\n\n time: trade.time,\n\n },\n\n ],\n\n TradeSide::Sell => vec![\n\n Operation::BalanceDecrease {\n\n id: 1,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.base_asset.clone(),\n\n amount: trade.amount,\n\n },\n\n Operation::Revenue {\n\n id: 2,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.base_asset.clone(),\n\n amount: trade.amount,\n\n time: trade.time,\n\n },\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 47, "score": 9.395399418797783 }, { "content": " Operation::BalanceIncrease {\n\n id: 3,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.quote_asset.clone(),\n\n amount: trade.amount * trade.price,\n\n },\n\n Operation::Cost {\n\n id: 4,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n for_asset: trade.quote_asset.clone(),\n\n for_amount: trade.amount * trade.price,\n\n asset: trade.base_asset.clone(),\n\n amount: trade.amount,\n\n time: trade.time,\n\n },\n\n ],\n\n };\n\n if trade.fee_asset != \"\" && 
trade.fee > 0.0 {\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 48, "score": 9.381074636202083 }, { "content": " ops.push(Operation::BalanceDecrease {\n\n id: 5,\n\n source_id: trade.source_id.clone(),\n\n source: trade.source.clone(),\n\n asset: trade.fee_asset.clone(),\n\n amount: trade.fee,\n\n });\n\n ops.push(Operation::Cost {\n\n id: 6,\n\n source_id: trade.source_id,\n\n source: trade.source,\n\n for_asset: match trade.side {\n\n TradeSide::Buy => trade.base_asset,\n\n TradeSide::Sell => trade.quote_asset,\n\n },\n\n for_amount: 0.0,\n\n asset: trade.fee_asset,\n\n amount: trade.fee,\n\n time: trade.time,\n\n });\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 49, "score": 9.325459552819925 }, { "content": " source: \"test\".to_string(),\n\n symbol: format!(\"{}{}\", base_asset, quote_asset),\n\n base_asset: base_asset.to_string(),\n\n quote_asset: quote_asset.to_string(),\n\n // non-zero price and amount\n\n price: 0.1 + u16::arbitrary(g) as f64,\n\n amount: 0.1 + u16::arbitrary(g) as f64,\n\n fee: u16::arbitrary(g).try_into().unwrap(),\n\n fee_asset: g.choose(&assets).take().unwrap().to_string(),\n\n time: Utc::now(),\n\n side: g.choose(&sides).unwrap().clone(),\n\n }\n\n }\n\n }\n\n\n\n impl Arbitrary for Deposit {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let assets = [\"ADA\", \"SOL\", \"MATIC\", \"BTC\", \"ETH\", \"AVAX\"];\n\n Self {\n\n source_id: \"1\".to_string(),\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 50, "score": 9.311624915323272 }, { "content": "\n\n#[async_trait]\n\nimpl ExchangeDataFetcher for FileDataFetcher {\n\n async fn trades(&self) -> Result<Vec<Trade>> {\n\n Ok(self.data.trades.clone())\n\n }\n\n async fn margin_trades(&self) -> Result<Vec<Trade>> {\n\n Ok(Vec::new())\n\n }\n\n async fn loans(&self) -> Result<Vec<Loan>> {\n\n Ok(Vec::new())\n\n }\n\n async fn repays(&self) -> Result<Vec<Repay>> {\n\n Ok(Vec::new())\n\n }\n\n async fn deposits(&self) 
-> Result<Vec<Deposit>> {\n\n Ok(self.data.deposits.clone().unwrap_or(Vec::new()))\n\n }\n\n async fn withdraws(&self) -> Result<Vec<Withdraw>> {\n\n Ok(self.data.withdraws.clone().unwrap_or(Vec::new()))\n\n }\n\n}\n", "file_path": "portfolio/src/custom_ops.rs", "rank": 51, "score": 9.261701280415672 }, { "content": " source: loan.source,\n\n asset: loan.asset,\n\n amount: loan.amount,\n\n }],\n\n Status::Failure => vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl From<Repay> for Vec<Operation> {\n\n fn from(repay: Repay) -> Self {\n\n match repay.status {\n\n Status::Success => vec![\n\n Operation::BalanceDecrease {\n\n id: 1,\n\n source_id: repay.source_id.clone(),\n\n source: repay.source.clone(),\n\n asset: repay.asset.clone(),\n\n amount: repay.amount + repay.interest,\n\n },\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 52, "score": 8.870591960173766 }, { "content": " break;\n\n }\n\n }\n\n Ok(withdraws)\n\n }\n\n}\n\n\n\nimpl BinanceFetcher<RegionUs> {\n\n pub fn new() -> Self {\n\n Self {\n\n api_client: ApiClient::new(ENDPOINT_CONCURRENCY),\n\n config: None,\n\n credentials: Credentials::<RegionUs>::new(),\n\n domain: API_DOMAIN_US,\n\n }\n\n }\n\n\n\n pub fn with_config(config: Config) -> Self {\n\n Self {\n\n api_client: ApiClient::new(ENDPOINT_CONCURRENCY),\n", "file_path": "exchanges/binance/src/client.rs", "rank": 53, "score": 8.84835636398672 }, { "content": " }\n\n\n\n ops\n\n }\n\n}\n\n\n\nimpl From<Deposit> for Vec<Operation> {\n\n fn from(deposit: Deposit) -> Self {\n\n let mut ops = vec![Operation::BalanceIncrease {\n\n id: 1,\n\n source_id: deposit.source_id.clone(),\n\n source: deposit.source.clone(),\n\n asset: deposit.asset.clone(),\n\n amount: deposit.amount,\n\n }];\n\n if let Some(fee) = deposit.fee.filter(|f| f > &0.0) {\n\n ops.extend(vec![\n\n Operation::BalanceDecrease {\n\n id: 2,\n\n source_id: deposit.source_id.clone(),\n", "file_path": "exchanges/exchange/src/operations/exchange_ops.rs", "rank": 54, "score": 
8.8133707013805 }, { "content": " source_id,\n\n source,\n\n asset,\n\n amount,\n\n },\n\n _ => panic!(\"unexpected index\"),\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn trade_buy_into_operations() {\n\n fn prop(trade: Trade) -> TestResult {\n\n if matches!(trade.side, TradeSide::Sell) {\n\n return TestResult::discard();\n\n }\n\n\n\n let t = trade.clone();\n\n let ops: Vec<Operation> = trade.into();\n\n\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 55, "score": 8.353826586610232 }, { "content": " use async_trait::async_trait;\n\n use quickcheck::{quickcheck, Gen, TestResult};\n\n use std::sync::Arc;\n\n use tokio::sync::Mutex;\n\n\n\n use crate::operations::{storage::Storage, AssetPrices, Operation::*};\n\n\n\n struct DummyStorage {\n\n ops: Vec<Operation>,\n\n }\n\n\n\n #[async_trait]\n\n impl Storage for DummyStorage {\n\n async fn get_ops(&self) -> Result<Vec<Operation>> {\n\n Ok(self.ops.clone())\n\n }\n\n async fn insert_ops(&self, ops: Vec<Operation>) -> Result<(usize, usize)> {\n\n Ok((ops.len(), 0))\n\n }\n\n }\n", "file_path": "exchanges/exchange/src/operations/profit_loss.rs", "rank": 56, "score": 8.186056148447694 }, { "content": "use std::{env, fmt, marker::PhantomData};\n\n\n\nuse anyhow::{Error, Result};\n\nuse base64;\n\nuse bytes::Bytes;\n\nuse chrono::{Duration, NaiveDate, Utc};\n\nuse futures::future::join_all;\n\nuse hex::encode as hex_encode;\n\nuse hmac::{Hmac, Mac, NewMac};\n\nuse reqwest::header::{HeaderMap, HeaderName, HeaderValue};\n\nuse serde::de::DeserializeOwned;\n\nuse sha2::Sha256;\n\n\n\nuse api_client::{\n\n errors::{Error as ApiError},\n\n ApiClient, QueryParams,\n\n};\n\n\n\nuse crate::api_model::{Account, Fill, Product, Response, Transaction};\n\nuse exchange::AssetPair;\n\n\n\nconst ENDPOINT_CONCURRENCY: usize = 2;\n\n\n", "file_path": "exchanges/coinbase/src/client.rs", "rank": 57, "score": 8.18094161817622 }, { "content": "pub(crate) mod datetime_from_str {\n\n use chrono::{DateTime, TimeZone, Utc};\n\n 
use serde::{de, Deserialize, Deserializer};\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n #[derive(Deserialize)]\n\n #[serde(untagged)]\n\n enum TimestampOrString {\n\n Timestamp(i64),\n\n String(String),\n\n }\n\n\n\n Ok(match TimestampOrString::deserialize(deserializer)? {\n\n // timestamps from the API are in milliseconds\n\n TimestampOrString::Timestamp(ts) => Utc.timestamp_millis(ts),\n\n TimestampOrString::String(s) => Utc\n\n .datetime_from_str(&s, \"%Y-%m-%d %H:%M:%S\")\n\n .map_err(de::Error::custom)?,\n\n })\n\n }\n\n}\n", "file_path": "exchanges/binance/src/api_model.rs", "rank": 58, "score": 8.136676643963725 }, { "content": "use chrono::{DateTime, Utc};\n\nuse serde::Deserialize;\n\n\n", "file_path": "exchanges/binance/src/api_model.rs", "rank": 59, "score": 8.087065880568087 }, { "content": " struct TestAssetInfo {\n\n prices: Mutex<Vec<f64>>,\n\n }\n\n\n\n impl TestAssetInfo {\n\n fn new() -> Self {\n\n Self {\n\n prices: Mutex::new(vec![7000.0, 25.0, 95.0]),\n\n }\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl AssetsInfo for TestAssetInfo {\n\n async fn price_at(&self, asset_pair: &AssetPair, _time: &DateTime<Utc>) -> Result<f64> {\n\n Ok(match (asset_pair.base.as_str(), asset_pair.quote.as_str()) {\n\n (\"USDT\" | \"USD\", \"USDT\" | \"USD\") => 1.0,\n\n _ => self.prices.lock().await.remove(0),\n\n })\n\n }\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 60, "score": 8.074527692610923 }, { "content": "use anyhow::Result;\n\nuse async_trait::async_trait;\n\n\n\nuse crate::operations::Operation;\n\n\n\n#[async_trait]\n", "file_path": "exchanges/exchange/src/operations/storage.rs", "rank": 61, "score": 7.986996640388579 }, { "content": " self.fetch_resource_for_accounts(StdEndpoints::Sells).await\n\n }\n\n\n\n pub async fn fetch_fiat_deposits(&self) -> Result<Vec<Transaction>> {\n\n self.fetch_resource_for_accounts(StdEndpoints::Deposits)\n\n 
.await\n\n }\n\n\n\n pub async fn fetch_withdraws(&self) -> Result<Vec<Transaction>> {\n\n self.fetch_resource_for_accounts(StdEndpoints::Withdraws)\n\n .await\n\n }\n\n}\n\n\n\nimpl<'a> CoinbaseFetcher<Pro> {\n\n pub fn new(config: Config) -> Self {\n\n Self {\n\n api_client: ApiClient::new(ENDPOINT_CONCURRENCY),\n\n credentials: Credentials::<Pro>::new(),\n\n api: PhantomData,\n", "file_path": "exchanges/coinbase/src/client.rs", "rank": 62, "score": 7.906691449126965 }, { "content": "use std::collections::HashMap;\n\nuse std::hash::Hash;\n\nuse std::sync::Arc;\n\n\n\nuse reqwest::Result;\n\nuse tokio::sync::{Mutex, OwnedSemaphorePermit, Semaphore};\n\n\n\n/// A semaphore that allows to acquire permits for arbitrary values of type T.\n\npub struct ValueSemaphore<T> {\n\n entries: Mutex<HashMap<T, Arc<Semaphore>>>,\n\n capacity: usize,\n\n}\n\n\n\nimpl<T> ValueSemaphore<T>\n\nwhere\n\n T: Eq + Hash + Sized,\n\n{\n\n /// Creates a new value semaphore where for each value 1 permit can be acquired \n\n /// at the same time.\n\n pub fn new() -> Self {\n", "file_path": "exchanges/api-client/src/sync.rs", "rank": 64, "score": 7.861756730574487 }, { "content": " amount,\n\n time: Utc::now(),\n\n },\n\n &1 => Operation::Revenue {\n\n id: u8::arbitrary(g),\n\n source_id,\n\n source,\n\n asset,\n\n amount,\n\n time: Utc::now(),\n\n },\n\n &2 => Operation::BalanceIncrease {\n\n id: u8::arbitrary(g),\n\n source_id,\n\n source,\n\n asset,\n\n amount,\n\n },\n\n &3 => Operation::BalanceDecrease {\n\n id: u8::arbitrary(g),\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 65, "score": 7.777894163858124 }, { "content": " asset: subtotal.currency,\n\n amount: subtotal.amount.parse::<f64>().expect(&format!(\n\n \"couldn't parse amount '{}' into f64\",\n\n subtotal.amount\n\n )),\n\n fee: Some(\n\n fee.amount\n\n .parse::<f64>()\n\n .expect(&format!(\"couldn't parse amount '{}' into f64\", fee.amount)),\n\n ),\n\n time: payout_at\n\n .parse::<DateTime<Utc>>()\n\n 
.expect(&format!(\"couldn't parse time '{}'\", payout_at)),\n\n is_fiat: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Withdraw> for Transaction {\n\n fn into(self) -> Withdraw {\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 66, "score": 7.728793451409519 }, { "content": "mod client;\n\nmod api_model;\n\nmod exchange;\n\n\n\npub use client::*;\n\npub use api_model::*;\n\npub use crate::exchange::*;\n", "file_path": "exchanges/coinbase/src/lib.rs", "rank": 67, "score": 7.68122632101157 }, { "content": " asset: \"USD\".into(),\n\n amount: 7.5,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceDecrease {\n\n id: 13,\n\n source_id: \"13\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"DOT\".into(),\n\n amount: 0.1,\n\n },\n\n Operation::Revenue {\n\n id: 14,\n\n source_id: \"14\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"DOT\".into(),\n\n amount: 0.1,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceDecrease {\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 68, "score": 7.640122885287555 }, { "content": "mod api_model;\n\nmod client;\n\nmod errors;\n\nmod exchange;\n\n\n\npub use client::*;\n\npub use api_model::*;\n\npub use errors::*;\n\npub use crate::exchange::*;", "file_path": "exchanges/binance/src/lib.rs", "rank": 69, "score": 7.58569396961339 }, { "content": "\n\n struct DummyAssetsInfo {\n\n last_price: Arc<Mutex<f64>>,\n\n }\n\n\n\n impl DummyAssetsInfo {\n\n pub fn new() -> Self {\n\n Self {\n\n last_price: Arc::new(Mutex::new(0.0)),\n\n }\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl AssetsInfo for DummyAssetsInfo {\n\n async fn price_at(&self, _asset_pair: &AssetPair, _time: &DateTime<Utc>) -> Result<f64> {\n\n let nums: Vec<_> = (1usize..10).map(|n| n as f64).collect();\n\n let mut gen = Gen::new(100);\n\n let mut p: f64 = *gen.choose(&nums[..]).unwrap();\n\n let mut last_price = self.last_price.lock().await;\n", "file_path": "exchanges/exchange/src/operations/profit_loss.rs", "rank": 70, 
"score": 7.583940020858611 }, { "content": "}\n\n\n\nimpl From<TransactionSide> for TradeSide {\n\n fn from(ts: TransactionSide) -> TradeSide {\n\n match ts {\n\n TransactionSide::Buy => TradeSide::Buy,\n\n TransactionSide::Sell => TradeSide::Sell,\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Trade> for Transaction {\n\n fn into(self) -> Trade {\n\n let to_f64 = |amount_str: &str| {\n\n amount_str\n\n .parse::<f64>()\n\n .unwrap_or_else(|_| panic!(\"couldn't parse amount '{}' into f64\", amount_str))\n\n };\n\n\n\n let base_asset = self.amount.currency;\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 71, "score": 7.5521019101688065 }, { "content": "mod exchange;\n\npub mod operations;\n\n\n\npub use crate::exchange::*;", "file_path": "exchanges/exchange/src/lib.rs", "rank": 73, "score": 7.344018157509249 }, { "content": " .ok_or_else(|| anyhow!(\"missing symbols in binance config\"))?\n\n .iter()\n\n .map(|s| AssetPair::try_from_str(&s))\n\n .collect::<Result<Vec<AssetPair>>>()?,\n\n })\n\n }\n\n}\n\n\n\nimpl TryFrom<ExchangeConfig> for CoinbaseConfig {\n\n type Error = AnyhowError;\n\n fn try_from(c: ExchangeConfig) -> Result<Self> {\n\n Ok(Self {\n\n start_date: c.start_date()?,\n\n // fixme: decide which one to use by check if c.symbols or c.assets is present\n\n symbols: c\n\n .assets\n\n .ok_or_else(|| anyhow!(\"missing assets in binance config\"))?\n\n .clone(),\n\n })\n\n }\n", "file_path": "portfolio/src/cli.rs", "rank": 74, "score": 7.295133747644131 }, { "content": "use std::collections::HashMap;\n\nuse std::sync::Arc;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse bytes::Bytes;\n\nuse reqwest::{header::HeaderMap, StatusCode};\n\nuse tokio::sync::RwLock;\n\n\n\nuse crate::{errors::Error as ApiError, sync::ValueSemaphore};\n\n\n", "file_path": "exchanges/api-client/src/client.rs", "rank": 75, "score": 7.290436184363518 }, { "content": " }\n\n\n\n fn find_price_at(&self, prices: &Vec<Candle>, time: u64) -> f64 {\n\n // find the price at `time` in the 
vector of candles, it's assumed\n\n // the data is sorted.\n\n // With that invariant then the first candle which time is greater than\n\n // the provided `time` is the one that holds the most accurate price.\n\n prices\n\n .iter()\n\n .find_map(|c| match c.close_time > time {\n\n true => Some(c.close_price),\n\n false => None,\n\n })\n\n .unwrap_or(0.0)\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: ExchangeClient + Send + Sync> AssetsInfo for AssetPrices<T> {\n\n async fn price_at(&self, asset_pair: &AssetPair, time: &DateTime<Utc>) -> Result<f64> {\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 76, "score": 7.256447210217318 }, { "content": "}\n\n\n\n/// stores buckets to prices in the db, buckets are periods of time\n\n/// defined by number of days of span.\n\npub struct AssetPrices<T> {\n\n client: T,\n\n}\n\n\n\nimpl<T: ExchangeClient> AssetPrices<T> {\n\n pub fn new(client: T) -> Self {\n\n Self { client }\n\n }\n\n\n\n async fn asset_price_at(\n\n &self,\n\n asset_pair: &AssetPair,\n\n datetime: &DateTime<Utc>,\n\n ) -> Result<f64> {\n\n // special case: USD - USDT - USDC\n\n // not all exchanges have the (USDT|USDC)USD martek, so for now just return 1.0.\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 77, "score": 7.150022374274867 }, { "content": "\n\nimpl Credentials<RegionGlobal> {\n\n fn new() -> Self {\n\n Self {\n\n api_key: env::var(\"BINANCE_API_KEY\").unwrap(),\n\n secret_key: env::var(\"BINANCE_API_SECRET\").unwrap(),\n\n region: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl Credentials<RegionUs> {\n\n fn new() -> Self {\n\n Self {\n\n api_key: env::var(\"BINANCE_API_KEY_US\").unwrap(),\n\n secret_key: env::var(\"BINANCE_API_SECRET_US\").unwrap(),\n\n region: PhantomData,\n\n }\n\n }\n\n}\n", "file_path": "exchanges/binance/src/client.rs", "rank": 78, "score": 7.110848945311201 }, { "content": " id: 10,\n\n source_id: \"10\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"ETH\".into(),\n\n amount: 
0.2,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceIncrease {\n\n id: 11,\n\n source_id: \"11\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"DOT\".into(),\n\n amount: 0.5,\n\n },\n\n Operation::Cost {\n\n id: 12,\n\n source_id: \"12\".to_string(),\n\n source: \"test\".to_string(),\n\n for_asset: \"DOT\".into(),\n\n for_amount: 0.5,\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 79, "score": 7.017891965633888 }, { "content": " source_id: \"5\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"ETH\".into(),\n\n amount: 0.5,\n\n },\n\n Operation::Cost {\n\n id: 6,\n\n source_id: \"6\".to_string(),\n\n source: \"test\".to_string(),\n\n for_asset: \"ETH\".into(),\n\n for_amount: 0.5,\n\n asset: \"USD\".into(),\n\n amount: 1000.0,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceIncrease {\n\n id: 7,\n\n source_id: \"7\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"ETH\".into(),\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 80, "score": 6.97648305979853 }, { "content": " pub source_id: String,\n\n pub source: String,\n\n pub asset: String,\n\n pub amount: f64,\n\n #[serde(with = \"datetime_from_str\")]\n\n pub time: DateTime<Utc>,\n\n pub fee: f64,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n\npub struct Loan {\n\n pub source_id: String,\n\n pub source: String,\n\n pub asset: String,\n\n pub amount: f64,\n\n #[serde(with = \"datetime_from_str\")]\n\n pub time: DateTime<Utc>,\n\n pub status: Status,\n\n}\n\n\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 81, "score": 6.947294475440935 }, { "content": "use std::{\n\n ffi::{OsStr, OsString},\n\n fs::{read_to_string, File},\n\n path::PathBuf,\n\n};\n\n\n\nuse anyhow::{anyhow, Context, Error as AnyhowError, Result};\n\nuse chrono::NaiveDate;\n\nuse serde::Deserialize;\n\nuse structopt::{self, StructOpt};\n\nuse toml;\n\n\n\nuse crate::errors::Error;\n\nuse binance::Config as BinanceConfig;\n\nuse coinbase::Config 
as CoinbaseConfig;\n\nuse exchange::{Asset, AssetPair};\n\n\n", "file_path": "portfolio/src/cli.rs", "rank": 82, "score": 6.929567133140838 }, { "content": " amount: 0.01,\n\n },\n\n Operation::Cost {\n\n id: 8,\n\n source_id: \"1\".to_string(),\n\n source: \"test\".to_string(),\n\n for_asset: \"ETH\".into(),\n\n for_amount: 0.01,\n\n asset: \"USD\".into(),\n\n amount: 21.0,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceDecrease {\n\n id: 9,\n\n source_id: \"9\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"ETH\".into(),\n\n amount: 0.2,\n\n },\n\n Operation::Revenue {\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 83, "score": 6.8564639271844445 }, { "content": " },\n\n Operation::BalanceIncrease {\n\n id: 3,\n\n source_id: \"3\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"BTC\".into(),\n\n amount: 0.1,\n\n },\n\n Operation::Cost {\n\n id: 4,\n\n source_id: \"4\".to_string(),\n\n source: \"test\".to_string(),\n\n for_asset: \"BTC\".into(),\n\n for_amount: 0.1,\n\n asset: \"USD\".into(),\n\n amount: 890.0,\n\n time: Utc::now(),\n\n },\n\n Operation::BalanceIncrease {\n\n id: 5,\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 84, "score": 6.807772537553184 }, { "content": "use std::convert::TryFrom;\n\n\n\nuse anyhow::{anyhow, Result, Error};\n\nuse async_trait::async_trait;\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[async_trait]\n\n/// Layer of abstraction on how to fetch data from exchanges.\n\n/// This allow to handle any incoming transactions/operations and convert them\n\n/// into known structs that can be correctly translated into operations.\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 85, "score": 6.788705353378069 }, { "content": " .for_amount\n\n .ok_or_else(|| anyhow!(\"missing for_amount in cost operation\"))?,\n\n asset: op.asset,\n\n amount: op.amount,\n\n time: Utc.timestamp(\n\n op.timestamp\n\n .ok_or_else(|| 
anyhow!(\"missing timestamp in cost operation\"))?,\n\n 0,\n\n ),\n\n }),\n\n \"revenue\" => Ok(Operation::Revenue {\n\n id: op.op_id,\n\n source_id: op.source_id,\n\n source: op.source,\n\n asset: op.asset,\n\n amount: op.amount,\n\n time: Utc.timestamp(\n\n op.timestamp\n\n .ok_or_else(|| anyhow!(\"missing timestamp in revenue operation\"))?,\n\n 0,\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 86, "score": 6.733178146734483 }, { "content": "#[cfg(test)]\n\n#[macro_use]\n\nextern crate quickcheck;\n\n\n\nmod cli;\n\nmod custom_ops;\n\nmod errors;\n\nmod reports;\n\n\n\nuse std::{convert::TryInto, sync::Arc};\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse futures::future::join_all;\n\nuse structopt::{self, StructOpt};\n\nuse tokio::sync::mpsc;\n\n\n\nuse binance::{BinanceFetcher, Config, RegionGlobal, RegionUs};\n\n// use coinbase::{CoinbaseFetcher, Config as CoinbaseConfig, Pro, Std};\n\n\n\nuse exchange::operations::{\n", "file_path": "portfolio/src/main.rs", "rank": 87, "score": 6.710471684245178 }, { "content": " pub timestamp: Option<i64>,\n\n}\n\n\n\nimpl From<OperationType> for Operation {\n\n fn from(op: OperationType) -> Operation {\n\n match op {\n\n OperationType::Cost {\n\n id,\n\n source_id,\n\n source,\n\n for_asset,\n\n for_amount,\n\n asset,\n\n amount,\n\n time,\n\n } => Operation {\n\n op_id: id,\n\n source_id,\n\n source,\n\n op_type: \"cost\".to_string(),\n", "file_path": "exchanges/exchange/src/operations/db.rs", "rank": 88, "score": 6.698789820855316 }, { "content": " curr_start = end + Duration::milliseconds(1);\n\n if end == now {\n\n break;\n\n }\n\n }\n\n Ok(deposits)\n\n }\n\n\n\n pub async fn fetch_withdraws(&self) -> Result<Vec<Withdraw>> {\n\n let mut withdraws = Vec::<Withdraw>::new();\n\n\n\n // fetch in batches of 90 days from `start_date` to `now()`\n\n let mut curr_start = self.data_start_date().and_hms(0, 0, 0);\n\n loop {\n\n let now = Utc::now().naive_utc();\n\n // the API only allows 90 days between start 
and end\n\n let end = std::cmp::min(curr_start + Duration::days(90), now);\n\n\n\n let mut query = QueryParams::new();\n\n query.add(\"timestamp\", now.timestamp_millis(), false);\n", "file_path": "exchanges/binance/src/client.rs", "rank": 89, "score": 6.6872983833547535 }, { "content": "}\n\n\n\nimpl Transaction {\n\n pub fn update_time(&self) -> DateTime<Utc> {\n\n self.updated_at.parse::<DateTime<Utc>>().expect(&format!(\n\n \"couldn't parse updated_at time '{}'\",\n\n self.updated_at\n\n ))\n\n }\n\n}\n\n\n\nimpl Identifiable<String> for Transaction {\n\n fn id(&self) -> &String {\n\n &self.id\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Clone)]\n\npub struct Response<T> {\n\n pub pagination: Pagination,\n\n pub data: Vec<T>,\n\n}\n", "file_path": "exchanges/coinbase/src/api_model.rs", "rank": 90, "score": 6.676539834908178 }, { "content": " id: 15,\n\n source_id: \"15\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"DOT\".into(),\n\n amount: 0.2,\n\n },\n\n Operation::Revenue {\n\n id: 16,\n\n source_id: \"16\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"DOT\".into(),\n\n amount: 0.2,\n\n time: Utc::now(),\n\n },\n\n ];\n\n\n\n for op in ops {\n\n coin_tracker.batch_operation(op).await;\n\n }\n\n coin_tracker.process_batch().await?;\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 91, "score": 6.6665300361199336 }, { "content": " .map(|x| x.into())\n\n .collect())\n\n }\n\n async fn margin_trades(&self) -> Result<Vec<Trade>> {\n\n Ok(Vec::new())\n\n }\n\n async fn loans(&self) -> Result<Vec<Loan>> {\n\n Ok(Vec::new())\n\n }\n\n async fn repays(&self) -> Result<Vec<Repay>> {\n\n Ok(Vec::new())\n\n }\n\n async fn deposits(&self) -> Result<Vec<Deposit>> {\n\n Ok(Vec::new())\n\n }\n\n async fn withdraws(&self) -> Result<Vec<Withdraw>> {\n\n Ok(Vec::new())\n\n }\n\n}\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 92, "score": 6.6506376626104515 }, { "content": " source: source.clone(),\n\n // the amount 
fulfilled from the operation\n\n amount: amount_fulfilled,\n\n cost: paid_amount_used * price,\n\n price: self\n\n .assets_info\n\n .price_at(&AssetPair::new(&sale.asset, \"USDT\"), time)\n\n .await?,\n\n paid_with: asset.to_string(),\n\n paid_with_amount: paid_amount_used,\n\n datetime: *time,\n\n sale_result: match (sale_revenue, purchase_cost) {\n\n (r, c) if r >= c => OperationResult::Profit(r - c),\n\n (r, c) => OperationResult::Loss(c - r),\n\n },\n\n });\n\n // if the whole amount from the purchase was used, remove it\n\n // from the queue as we can't use it anymore to fulfill more sales.\n\n if *purchased_amount == 0.0 {\n\n // TODO: delete op from vec\n", "file_path": "exchanges/exchange/src/operations/profit_loss.rs", "rank": 93, "score": 6.580497562064718 }, { "content": " }\n\n\n\n let coin_tracker = BalanceTracker::new(TestAssetInfo::new());\n\n let ops = vec![\n\n Operation::BalanceIncrease {\n\n id: 1,\n\n source_id: \"1\".to_string(),\n\n source: \"test\".to_string(),\n\n asset: \"BTC\".into(),\n\n amount: 0.03,\n\n },\n\n Operation::Cost {\n\n id: 2,\n\n source_id: \"2\".to_string(),\n\n source: \"test\".to_string(),\n\n for_asset: \"BTC\".to_string(),\n\n for_amount: 0.03,\n\n asset: \"USD\".into(),\n\n amount: 255.0,\n\n time: Utc::now(),\n", "file_path": "exchanges/exchange/src/operations/ops.rs", "rank": 94, "score": 6.445934084244046 }, { "content": " Failure,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub enum TradeSide {\n\n Buy,\n\n Sell,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Trade {\n\n pub source_id: String,\n\n pub source: String,\n\n pub symbol: String,\n\n pub base_asset: String,\n\n pub quote_asset: String,\n\n pub amount: f64,\n\n pub price: f64,\n\n pub fee: f64,\n", "file_path": "exchanges/exchange/src/exchange.rs", "rank": 95, "score": 6.417599611012317 }, { "content": " let subtotal: Amount = self.subtotal.expect(\"missing subtotal in transaction\");\n\n 
let fee: Amount = self.fee.expect(\"missing fee in transaction\");\n\n let payout_at = self.payout_at.expect(\"missing payout_at in transaction\");\n\n let amount = subtotal.amount;\n\n Withdraw {\n\n source_id: self.id,\n\n source: \"coinbase\".to_string(),\n\n asset: subtotal.currency,\n\n amount: amount\n\n .parse::<f64>()\n\n .unwrap_or_else(|_| panic!(\"couldn't parse amount '{}' into f64\", amount)),\n\n fee: fee\n\n .amount\n\n .parse::<f64>()\n\n .unwrap_or_else(|_| panic!(\"couldn't parse amount '{}' into f64\", fee.amount)),\n\n time: payout_at\n\n .parse::<DateTime<Utc>>()\n\n .unwrap_or_else(|_| panic!(\"couldn't parse time '{}'\", payout_at)),\n\n }\n\n }\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 96, "score": 6.363173093165205 }, { "content": " let subtotal = self.subtotal.expect(\"missing subtotal in transaction\");\n\n let subtotal_amount = to_f64(&subtotal.amount);\n\n let quote_asset = subtotal.currency;\n\n let amount = to_f64(&self.amount.amount);\n\n let fee = self.fee.expect(\"missing fee in transaction\");\n\n let updated_at = self.updated_at;\n\n Trade {\n\n source_id: self.id,\n\n source: \"coinbase\".to_string(),\n\n symbol: format!(\"{}{}\", base_asset, quote_asset),\n\n base_asset,\n\n quote_asset,\n\n amount: amount,\n\n price: subtotal_amount / amount,\n\n fee: to_f64(&fee.amount),\n\n fee_asset: fee.currency,\n\n time: updated_at\n\n .parse::<DateTime<Utc>>()\n\n .unwrap_or_else(|_| panic!(\"couldn't parse time '{}'\", updated_at)),\n\n side: self.side.into(),\n", "file_path": "exchanges/coinbase/src/exchange.rs", "rank": 97, "score": 6.308104605430591 }, { "content": "use std::fmt;\n\n\n\nuse api_client::errors::Error as ApiError;\n\n\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Api(ApiErrorKind),\n\n Other(String),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ApiErrorKind {\n\n UnavailableSymbol,\n\n // add any other relevant error codes here\n\n Other(i16),\n\n}\n\n\n\nimpl 
From<Option<i16>> for ApiErrorKind {\n", "file_path": "exchanges/binance/src/errors.rs", "rank": 99, "score": 6.249794260070457 } ]
Rust
deps/stb-image/image.rs
tomaka/declmagic
df3abdcd89cd1515f93c369401ac93581c3d4382
use stb_image::bindgen::*; use std::any::Any; use libc; use libc::{c_void, c_int}; use std::slice::raw::buf_as_slice; pub struct Image<T> { pub width : uint, pub height : uint, pub depth : uint, pub data : Vec<T>, } pub fn new_image<T>(width: uint, height: uint, depth: uint, data: Vec<T>) -> Image<T> { Image::<T> { width : width, height : height, depth : depth, data : data, } } pub enum LoadResult { Error(String), ImageU8(Image<u8>), ImageF32(Image<f32>), } impl LoadResult { pub fn from_result(res: Result<LoadResult,Box<Any>>)-> LoadResult { match res { Ok(res) => res, Err(e) => Error(e.to_string()), } } } pub fn load(path: &Path) -> LoadResult { let force_depth = 0; load_with_depth(path, force_depth, false) } fn load_internal<T: Clone>(buf : *const T, w : c_int, h : c_int, d : c_int) -> Image<T> { unsafe { let data = buf_as_slice(buf, (w * h * d) as uint, |s| { Vec::from_slice(s) }); libc::free(buf as *mut c_void); Image::<T>{ width : w as uint, height : h as uint, depth : d as uint, data : data} } } pub fn load_with_depth(path: &Path, force_depth: uint, convert_hdr: bool) -> LoadResult { unsafe { let mut width = 0 as c_int; let mut height = 0 as c_int; let mut depth = 0 as c_int; let path_as_str = match path.as_str() { Some(s) => s, None => return Error("path is not valid utf8".to_string()), }; path_as_str.with_c_str(|bytes| { if !convert_hdr && stbi_is_hdr(bytes)!=0 { let buffer = stbi_loadf(bytes, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_loadf failed".to_string()) } else { ImageF32(load_internal(buffer, width, height, depth)) } } else { let buffer = stbi_load(bytes, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_load failed".to_string()) } else { ImageU8(load_internal(buffer, width, height, depth)) } } }) } } pub fn load_from_memory(buffer: &[u8]) -> LoadResult { let force_depth = 0; load_from_memory_with_depth(buffer, force_depth, false) } pub fn 
load_from_memory_with_depth(buffer: &[u8], force_depth: uint, convert_hdr:bool) -> LoadResult { unsafe { let mut width = 0 as c_int; let mut height = 0 as c_int; let mut depth = 0 as c_int; if !convert_hdr && stbi_is_hdr_from_memory(buffer.as_ptr(), buffer.len() as c_int) != 0 { let buffer = stbi_loadf_from_memory(buffer.as_ptr(), buffer.len() as c_int, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_loadf_from_memory failed".to_string()) } else { let actual_depth = if force_depth != 0 { force_depth as c_int } else { depth }; ImageF32(load_internal(buffer, width, height, actual_depth)) } } else { let buffer = stbi_load_from_memory(buffer.as_ptr(), buffer.len() as c_int, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_load_from_memory failed".to_string()) } else { let actual_depth = if force_depth != 0 { force_depth as c_int } else { depth }; ImageU8(load_internal(buffer, width, height, actual_depth)) } } } }
use stb_image::bindgen::*; use std::any::Any; use libc; use libc::{c_void, c_int}; use std::slice::raw::buf_as_slice; pub struct Image<T> { pub width : uint, pub height : uint, pub depth : uint, pub data : Vec<T>, } pub fn new_image<T>(width: uint, height: uint, depth: uint, data: Vec<T>) -> Image<T> { Image::<T> { width : width, height : height, depth : depth, data : data, } } pub enum LoadResult { Error(String), ImageU8(Image<u8>), ImageF32(Image<f32>), } impl LoadResult { pub fn from_result(res: Result<LoadResult,Box<Any>>)-> LoadResult { match res { Ok(res) => res, Err(e) => Error(e.to_string()), } } } pub fn load(path: &Path) -> LoadResult { let force_depth = 0; load_with_depth(path, force_depth, false) } fn load_internal<T: Clone>(buf : *const T, w : c_int, h : c_int, d : c_int) -> Image<T> { unsafe { let data = buf_as_slice(buf, (w * h * d) as uint, |s| { Vec::from_slice(s) });
pub fn load_with_depth(path: &Path, force_depth: uint, convert_hdr: bool) -> LoadResult { unsafe { let mut width = 0 as c_int; let mut height = 0 as c_int; let mut depth = 0 as c_int; let path_as_str = match path.as_str() { Some(s) => s, None => return Error("path is not valid utf8".to_string()), }; path_as_str.with_c_str(|bytes| { if !convert_hdr && stbi_is_hdr(bytes)!=0 { let buffer = stbi_loadf(bytes, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_loadf failed".to_string()) } else { ImageF32(load_internal(buffer, width, height, depth)) } } else { let buffer = stbi_load(bytes, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_load failed".to_string()) } else { ImageU8(load_internal(buffer, width, height, depth)) } } }) } } pub fn load_from_memory(buffer: &[u8]) -> LoadResult { let force_depth = 0; load_from_memory_with_depth(buffer, force_depth, false) } pub fn load_from_memory_with_depth(buffer: &[u8], force_depth: uint, convert_hdr:bool) -> LoadResult { unsafe { let mut width = 0 as c_int; let mut height = 0 as c_int; let mut depth = 0 as c_int; if !convert_hdr && stbi_is_hdr_from_memory(buffer.as_ptr(), buffer.len() as c_int) != 0 { let buffer = stbi_loadf_from_memory(buffer.as_ptr(), buffer.len() as c_int, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_loadf_from_memory failed".to_string()) } else { let actual_depth = if force_depth != 0 { force_depth as c_int } else { depth }; ImageF32(load_internal(buffer, width, height, actual_depth)) } } else { let buffer = stbi_load_from_memory(buffer.as_ptr(), buffer.len() as c_int, &mut width, &mut height, &mut depth, force_depth as c_int); if buffer.is_null() { Error("stbi_load_from_memory failed".to_string()) } else { let actual_depth = if force_depth != 0 { force_depth as c_int } else { depth }; ImageU8(load_internal(buffer, width, height, actual_depth)) } } } }
libc::free(buf as *mut c_void); Image::<T>{ width : w as uint, height : h as uint, depth : d as uint, data : data} } }
function_block-function_prefixed
[ { "content": "/// Load each OpenGL symbol using a custom load function. This allows for the\n\n/// use of functions like `glfwGetProcAddress` or `SDL_GL_GetProcAddress`.\n\n///\n\n/// ~~~\n\n/// let gl = gl::load_with(glfw::get_proc_address);\n\n/// ~~~\n\npub fn load_with(loadfn: |symbol: &str| -> *const libc::c_void) {\n\n ActiveShaderProgram::load_with(|s| loadfn(s));\n\n ActiveTexture::load_with(|s| loadfn(s));\n\n AttachShader::load_with(|s| loadfn(s));\n\n BeginConditionalRender::load_with(|s| loadfn(s));\n\n BeginQuery::load_with(|s| loadfn(s));\n\n BeginQueryIndexed::load_with(|s| loadfn(s));\n\n BeginTransformFeedback::load_with(|s| loadfn(s));\n\n BindAttribLocation::load_with(|s| loadfn(s));\n\n BindBuffer::load_with(|s| loadfn(s));\n\n BindBufferBase::load_with(|s| loadfn(s));\n\n BindBufferRange::load_with(|s| loadfn(s));\n\n BindFragDataLocation::load_with(|s| loadfn(s));\n\n BindFragDataLocationIndexed::load_with(|s| loadfn(s));\n\n BindFramebuffer::load_with(|s| loadfn(s));\n\n BindImageTexture::load_with(|s| loadfn(s));\n\n BindProgramPipeline::load_with(|s| loadfn(s));\n\n BindRenderbuffer::load_with(|s| loadfn(s));\n\n BindSampler::load_with(|s| loadfn(s));\n\n BindTexture::load_with(|s| loadfn(s));\n", "file_path": "deps/gl/gl.rs", "rank": 4, "score": 153782.3133361509 }, { "content": "pub fn stbi_info(filename: *const c_char, x: *const c_int, y: *const c_int, comp: *const c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 5, "score": 142645.68943368172 }, { "content": "pub fn stbi_info_from_file(f: *const FILE, x: *const c_int, y: *const c_int, comp: *const c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 6, "score": 142645.68943368172 }, { "content": "pub fn stbi_info_from_memory(buffer: *const stbi_uc, len: c_int, x: *const c_int, y: *const c_int, comp: *const c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 7, "score": 134869.2887559477 }, { "content": "pub 
fn stbi_is_hdr_from_file(f: *const FILE) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 8, "score": 134008.74742643067 }, { "content": "pub fn stbi_is_hdr(filename: *const c_char) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 9, "score": 134008.74742643067 }, { "content": "pub fn stbi_info_from_callbacks(clbk: *const stbi_io_callbacks, user: *const c_void, x: *const c_int, y: *const c_int, comp: *const c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 10, "score": 133392.0541104909 }, { "content": "pub fn stbi_failure_reason() -> *const c_char;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 11, "score": 132344.43610387688 }, { "content": "pub fn stbi_zlib_decode_malloc(buffer: *const c_char, len: c_int, outlen: *const c_int) -> *const c_char;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 12, "score": 131781.9796787139 }, { "content": "pub fn stbi_zlib_decode_noheader_malloc(buffer: *const c_char, len: c_int, outlen: *const c_int) -> *const c_char;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 13, "score": 130131.87373222789 }, { "content": "pub fn stbi_is_hdr_from_callbacks(clbk: *const stbi_io_callbacks, user: *const c_void) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 14, "score": 126201.0785687179 }, { "content": "pub fn stbi_zlib_decode_malloc_guesssize(buffer: *const c_char, len: c_int, initial_size: c_int, outlen: *const c_int) -> *const c_char;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 15, "score": 125143.96588673154 }, { "content": "pub fn stbi_is_hdr_from_memory(buffer: *const stbi_uc, len: c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 16, "score": 123399.87862634257 }, { "content": "pub fn stbi_zlib_decode_buffer(obuffer: *const c_char, olen: c_int, ibuffer: *const c_char, ilen: c_int) -> c_int;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 17, "score": 
120849.65094681727 }, { "content": "pub fn stbi_zlib_decode_noheader_buffer(obuffer: *const c_char, olen: c_int, ibuffer: *const c_char, ilen: c_int) -> c_int;\n\n\n\n }\n\n}\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 18, "score": 119270.18659564928 }, { "content": "pub fn stbi_image_free(retval_from_stbi_load: *const c_void);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 19, "score": 118952.41211634307 }, { "content": "pub fn stbi_loadf(filename: *const c_char, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const c_float;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 20, "score": 110777.27158156452 }, { "content": "pub fn stbi_loadf_from_file(f: *const FILE, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const c_float;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 21, "score": 110777.27158156452 }, { "content": "pub fn stbi_load(filename: *const c_char, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const stbi_uc;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 22, "score": 109338.92182202877 }, { "content": "pub fn stbi_load_from_file(f: *const FILE, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const stbi_uc;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 23, "score": 109338.92182202877 }, { "content": "pub fn stbi_loadf_from_callbacks(clbk: *const stbi_io_callbacks, user: *const c_void, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const c_float;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 24, "score": 108769.40700105311 }, { "content": "pub fn stbi_load_from_callbacks(clbk: *const stbi_io_callbacks, user: *const c_void, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const stbi_uc;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 25, "score": 107634.9867821053 }, { "content": "pub fn stbi_loadf_from_memory(buffer: *const 
stbi_uc, len: c_int, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const c_float;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 26, "score": 105083.80168903734 }, { "content": "pub fn stbi_load_from_memory(buffer: *const stbi_uc, len: c_int, x: *mut c_int, y: *mut c_int, comp: *mut c_int, req_comp: c_int) -> *const stbi_uc;\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 27, "score": 103875.8080630136 }, { "content": "pub fn stbi_set_unpremultiply_on_load(flag_true_if_should_unpremultiply: c_int);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 28, "score": 101103.0704643778 }, { "content": "pub fn stbi_convert_iphone_png_to_rgb(flag_true_if_should_convert: c_int);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 30, "score": 99622.73998886623 }, { "content": "enum ComponentData {\n\n ComponentDataNative(HashMap<String, Data>),\n\n ComponentDataLink(ComponentID)\n\n}\n\n\n\n/// Type of a component\n\n#[deriving(Clone,Show)]\n\npub enum ComponentType {\n\n NativeComponentType(String),\n\n EntityComponentType(EntityID)\n\n}\n\n\n\n#[deriving(Clone,Show)]\n\npub enum Data {\n\n Number(f64),\n\n String(String),\n\n Boolean(bool),\n\n List(Vec<Data>),\n\n Entity(EntityID),\n\n FromProperty(String),\n", "file_path": "src/entities/state.rs", "rank": 32, "score": 95755.05022279854 }, { "content": "struct EntityData {\n\n // name of the entity with its path as prefix\n\n name: Option<String>,\n\n visible: bool,\n\n\n\n // components owned by the entity\n\n components: Vec<ComponentID>,\n\n\n\n // components whose type is the entity\n\n components_of_type: Vec<ComponentID>,\n\n\n\n // list of parameters of the current entity\n\n default_parameters: HashMap<String, Data>\n\n}\n\n\n", "file_path": "src/entities/state.rs", "rank": 33, "score": 95697.02195896834 }, { "content": "struct ShaderImpl {\n\n\tdisplay: Arc<context::GLContext>,\n\n\tid: gl::types::GLuint,\n\n\tshaderType: ShaderType\n\n}\n\n\n\npub 
struct Program {\n\n\tdisplay: Arc<context::GLContext>,\n\n\tshaders: Vec<Arc<ShaderImpl>>,\n\n\tid: gl::types::GLuint,\n\n\tuniforms: Arc<HashMap<String, (gl::types::GLint, gl::types::GLenum, gl::types::GLint)>>\t\t// location, type and size of each uniform, ordered by name\n\n}\n\n\n\n#[deriving(Clone)]\n\npub struct ProgramUniforms {\n\n\tdisplay: Arc<context::GLContext>,\n\n\ttextures: HashMap<gl::types::GLint, Arc<TextureImpl>>,\n\n\tvalues: HashMap<gl::types::GLint, (gl::types::GLenum, Vec<char>)>,\n\n\tuniforms: Arc<HashMap<String, (gl::types::GLint, gl::types::GLenum, gl::types::GLint)>>\t\t// same as the program's variable\n\n}\n", "file_path": "src/display/raw/mod.rs", "rank": 34, "score": 92838.16343599759 }, { "content": "struct TextureImpl {\n\n\tdisplay: Arc<context::GLContext>,\n\n\tid: gl::types::GLuint,\n\n\tbindPoint: gl::types::GLenum,\n\n\twidth: uint,\n\n\theight: uint,\n\n\tdepth: uint,\n\n\tarraySize: uint\n\n}\n\n\n\npub struct Shader {\n\n\tshader: Arc<ShaderImpl>\n\n}\n\n\n", "file_path": "src/display/raw/mod.rs", "rank": 35, "score": 92838.16343599759 }, { "content": "pub trait GLDataTuple {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum;\n\n\tfn get_num_elems(&self) -> uint;\n\n\tfn get_total_size(&self) -> gl::types::GLsizei;\n\n}\n\n\n\nimpl GLDataTuple for (f32) {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum { gl::FLOAT }\n\n\tfn get_num_elems(&self) -> uint { 1 }\n\n\tfn get_total_size(&self) -> gl::types::GLsizei { std::mem::size_of_val(self) as gl::types::GLsizei }\n\n}\n\n\n\nimpl GLDataTuple for (f32, f32) {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum { gl::FLOAT }\n\n\tfn get_num_elems(&self) -> uint { 2 }\n\n\tfn get_total_size(&self) -> gl::types::GLsizei { 2 * std::mem::size_of_val(self.ref0()) as gl::types::GLsizei }\n\n}\n\n\n\nimpl GLDataTuple for (f32, f32, f32) {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum { gl::FLOAT }\n", "file_path": "src/display/raw/data_types.rs", "rank": 36, "score": 90868.06662762426 
}, { "content": "pub fn stbi_hdr_to_ldr_gamma(gamma: c_float);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 37, "score": 86087.70385247865 }, { "content": "pub fn stbi_hdr_to_ldr_scale(scale: c_float);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 38, "score": 86087.70385247865 }, { "content": "pub fn stbi_ldr_to_hdr_gamma(gamma: c_float);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 39, "score": 86087.70385247865 }, { "content": "pub fn stbi_ldr_to_hdr_scale(scale: c_float);\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 40, "score": 86087.70385247865 }, { "content": "pub trait UniformValue {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum;\n\n}\n\n\n\nimpl UniformValue for i8 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::BYTE\n\n\t}\n\n}\n\n\n\nimpl UniformValue for u8 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::UNSIGNED_BYTE\n\n\t}\n\n}\n\n\n\nimpl UniformValue for i16 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::SHORT\n\n\t}\n", "file_path": "src/display/raw/data_types.rs", "rank": 41, "score": 82136.61275371596 }, { "content": "pub trait GLDataType: Num + Copy {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum;\n\n}\n\n\n\nimpl GLDataType for i8 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::BYTE\n\n\t}\n\n}\n\n\n\nimpl GLDataType for u8 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::UNSIGNED_BYTE\n\n\t}\n\n}\n\n\n\nimpl GLDataType for i16 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::SHORT\n\n\t}\n", "file_path": "src/display/raw/data_types.rs", "rank": 42, "score": 81234.30425921308 }, { "content": "/// returns the total requested movement of an entity\n\npub fn get_requested_movement(state: &EntitiesState, id: &EntityID)\n\n -> Option<Vec2<f32>>\n\n{\n\n state\n\n .get_visible_native_components(\"requestedMovement\")\n\n .move_iter()\n\n\n\n // take only the components owned by the entity\n\n 
.filter(|c| state.get_owner(c).unwrap() == *id)\n\n\n\n // build a vector from each of the component\n\n .filter_map(|cmp| match (state.get_as_number(&cmp, \"x\"), state.get_as_number(&cmp, \"y\"), state.get_as_number(&cmp, \"z\")) {\n\n (Some(x), Some(y), _)\n\n => Some(Vec2::new(x as f32, y as f32)),\n\n _ => None\n\n })\n\n\n\n // add all the elements together\n\n .fold(None, |vec: Option<Vec2<f32>>, a| match vec { None => Some(a), Some(v) => Some(v + a) })\n\n}\n\n\n", "file_path": "src/physics/mod.rs", "rank": 43, "score": 77505.8438945521 }, { "content": "/// returns the position of an entity\n\npub fn get_entity_position(state: &EntitiesState, id: &EntityID)\n\n -> na::Vec3<f32>\n\n{\n\n use std::iter::AdditiveIterator;\n\n\n\n state\n\n .get_visible_native_components(\"position\")\n\n .move_iter()\n\n\n\n // take only the components owned by the entity\n\n .filter(|c| state.get_owner(c).unwrap() == *id)\n\n\n\n // build a vector from each of the component\n\n .filter_map(|cmp| match (state.get_as_number(&cmp, \"x\"), state.get_as_number(&cmp, \"y\"), state.get_as_number(&cmp, \"z\")) {\n\n (Some(x), Some(y), Some(z))\n\n => Some(na::Vec3::new(x as f32, y as f32, z as f32)),\n\n (Some(x), Some(y), _)\n\n => Some(na::Vec3::new(x as f32, y as f32, 0.0)),\n\n _ => None\n\n })\n\n\n\n // add all the elements together\n\n .sum()\n\n}\n\n\n", "file_path": "src/physics/mod.rs", "rank": 44, "score": 77505.8438945521 }, { "content": "/// Returns the total movement of an entity.\n\npub fn get_entity_movement(state: &EntitiesState, id: &EntityID)\n\n -> na::Vec3<f32>\n\n{\n\n use std::iter::AdditiveIterator;\n\n\n\n state\n\n .get_visible_native_components(\"movement\")\n\n .move_iter()\n\n\n\n // take only the components owned by the entity\n\n .filter(|c| state.get_owner(c).unwrap() == *id)\n\n\n\n // build a vector from each of the component\n\n .filter_map(|cmp| match (state.get_as_number(&cmp, \"x\"), state.get_as_number(&cmp, \"y\"), state.get_as_number(&cmp, 
\"z\")) {\n\n (Some(x), Some(y), Some(z))\n\n => Some(na::Vec3::new(x as f32, y as f32, z as f32)),\n\n (Some(x), Some(y), _)\n\n => Some(na::Vec3::new(x as f32, y as f32, 0.0)),\n\n _ => None\n\n })\n\n\n\n // add all the elements together\n\n .sum()\n\n}\n\n\n", "file_path": "src/physics/mod.rs", "rank": 45, "score": 77505.8438945521 }, { "content": "fn load_component_data(context: &mut LoadContext, componentData: &json::Json)\n\n -> Result<HashMap<String, super::Data>, LoaderError>\n\n{\n\n match componentData {\n\n &json::Object(ref data) => {\n\n let mut result = HashMap::new();\n\n\n\n for (key, val) in data.iter() {\n\n result.insert(key.clone(), try!(load_data_entry(context, val)));\n\n }\n\n\n\n Ok(result)\n\n },\n\n _ => return Err(WrongDataStructure(format!(\"Wrong format for component data, expected object but got {}\", componentData)))\n\n }\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 46, "score": 71277.15632255352 }, { "content": "pub fn exec_game<RL: resources::ResourcesLoader+Send+Share>(resources: RL) {\n\n let game = Game::new(resources);\n\n game.exec();\n\n}\n", "file_path": "src/declmagic.rs", "rank": 47, "score": 71014.33330736757 }, { "content": "/// Loads entities into an EntitiesState\n\npub fn load(loader: &ResourcesLoader, resourceName: &str, output: &mut EntitiesState)\n\n -> Result<Vec<EntityID>, LoaderError>\n\n{\n\n let mut context = LoadContext {\n\n loader: loader,\n\n output: output,\n\n loadedDocs: HashSet::new(),\n\n };\n\n\n\n load_impl(&mut context, resourceName)\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 48, "score": 67953.79635326294 }, { "content": "/// changes the position of an entity\n\npub fn set_position(state: &mut EntitiesState, id: &EntityID, pos: &Vec2<f32>)\n\n{\n\n let current = get_entity_position(state, id);\n\n let current = na::Vec2::new(current.x, current.y);\n\n adjust_position(state, id, &(pos - current));\n\n}\n\n\n", "file_path": "src/physics/mod.rs", "rank": 49, "score": 
64999.707212425856 }, { "content": "/// changes the movement of an entity\n\npub fn adjust_movement(state: &mut EntitiesState, id: &EntityID, diff: &Vec2<f32>)\n\n{\n\n let list: Vec<ComponentID> = state\n\n .get_visible_native_components(\"movement\")\n\n .move_iter()\n\n // take only the components owned by the entity\n\n .filter(|c| state.get_owner(c).unwrap() == *id)\n\n .filter(|c| state.get_as_boolean(c, \"autoupdate\").unwrap_or(true))\n\n .collect();\n\n\n\n if list.len() == 0 {\n\n let mut data = HashMap::new();\n\n data.insert(\"x\".to_string(), ::entities::Number(diff.x as f64));\n\n data.insert(\"y\".to_string(), ::entities::Number(diff.y as f64));\n\n state.create_native_component(id, \"movement\", data);\n\n return\n\n }\n\n\n\n let diff = diff / (list.len() as f32);\n\n\n", "file_path": "src/physics/mod.rs", "rank": 50, "score": 64999.707212425856 }, { "content": "/// changes the position of an entity\n\npub fn set_movement(state: &mut EntitiesState, id: &EntityID, movement: &Vec2<f32>)\n\n{\n\n let current = get_entity_movement(state, id);\n\n let current = na::Vec2::new(current.x, current.y);\n\n adjust_movement(state, id, &(movement - current));\n\n}\n\n\n", "file_path": "src/physics/mod.rs", "rank": 51, "score": 64999.707212425856 }, { "content": "/// updates the position of an entity\n\npub fn adjust_position(state: &mut EntitiesState, id: &EntityID, diff: &Vec2<f32>)\n\n{\n\n let list: Vec<ComponentID> = state\n\n .get_components_iter()\n\n // take only the components owned by the entity\n\n .filter(|c| state.get_owner(*c).unwrap() == *id)\n\n // take only the \"position\" components\n\n .filter(|c| match state.get_type(*c) { Ok(NativeComponentType(t)) => t.as_slice() == \"position\", _ => false })\n\n .filter(|c| state.get_as_boolean(*c, \"autoupdate\").unwrap_or(true))\n\n .map(|c| c.clone()).collect();\n\n\n\n if list.len() == 0 {\n\n let mut data = HashMap::new();\n\n data.insert(\"x\".to_string(), ::entities::Number(diff.x as f64));\n\n 
data.insert(\"y\".to_string(), ::entities::Number(diff.y as f64));\n\n state.create_native_component(id, \"position\", data);\n\n return\n\n }\n\n\n\n let diff = diff / (list.len() as f32);\n", "file_path": "src/physics/mod.rs", "rank": 52, "score": 64999.707212425856 }, { "content": "fn load_impl(context: &mut LoadContext, resourceName: &str)\n\n -> Result<Vec<EntityID>, LoaderError>\n\n{\n\n // checking that the doc has not already been loaded\n\n if context.loadedDocs.contains_equiv(&resourceName) {\n\n return Ok(vec!());\n\n }\n\n context.loadedDocs.insert(resourceName.to_string());\n\n\n\n // if not, loading the resource\n\n let mut resource = match context.loader.load(resourceName) {\n\n Ok(r) => ::std::io::BufferedReader::new(r),\n\n Err(err) => return Err(IoError(err))\n\n };\n\n\n\n // building the JSON object\n\n let data = match json::Builder::new(resource.chars().map(|c| c.unwrap())).build() {\n\n Ok(d) => d,\n\n Err(err) => return Err(SyntaxError(err))\n\n };\n\n\n\n // loading the doc into the entities state\n\n load_all(context, resourceName, &data)\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 53, "score": 64975.70048394366 }, { "content": " uint8 *data;\n", "file_path": "deps/stb-image/stb_image.c", "rank": 54, "score": 64552.81379476373 }, { "content": "enum Message {\n\n Stop(Sender<()>),\n\n Execute(proc():Send)\n\n}\n\n\n\nimpl CommandsThread {\n\n pub fn new() -> CommandsThread {\n\n let (tx, rx) : (Sender<Message>, Receiver<Message>) = std::comm::channel();\n\n\n\n spawn(proc() {\n\n loop {\n\n match rx.recv() {\n\n Stop(s) => { s.send(()); return; },\n\n Execute(f) => f()\n\n }\n\n }\n\n });\n\n\n\n CommandsThread {\n\n sender: tx\n", "file_path": "src/threaded_executer.rs", "rank": 55, "score": 63885.8070801772 }, { "content": "struct Component {\n\n owner: EntityID,\n\n\n\n cmp_type: ComponentType,\n\n\n\n data: ComponentData,\n\n\n\n // all components in this list have their data as a ComponentDataLink to this one\n\n 
linked_from: Vec<ComponentID>,\n\n\n\n parent: Option<ComponentID>,\n\n // when a component is destroyed, all children are destroyed too\n\n children: Vec<ComponentID>\n\n}\n\n\n", "file_path": "src/entities/state.rs", "rank": 56, "score": 63826.9713104738 }, { "content": "fn main() {\n\n\tlet loader = DirLoader::new(::std::os::self_exe_path().unwrap().join(\"resources\"));\n\n\n\n\tdeclmagic::exec_game(loader);\n\n}\n", "file_path": "example/src/example1.rs", "rank": 57, "score": 62647.66244919058 }, { "content": "enum LoaderError {\n\n IoError(::std::io::IoError),\n\n SyntaxError(::serialize::json::ParserError),\n\n StateError(super::StateError),\n\n WrongDataStructure(String),\n\n}\n\n\n\nimpl ::std::fmt::Show for LoaderError {\n\n fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::FormatError> {\n\n match self {\n\n &IoError(ref err) => err.fmt(formatter),\n\n &SyntaxError(ref err) => err.fmt(formatter),\n\n &StateError(ref err) => err.fmt(formatter),\n\n &WrongDataStructure(ref err) => format!(\"WrongDataStructure({})\", err).fmt(formatter),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 58, "score": 62258.33478960654 }, { "content": "fn load_data_entry(context: &mut LoadContext, element: &json::Json)\n\n -> Result<super::Data, LoaderError>\n\n{\n\n Ok(match element {\n\n &json::String(ref data) => {\n\n super::String(data.clone())\n\n },\n\n &json::Number(ref data) => {\n\n super::Number(data.clone())\n\n },\n\n &json::Boolean(ref data) => {\n\n super::Boolean(data.clone())\n\n },\n\n &json::List(ref elems) => {\n\n let mut result = Vec::new();\n\n for elem in elems.iter() {\n\n let val = try!(load_data_entry(context, elem));\n\n result.push(val);\n\n }\n\n super::List(result)\n", "file_path": "src/entities/loader.rs", "rank": 59, "score": 61924.69837111375 }, { "content": "struct Element {\n\n\tvertexShaderSrc: String,\n\n\tvertexShader: Option<Shader>,\n\n\tfragmentShaderSrc: String,\n\n\tfragmentShader: 
Option<Shader>,\n\n\tprogram: Option<Program>,\n\n\tvertexBuffer: Option<VertexBuffer>,\n\n\tindexBuffer: Option<IndexBuffer>,\n\n\tuniforms: Option<Mutex<ProgramUniforms>>\n\n}\n\n\n\nimpl CustomDisplaySystem {\n\n\tpub fn new(display: Arc<ManagedDisplay>, _: &EntitiesState, log: |log::LogRecord|)\n\n\t\t-> CustomDisplaySystem\n\n\t{\n\n\t\tCustomDisplaySystem {\n\n\t\t\tdisplay: display.clone(),\n\n\t\t\telements: HashMap::new()\n\n\t\t}\n\n\t}\n", "file_path": "src/display/system/custom_display_system.rs", "rank": 60, "score": 59359.777255417095 }, { "content": "type enum_unnamed1 = c_uint;\n\n//static STBI_default: u32 = 0_u32;\n\n//static STBI_grey: u32 = 1_u32;\n\n//static STBI_grey_alpha: u32 = 2_u32;\n\n//static STBI_rgb: u32 = 3_u32;\n\n//static STBI_rgb_alpha: u32 = 4_u32;\n\n\n\npub mod bindgen {\n\n use libc::*;\n\n\n\n pub type stbi_uc = c_uchar;\n\n pub struct stbi_io_callbacks {\n\n read: *const u8,\n\n skip: *const u8,\n\n eof: *const u8,\n\n }\n\n\n\n#[link(name = \"stb-image\")]\n\nextern {\n\n\n", "file_path": "deps/stb-image/stb_image.rs", "rank": 61, "score": 59013.16350164622 }, { "content": "struct LoadContext<'a> {\n\n loader: &'a ResourcesLoader,\n\n output: &'a mut EntitiesState,\n\n loadedDocs: HashSet<String>,\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 62, "score": 57756.46284622383 }, { "content": "pub trait Drawable {\n\n\tfn draw(&self, matrix: &Mat4<f32>);\n\n}\n", "file_path": "src/display/mod.rs", "rank": 63, "score": 56755.62244887465 }, { "content": "pub trait GameSystem {\n\n fn process(&mut self, state: &mut entities::EntitiesState, elapsed: &f64);\n\n}\n\n\n\npub struct Game {\n\n display: Arc<display::managed_display::ManagedDisplay>,\n\n\n\n state: entities::EntitiesState,\n\n loader: Box<resources::ResourcesLoader + Send + Share>,\n\n\n\n displaySystem: display::DisplaySystem,\n\n inputSystem: input::InputSystem,\n\n physicsSystem: physics::PhysicsSystem,\n\n mechanicsSystem: 
mechanics::MechanicsSystem,\n\n\n\n thirdPartySystems: Vec<Box<GameSystem>>\n\n}\n\n\n\nimpl Game {\n\n pub fn new<RL: resources::ResourcesLoader+Send+Share>(resources: RL)\n", "file_path": "src/declmagic.rs", "rank": 64, "score": 56755.62244887465 }, { "content": "// TODO: totally rework this trait once associated types are implemented\n\npub trait EntitiesHelper {\n\n /// Creates a new empty entity in the state.\n\n fn create_entity(&mut self, name: Option<String>, visible: bool) -> EntityID;\n\n\n\n /// Destroys an entity.\n\n ///\n\n /// This operation will also destroy all the components owned by\n\n /// this entity and all components whose type is this entity.\n\n fn destroy_entity(&mut self, id: &EntityID) -> Result<(), StateError>;\n\n\n\n /// Creates a new component of a native type.\n\n fn create_native_component(&mut self, owner: &EntityID,\n\n typename: &str, data: HashMap<String, Data>)\n\n -> Result<ComponentID, StateError>;\n\n\n\n /// Creates a new component of an entity type.\n\n fn create_component_from_entity(&mut self, owner: &EntityID, typename: &EntityID,\n\n data: HashMap<String, Data>)\n\n -> Result<ComponentID, StateError>;\n\n\n", "file_path": "src/entities/mod.rs", "rank": 65, "score": 55290.754257832916 }, { "content": "pub fn execute<E: EntitiesHelper, S: ::std::str::Str + ::std::fmt::Show>(entities: &E, component: &ComponentID, code: &S)\n\n -> Result<any::AnyLuaValue, LuaError>\n\n{\n\n let mut lua = Lua::new();\n\n\n\n println!(\"executing script {}\", code);\n\n\n\n //lua.set(\"Entities\", );\n\n\n\n lua.execute(code.as_slice())\n\n}\n", "file_path": "src/script.rs", "rank": 66, "score": 52293.99698572528 }, { "content": "pub trait ResourcesLoader : Clone {\n\n\tfn load(&self, resourceName: &str) -> IoResult<Box<Reader>>;\n\n}\n", "file_path": "src/resources/mod.rs", "rank": 67, "score": 50779.831218165134 }, { "content": "pub fn execute_mut<E: EntitiesHelper, S: ::std::str::Str + ::std::fmt::Show>(entities: &mut E, component: 
&ComponentID, code: &S)\n\n -> Result<any::AnyLuaValue, LuaError>\n\n{\n\n let mut lua = Lua::new();\n\n\n\n println!(\"executing script {}\", code);\n\n\n\n //lua.set(\"Entities\", );\n\n\n\n lua.execute(code.as_slice())\n\n}\n\n\n", "file_path": "src/script.rs", "rank": 68, "score": 49739.153583962136 }, { "content": "pub trait ConfigValue: Send + Clone + Show {\n\n}\n\n\n\npub struct Config {\n\n params: HashMap<String, Box<ConfigValue>>\n\n}\n\n\n\nimpl ConfigValue for int {}\n\nimpl ConfigValue for String {}\n\nimpl ConfigValue for bool {}\n", "file_path": "src/config.rs", "rank": 69, "score": 45222.47737031873 }, { "content": "fn load_entity_from_name(context: &mut LoadContext, entityName: &str)\n\n -> Result<EntityID, LoaderError>\n\n{\n\n let entityNameRefined = {\n\n let path = Path::new(entityName);\n\n path.join(path.filename().unwrap()).as_str().expect(\"non-utf8 entity name!\").to_string()\n\n };\n\n\n\n // first, we check if there is an existing entity with this name\n\n {\n\n let entities = context.output.get_entities_by_name(entityName);\n\n if entities.len() >= 2 {\n\n return Err(WrongDataStructure(format!(\"Found multiple entities with the same name: {}\", entityName)))\n\n }\n\n if entities.len() == 1 {\n\n return Ok(entities.get(0).clone())\n\n }\n\n }\n\n\n\n // first, we check if there is an existing entity with this name\n", "file_path": "src/entities/loader.rs", "rank": 70, "score": 38738.24735892223 }, { "content": " int w,h;\n", "file_path": "deps/stb-image/stb_image.c", "rank": 71, "score": 35133.87510629291 }, { "content": " int h,v;\n", "file_path": "deps/stb-image/stb_image.c", "rank": 72, "score": 35133.87510629291 }, { "content": "fn load_all(context: &mut LoadContext, resourceName: &str, doc: &json::Json)\n\n -> Result<Vec<EntityID>, LoaderError>\n\n{\n\n match doc {\n\n &json::List(ref entities) => {\n\n let mut result = Vec::new();\n\n\n\n for elem in entities.iter() {\n\n match load_entity(context, resourceName, elem) {\n\n Ok(e) 
=> result.push(e),\n\n Err(err) => {\n\n for e in result.iter() { context.output.destroy_entity(e); }\n\n return Err(err);\n\n }\n\n }\n\n }\n\n\n\n Ok(result)\n\n },\n\n _ => return Err(WrongDataStructure(format!(\"Wrong format for entities document, expected list of entities but got: {}\", doc)))\n\n }\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 73, "score": 34704.42623385102 }, { "content": "fn load_component(context: &mut LoadContext, entity: &EntityID, component: &json::Json)\n\n -> Result<ComponentID, LoaderError>\n\n{\n\n match component {\n\n &json::Object(ref componentInfos) => {\n\n let cmptype = match componentInfos.find(&\"type\".to_string()) {\n\n Some(t) => t,\n\n None => return Err(WrongDataStructure(format!(\"Component does not have a \\\"type\\\" field: {}\", component)))\n\n };\n\n\n\n let data = match componentInfos.find(&\"data\".to_string()) {\n\n Some(cmp) => { try!(load_component_data(context, cmp)) },\n\n _ => HashMap::new()\n\n };\n\n\n\n match cmptype {\n\n &json::String(ref t) => context.output.create_native_component(entity, t.as_slice(), data).map_err(|err| StateError(err)),\n\n &json::Object(_) => {\n\n match load_data_entry(context, cmptype) {\n\n Ok(super::Entity(id)) => context.output.create_component_from_entity(entity, &id, data).map_err(|err| StateError(err)),\n", "file_path": "src/entities/loader.rs", "rank": 74, "score": 33964.459599552094 }, { "content": "fn load_entity(context: &mut LoadContext, resourceName: &str, entity: &json::Json)\n\n -> Result<EntityID, LoaderError>\n\n{\n\n match entity {\n\n &json::Object(ref entityData) => {\n\n let name = entityData\n\n .find(&\"name\".to_string())\n\n .and_then(|e| e.as_string())\n\n .map(|e| e.to_string())\n\n .map(|name| Path::new(resourceName).join(name).as_str().expect(\"non-utf8 entity name!\").to_string());\n\n\n\n let visible = entityData.find(&\"visible\".to_string()).and_then(|e| e.as_boolean()).unwrap_or(true);\n\n let entityID = 
context.output.create_entity(name, visible);\n\n\n\n match entityData.find(&\"components\".to_string()) {\n\n Some(cmp) => { \n\n match load_components_list(context, &entityID, cmp) {\n\n Ok(_) => (),\n\n Err(err) => {\n\n context.output.destroy_entity(&entityID);\n", "file_path": "src/entities/loader.rs", "rank": 75, "score": 33964.459599552094 }, { "content": "\tfn get_num_elems(&self) -> uint { 3 }\n\n\tfn get_total_size(&self) -> gl::types::GLsizei { 3 * std::mem::size_of_val(self.ref0()) as gl::types::GLsizei }\n\n}\n\n\n\nimpl GLDataTuple for (f32, f32, f32, f32) {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum { gl::FLOAT }\n\n\tfn get_num_elems(&self) -> uint { 4 }\n\n\tfn get_total_size(&self) -> gl::types::GLsizei { 4 * std::mem::size_of_val(self.ref0()) as gl::types::GLsizei }\n\n}\n\n\n\n\n", "file_path": "src/display/raw/data_types.rs", "rank": 76, "score": 33508.87632883528 }, { "content": "}\n\n\n\nimpl GLDataType for u16 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::UNSIGNED_SHORT\n\n\t}\n\n}\n\n\n\nimpl GLDataType for f32 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::FLOAT\n\n\t}\n\n}\n\n\n\nimpl GLDataType for f64 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::DOUBLE\n\n\t}\n\n}\n\n\n", "file_path": "src/display/raw/data_types.rs", "rank": 77, "score": 33506.977514089565 }, { "content": "use gl;\n\nuse std;\n\n\n", "file_path": "src/display/raw/data_types.rs", "rank": 78, "score": 33501.98158964009 }, { "content": "}\n\n\n\nimpl UniformValue for u16 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::UNSIGNED_SHORT\n\n\t}\n\n}\n\n\n\nimpl UniformValue for f32 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::FLOAT\n\n\t}\n\n}\n\n\n\nimpl UniformValue for f64 {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::DOUBLE\n\n\t}\n\n}\n\n\n", "file_path": "src/display/raw/data_types.rs", "rank": 79, "score": 33501.9412070489 }, { "content": "impl UniformValue for 
::nalgebra::na::Mat2<f32> {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::FLOAT_MAT2\n\n\t}\n\n}\n\n\n\nimpl UniformValue for ::nalgebra::na::Mat3<f32> {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::FLOAT_MAT3\n\n\t}\n\n}\n\n\n\nimpl UniformValue for ::nalgebra::na::Mat4<f32> {\n\n\tfn get_gl_type(&self) -> gl::types::GLenum {\n\n\t\tgl::FLOAT_MAT4\n\n\t}\n\n}\n", "file_path": "src/display/raw/data_types.rs", "rank": 80, "score": 33501.84056605127 }, { "content": "fn load_components_list(context: &mut LoadContext, entity: &EntityID, componentsList: &json::Json)\n\n -> Result<Vec<ComponentID>, LoaderError>\n\n{\n\n match componentsList {\n\n &json::List(ref components) => {\n\n let mut result = Vec::new();\n\n\n\n for elem in components.iter() {\n\n match load_component(context, entity, elem) {\n\n Ok(e) => result.push(e),\n\n Err(err) => {\n\n for e in result.iter() { context.output.destroy_component(e); }\n\n return Err(err);\n\n }\n\n }\n\n }\n\n\n\n Ok(result)\n\n },\n\n _ => return Err(WrongDataStructure(format!(\"Wrong format for components list, expected list but got: {}\", componentsList)))\n\n }\n\n}\n\n\n", "file_path": "src/entities/loader.rs", "rank": 81, "score": 32616.49290519133 }, { "content": " void *raw_data;\n", "file_path": "deps/stb-image/stb_image.c", "rank": 82, "score": 28271.704808847862 }, { "content": " void *io_user_data;\n", "file_path": "deps/stb-image/stb_image.c", "rank": 83, "score": 27210.58871656794 }, { "content": " \tidResult.map(|id| {\n\n \t\tShader {\n\n \t\t\tshader: Arc::new(ShaderImpl {\n\n\t \t\t\tdisplay: self.context.clone(),\n\n\t \t\t\tid: id,\n\n\t \t\t\tshaderType: stype\n\n\t \t\t})\n\n \t\t}\n\n \t})\n\n\t}\n\n\n\n\tpub fn build_texture<T: data_types::GLDataType>(&self, data: &[T], width: uint, height: uint, depth: uint, arraySize: uint)\n\n\t\t-> Texture\n\n\t{\n\n\t\t// TODO: restore when image format is supported\n\n\t\t/*if width * height * depth * arraySize != data.len() 
{\n\n\t\t\tfail!(\"Texture data has different size from width*height*depth*arraySize\");\n\n\t\t}*/\n\n\n\n\t\tlet textureType = if height == 1 && depth == 1 {\n", "file_path": "src/display/raw/mod.rs", "rank": 85, "score": 32.59851248107586 }, { "content": "extern fn closeCallback(window: *const libglfw3::GLFWwindow) {\n\n let sender : &Sender<WindowEvent> = unsafe { transmute(libglfw3::glfwGetWindowUserPointer(window)) };\n\n sender.send(super::super::Closed);\n\n}\n\n\n\nextern fn cursorPosCallback(window: *const libglfw3::GLFWwindow, x: ::libc::c_double, y: ::libc::c_double) {\n\n let mut width: c_int = unsafe { ::std::mem::uninitialized() };\n\n let mut height: c_int = unsafe { ::std::mem::uninitialized() };\n\n unsafe { libglfw3::glfwGetWindowSize(window, &mut width, &mut height) };\n\n\n\n let sender : &Sender<WindowEvent> = unsafe { transmute(libglfw3::glfwGetWindowUserPointer(window)) };\n\n let x = (2.0 * x / (width as f64)) - 1.0;\n\n let y = (2.0 * (1.0 - (y / (height as f64)))) - 1.0;\n\n sender.send(super::super::Input(::input::MouseMoved(x as f64, y as f64)));\n\n}\n\n\n\nimpl Window {\n\n pub fn new(width: uint, height: uint, title: &str) -> Window {\n\n unsafe {\n\n GLFWInitialized.doit(|| {\n", "file_path": "src/display/raw/context/window.rs", "rank": 86, "score": 29.370066128029862 }, { "content": "\t\t};\n\n\n\n\t\tlet mut stream = match self.loader.load(name) {\n\n\t\t\tOk(v) => v,\n\n\t\t\tErr(e) => return Err(format!(\"{}\", e))\n\n\t\t};\n\n\n\n\t\tlet data = match stream.read_to_end() {\n\n\t\t\tOk(d) => d,\n\n\t\t\tErr(e) => return Err(format!(\"{}\", e))\n\n\t\t};\n\n\n\n\t\tlet texture = Arc::new(match ::stb_image::image::load_from_memory(data.as_slice()) {\n\n\t\t\t::stb_image::image::Error(s) => return Err(format!(\"load_from_memory failed\")),\n\n\t\t\t::stb_image::image::ImageU8(img) => {\n\n\t\t\t\t//let data: &[u32] = ::std::mem::transmute(img.data.as_slice());\n\n\t\t\t\tself.display.build_texture(img.data.as_slice(), img.width, 
img.height, 1, 1)\t\t// TODO: image depth not taken into account\n\n\t\t\t},\n\n \t\t::stb_image::image::ImageF32(img) => self.display.build_texture(img.data.as_slice(), img.width, img.height, 1, 1)\n\n\t\t});\n", "file_path": "src/display/managed_display.rs", "rank": 87, "score": 27.337178481803658 }, { "content": "extern crate std;\n\n\n\nuse std::sync::{ Arc, Future, Mutex };\n\nuse std::rc::Rc;\n\nuse threaded_executer::CommandsThread;\n\nuse std::string::String;\n\n\n\n#[allow(dead_code)]\n\nmod libglfw3;\n\nmod window;\n\n\n\npub struct GLContext {\n\n\twindow: Mutex<window::Window>\n\n}\n\n\n\nimpl GLContext {\n\n\tpub fn new(width: uint, height: uint, title: &str) -> GLContext {\n\n\t\tlet window = window::Window::new(width, height, title);\n\n\t\twindow.make_context_current();\n\n\n", "file_path": "src/display/raw/context/mod.rs", "rank": 88, "score": 26.747635358394767 }, { "content": "\n\npub struct VertexBuffer {\n\n\tdisplay: Arc<context::GLContext>,\n\n\tid: gl::types::GLuint,\n\n\telementsSize: uint,\n\n\tbindings: HashMap<String, (gl::types::GLenum, gl::types::GLint, gl::types::GLint)>\t\t\t// for each binding, the data type, number of elems, and offset\n\n}\n\n\n\npub struct IndexBuffer {\n\n\tdisplay: Arc<context::GLContext>,\n\n\tid: gl::types::GLuint,\n\n\telementsCount: uint,\n\n\tdataType: gl::types::GLenum,\n\n\tprimitives: gl::types::GLenum\n\n}\n\n\n\nimpl Display {\n\n\tpub fn new(width: uint, height: uint, title: &str) -> Display {\n\n\t\tDisplay {\n\n\t\t\tcontext: Arc::new(context::GLContext::new(width, height, title))\n", "file_path": "src/display/raw/mod.rs", "rank": 89, "score": 25.86715916807986 }, { "content": " ::libc::c_int) ->\n\n ::libc::c_int;\n\n pub fn archive_match_path_unmatched_inclusions(arg1: *mut Struct_archive)\n\n -> ::libc::c_int;\n\n pub fn archive_match_path_unmatched_inclusions_next(arg1:\n\n *mut Struct_archive,\n\n arg2:\n\n *mut *const ::libc::c_char)\n\n -> ::libc::c_int;\n\n pub fn 
archive_match_path_unmatched_inclusions_next_w(arg1:\n\n *mut Struct_archive,\n\n arg2: *mut *const ::libc::wchar_t)\n\n -> ::libc::c_int;\n\n pub fn archive_match_time_excluded(arg1: *mut Struct_archive,\n\n arg2: *mut Struct_archive_entry) ->\n\n ::libc::c_int;\n\n pub fn archive_match_include_time(arg1: *mut Struct_archive,\n\n _flag: ::libc::c_int, _sec: ::libc::time_t,\n\n _nsec: ::libc::c_long) -> ::libc::c_int;\n\n pub fn archive_match_include_date(arg1: *mut Struct_archive,\n", "file_path": "src/resources/libarchive.rs", "rank": 90, "score": 25.72881209252065 }, { "content": "#[inline] pub fn ColorMaski(index: GLuint, r: GLboolean, g: GLboolean, b: GLboolean, a: GLboolean) { unsafe { mem::transmute::<_, extern \"system\" fn(GLuint, GLboolean, GLboolean, GLboolean, GLboolean)>(storage::ColorMaski.f)(index, r, g, b, a) } }\n\n#[inline] pub fn ColorP3ui(type_: GLenum, color: GLuint) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLuint)>(storage::ColorP3ui.f)(type_, color) } }\n\n#[inline] pub unsafe fn ColorP3uiv(type_: GLenum, color: *const GLuint) { mem::transmute::<_, extern \"system\" fn(type_: GLenum, color: *const GLuint) >(storage::ColorP3uiv.f)(type_, color) }\n\n#[inline] pub fn ColorP4ui(type_: GLenum, color: GLuint) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLuint)>(storage::ColorP4ui.f)(type_, color) } }\n\n#[inline] pub unsafe fn ColorP4uiv(type_: GLenum, color: *const GLuint) { mem::transmute::<_, extern \"system\" fn(type_: GLenum, color: *const GLuint) >(storage::ColorP4uiv.f)(type_, color) }\n\n#[inline] pub fn CompileShader(shader: GLuint) { unsafe { mem::transmute::<_, extern \"system\" fn(GLuint)>(storage::CompileShader.f)(shader) } }\n\n#[inline] pub unsafe fn CompressedTexImage1D(target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, internalformat: GLenum, width: 
GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) >(storage::CompressedTexImage1D.f)(target, level, internalformat, width, border, imageSize, data) }\n\n#[inline] pub unsafe fn CompressedTexImage2D(target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) >(storage::CompressedTexImage2D.f)(target, level, internalformat, width, height, border, imageSize, data) }\n\n#[inline] pub unsafe fn CompressedTexImage3D(target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, depth: GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, depth: GLsizei, border: GLint, imageSize: GLsizei, data: *const c_void) >(storage::CompressedTexImage3D.f)(target, level, internalformat, width, height, depth, border, imageSize, data) }\n\n#[inline] pub unsafe fn CompressedTexSubImage1D(target: GLenum, level: GLint, xoffset: GLint, width: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, xoffset: GLint, width: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) >(storage::CompressedTexSubImage1D.f)(target, level, xoffset, width, format, imageSize, data) }\n\n#[inline] pub unsafe fn CompressedTexSubImage2D(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, width: GLsizei, height: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, width: GLsizei, height: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) 
>(storage::CompressedTexSubImage2D.f)(target, level, xoffset, yoffset, width, height, format, imageSize, data) }\n\n#[inline] pub unsafe fn CompressedTexSubImage3D(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, zoffset: GLint, width: GLsizei, height: GLsizei, depth: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) { mem::transmute::<_, extern \"system\" fn(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, zoffset: GLint, width: GLsizei, height: GLsizei, depth: GLsizei, format: GLenum, imageSize: GLsizei, data: *const c_void) >(storage::CompressedTexSubImage3D.f)(target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data) }\n\n#[inline] pub fn CopyBufferSubData(readTarget: GLenum, writeTarget: GLenum, readOffset: GLintptr, writeOffset: GLintptr, size: GLsizeiptr) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLenum, GLintptr, GLintptr, GLsizeiptr)>(storage::CopyBufferSubData.f)(readTarget, writeTarget, readOffset, writeOffset, size) } }\n\n#[inline] pub fn CopyImageSubData(srcName: GLuint, srcTarget: GLenum, srcLevel: GLint, srcX: GLint, srcY: GLint, srcZ: GLint, dstName: GLuint, dstTarget: GLenum, dstLevel: GLint, dstX: GLint, dstY: GLint, dstZ: GLint, srcWidth: GLsizei, srcHeight: GLsizei, srcDepth: GLsizei) { unsafe { mem::transmute::<_, extern \"system\" fn(GLuint, GLenum, GLint, GLint, GLint, GLint, GLuint, GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei)>(storage::CopyImageSubData.f)(srcName, srcTarget, srcLevel, srcX, srcY, srcZ, dstName, dstTarget, dstLevel, dstX, dstY, dstZ, srcWidth, srcHeight, srcDepth) } }\n\n#[inline] pub fn CopyTexImage1D(target: GLenum, level: GLint, internalformat: GLenum, x: GLint, y: GLint, width: GLsizei, border: GLint) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLint, GLenum, GLint, GLint, GLsizei, GLint)>(storage::CopyTexImage1D.f)(target, level, internalformat, x, y, width, border) } }\n\n#[inline] pub fn 
CopyTexImage2D(target: GLenum, level: GLint, internalformat: GLenum, x: GLint, y: GLint, width: GLsizei, height: GLsizei, border: GLint) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLint, GLenum, GLint, GLint, GLsizei, GLsizei, GLint)>(storage::CopyTexImage2D.f)(target, level, internalformat, x, y, width, height, border) } }\n\n#[inline] pub fn CopyTexSubImage1D(target: GLenum, level: GLint, xoffset: GLint, x: GLint, y: GLint, width: GLsizei) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLint, GLint, GLint, GLint, GLsizei)>(storage::CopyTexSubImage1D.f)(target, level, xoffset, x, y, width) } }\n\n#[inline] pub fn CopyTexSubImage2D(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, x: GLint, y: GLint, width: GLsizei, height: GLsizei) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei)>(storage::CopyTexSubImage2D.f)(target, level, xoffset, yoffset, x, y, width, height) } }\n\n#[inline] pub fn CopyTexSubImage3D(target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, zoffset: GLint, x: GLint, y: GLint, width: GLsizei, height: GLsizei) { unsafe { mem::transmute::<_, extern \"system\" fn(GLenum, GLint, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei)>(storage::CopyTexSubImage3D.f)(target, level, xoffset, yoffset, zoffset, x, y, width, height) } }\n\n#[inline] pub fn CreateProgram() -> GLuint { unsafe { mem::transmute::<_, extern \"system\" fn() -> GLuint>(storage::CreateProgram.f)() } }\n", "file_path": "deps/gl/gl.rs", "rank": 91, "score": 25.570139407599182 }, { "content": "}\n\n\n\nimpl Reader for MemoryResourceReader {\n\n\tfn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n\n\t\tunsafe {\n\n\t\t\tlet read = libarchive::archive_read_data(self.archive, buf.as_mut_ptr() as *mut ::libc::c_void, buf.len() as ::libc::size_t);\n\n\n\n\t\t\tif read < 0 {\n\n\t\t\t\treturn fail!(\"Error while decompressing resource\");\n\n\t\t\t}\n\n\n\n\t\t\tif read == 0 
{\n\n\t\t\t\treturn Err(::std::io::IoError{ kind: ::std::io::EndOfFile, desc: \"End of file\", detail: None });\n\n\t\t\t}\n\n\n\n\t\t\tOk(read as uint)\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Drop for MemoryResourceReader {\n\n\tfn drop(&mut self) {\n\n\t\tunsafe { libarchive::archive_read_free(self.archive); }\n\n\t}\n\n}\n", "file_path": "src/resources/archive_loader.rs", "rank": 92, "score": 25.023264384333913 }, { "content": " src: *mut Struct_archive);\n\n pub fn archive_file_count(arg1: *mut Struct_archive) -> ::libc::c_int;\n\n pub fn archive_match_new() -> *mut Struct_archive;\n\n pub fn archive_match_free(arg1: *mut Struct_archive) -> ::libc::c_int;\n\n pub fn archive_match_excluded(arg1: *mut Struct_archive,\n\n arg2: *mut Struct_archive_entry) ->\n\n ::libc::c_int;\n\n pub fn archive_match_path_excluded(arg1: *mut Struct_archive,\n\n arg2: *mut Struct_archive_entry) ->\n\n ::libc::c_int;\n\n pub fn archive_match_exclude_pattern(arg1: *mut Struct_archive,\n\n arg2: *const ::libc::c_char) ->\n\n ::libc::c_int;\n\n pub fn archive_match_exclude_pattern_w(arg1: *mut Struct_archive,\n\n arg2: *const ::libc::wchar_t) -> ::libc::c_int;\n\n pub fn archive_match_exclude_pattern_from_file(arg1: *mut Struct_archive,\n\n arg2: *const ::libc::c_char,\n\n _nullSeparator:\n\n ::libc::c_int) ->\n\n ::libc::c_int;\n", "file_path": "src/resources/libarchive.rs", "rank": 93, "score": 24.342076235092623 }, { "content": "}\n\n\n\nimpl ProgramUniforms {\n\n\tpub fn set_value<T: data_types::UniformValue>(&mut self, uniformName: &str, value: T) {\n\n\t\tlet &(location, gltype, typesize) = match self.uniforms.find(&uniformName.to_string()) {\n\n\t\t\tSome(a) => a,\n\n\t\t\tNone => return\t\t// the uniform is not used, we ignore it\n\n\t\t};\n\n\n\n\t\tif gltype != value.get_gl_type() {\n\n\t\t\tfail!(\"Type of data passed to set_value must match the type of data requested by the shader\")\n\n\t\t}\n\n\n\n\t\tlet mut data: Vec<char> = 
Vec::with_capacity(std::mem::size_of_val(&value));\n\n\t\tunsafe { data.set_len(std::mem::size_of_val(&value)); }\n\n\n\n\t\tlet mut dataInside = data.as_mut_ptr() as *mut T;\n\n\t\tunsafe { (*dataInside) = value; }\n\n\n\n\t\tself.values.insert(location.clone(), (gltype, data));\n", "file_path": "src/display/raw/mod.rs", "rank": 94, "score": 23.762028563658426 }, { "content": "impl PrimitiveType {\n\n\tfn get_gl_enum(&self) -> gl::types::GLenum {\n\n\t\tmatch *self {\n\n\t\t\tPointsList => gl::POINTS,\n\n\t\t\tLinesList => gl::LINES,\n\n\t\t\tLineStrip => gl::LINE_STRIP,\n\n\t\t\tTrianglesList => gl::TRIANGLES,\n\n\t\t\tTrianglesListAdjacency => gl::TRIANGLES_ADJACENCY,\n\n\t\t\tTriangleStrip => gl::TRIANGLE_STRIP,\n\n\t\t\tTriangleStripAdjacency => gl::TRIANGLE_STRIP_ADJACENCY,\n\n\t\t\tTriangleFan => gl::TRIANGLE_FAN\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Texture {\n\n\tpub fn get_width(&self) -> uint {\n\n\t\tself.texture.width\n\n\t}\n\n\n", "file_path": "src/display/raw/mod.rs", "rank": 95, "score": 23.530397921615062 }, { "content": " arg2: *mut ::libc::c_void,\n\n arg3: ::libc::c_uint) ->\n\n ::libc::c_int;\n\n pub fn archive_read_append_callback_data(arg1: *mut Struct_archive,\n\n arg2: *mut ::libc::c_void) ->\n\n ::libc::c_int;\n\n pub fn archive_read_prepend_callback_data(arg1: *mut Struct_archive,\n\n arg2: *mut ::libc::c_void) ->\n\n ::libc::c_int;\n\n pub fn archive_read_open1(arg1: *mut Struct_archive) -> ::libc::c_int;\n\n pub fn archive_read_open(arg1: *mut Struct_archive,\n\n _client_data: *mut ::libc::c_void,\n\n arg2:\n\n *mut ::std::option::Option<extern \"C\" fn\n\n (arg1:\n\n *mut Struct_archive,\n\n arg2:\n\n *mut ::libc::c_void)\n\n ->\n\n ::libc::c_int>,\n", "file_path": "src/resources/libarchive.rs", "rank": 96, "score": 23.435400248948838 }, { "content": " pub static mut BlendFuncSeparate: FnPtr = FnPtr { f: failing::BlendFuncSeparate as *const libc::c_void, is_loaded: false };\n\n pub static mut BlendFuncSeparatei: FnPtr = FnPtr { f: 
failing::BlendFuncSeparatei as *const libc::c_void, is_loaded: false };\n\n pub static mut BlendFunci: FnPtr = FnPtr { f: failing::BlendFunci as *const libc::c_void, is_loaded: false };\n\n pub static mut BlitFramebuffer: FnPtr = FnPtr { f: failing::BlitFramebuffer as *const libc::c_void, is_loaded: false };\n\n pub static mut BufferData: FnPtr = FnPtr { f: failing::BufferData as *const libc::c_void, is_loaded: false };\n\n pub static mut BufferSubData: FnPtr = FnPtr { f: failing::BufferSubData as *const libc::c_void, is_loaded: false };\n\n pub static mut CheckFramebufferStatus: FnPtr = FnPtr { f: failing::CheckFramebufferStatus as *const libc::c_void, is_loaded: false };\n\n pub static mut ClampColor: FnPtr = FnPtr { f: failing::ClampColor as *const libc::c_void, is_loaded: false };\n\n pub static mut Clear: FnPtr = FnPtr { f: failing::Clear as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferData: FnPtr = FnPtr { f: failing::ClearBufferData as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferSubData: FnPtr = FnPtr { f: failing::ClearBufferSubData as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferfi: FnPtr = FnPtr { f: failing::ClearBufferfi as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferfv: FnPtr = FnPtr { f: failing::ClearBufferfv as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferiv: FnPtr = FnPtr { f: failing::ClearBufferiv as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearBufferuiv: FnPtr = FnPtr { f: failing::ClearBufferuiv as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearColor: FnPtr = FnPtr { f: failing::ClearColor as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearDepth: FnPtr = FnPtr { f: failing::ClearDepth as *const libc::c_void, is_loaded: false };\n\n pub static mut ClearDepthf: FnPtr = FnPtr { f: failing::ClearDepthf as *const libc::c_void, is_loaded: false };\n\n pub static mut 
ClearStencil: FnPtr = FnPtr { f: failing::ClearStencil as *const libc::c_void, is_loaded: false };\n\n pub static mut ClientWaitSync: FnPtr = FnPtr { f: failing::ClientWaitSync as *const libc::c_void, is_loaded: false };\n", "file_path": "deps/gl/gl.rs", "rank": 97, "score": 23.344061631179102 }, { "content": "\tpub fn get_height(&self) -> uint {\n\n\t\tself.texture.height\n\n\t}\n\n\n\n\tpub fn get_depth(&self) -> uint {\n\n\t\tself.texture.depth\n\n\t}\n\n}\n\n\n\nimpl Program {\n\n\tpub fn build_uniforms(&self)\n\n\t\t-> ProgramUniforms\n\n\t{\n\n\t\tProgramUniforms {\n\n\t\t\tdisplay: self.display.clone(),\n\n\t\t\ttextures: HashMap::new(),\n\n\t\t\tvalues: HashMap::new(),\n\n\t\t\tuniforms: self.uniforms.clone()\n\n\t\t}\n\n\t}\n", "file_path": "src/display/raw/mod.rs", "rank": 98, "score": 23.244026530533556 }, { "content": "use gl;\n\nuse libc;\n\nuse std;\n\n\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\n\n\nmod context;\n\npub mod data_types;\n\n\n\npub enum WindowEvent {\n\n\tMoved(uint, uint),\n\n\tResized(uint, uint),\n\n\tClosed,\n\n\tInput(::input::Message)\n\n}\n\n\n\npub enum PrimitiveType {\n\n\tPointsList,\n\n\tLinesList,\n", "file_path": "src/display/raw/mod.rs", "rank": 99, "score": 23.14488155777526 } ]
Rust
keyvalues-parser/src/text/parse.rs
LovecraftianHorror/vdf-pest
a9f3966b7ec7b389a18351d6469749ce8c93253d
#![allow(renamed_and_removed_lints)] #![allow(clippy::unknown_clippy_lints)] #![allow(clippy::upper_case_acronyms)] use pest::{iterators::Pair as PestPair, Parser}; use pest_derive::Parser; use std::borrow::Cow; use crate::{error::Result, Obj, PartialVdf as Vdf, Value}; macro_rules! common_parsing { ($parser:ty, $rule:ty, $parse_escaped:expr) => { pub fn parse<'a>(s: &'a str) -> Result<Vdf<'a>> { let mut full_grammar = <$parser>::parse(<$rule>::vdf, s)?; let mut bases = Vec::new(); loop { let pair = full_grammar.next().unwrap(); if let <$rule>::base_macro = pair.as_rule() { let base_path_string = pair.into_inner().next().unwrap(); let base_path = match base_path_string.as_rule() { <$rule>::quoted_raw_string => base_path_string.into_inner().next().unwrap(), <$rule>::unquoted_string => base_path_string, _ => unreachable!("Prevented by grammar"), } .as_str(); bases.push(Cow::from(base_path)); } else { let (key, value) = parse_pair(pair); return Ok(Vdf { key, value, bases }); } } } fn parse_pair(grammar_pair: PestPair<'_, $rule>) -> (Cow<'_, str>, Value<'_>) { if let <$rule>::pair = grammar_pair.as_rule() { let mut grammar_pair_innards = grammar_pair.into_inner(); let grammar_string = grammar_pair_innards.next().unwrap(); let key = parse_string(grammar_string); let grammar_value = grammar_pair_innards.next().unwrap(); let value = Value::from(grammar_value); (key, value) } else { unreachable!("Prevented by grammar"); } } fn parse_string(grammar_string: PestPair<'_, $rule>) -> Cow<'_, str> { match grammar_string.as_rule() { <$rule>::quoted_string => { let quoted_inner = grammar_string.into_inner().next().unwrap(); if $parse_escaped { parse_escaped_string(quoted_inner) } else { Cow::from(quoted_inner.as_str()) } } <$rule>::unquoted_string => { let s = grammar_string.as_str(); Cow::from(s) } _ => unreachable!("Prevented by grammar"), } } fn parse_escaped_string(inner: PestPair<'_, $rule>) -> Cow<'_, str> { let s = inner.as_str(); if s.contains('\\') { let mut escaped = 
String::with_capacity(s.len()); let mut it = s.chars(); while let Some(ch) = it.next() { if ch == '\\' { match it.next() { Some('n') => escaped.push('\n'), Some('r') => escaped.push('\r'), Some('t') => escaped.push('\t'), Some('\\') => escaped.push('\\'), Some('\"') => escaped.push('\"'), _ => unreachable!("Prevented by grammar"), } } else { escaped.push(ch) } } Cow::from(escaped) } else { Cow::from(s) } } impl<'a> From<PestPair<'a, $rule>> for Value<'a> { fn from(grammar_value: PestPair<'a, $rule>) -> Self { match grammar_value.as_rule() { <$rule>::quoted_string | <$rule>::unquoted_string => { Self::Str(parse_string(grammar_value)) } <$rule>::obj => { let mut obj = Obj::new(); for grammar_pair in grammar_value.into_inner() { let (key, value) = parse_pair(grammar_pair); let entry = obj.entry(key).or_default(); (*entry).push(value); } Self::Obj(obj) } _ => unreachable!("Prevented by grammar"), } } } }; } pub use escaped::{parse as escaped_parse, PestError as EscapedPestError}; pub use raw::{parse as raw_parse, PestError as RawPestError}; impl<'a> Vdf<'a> { pub fn parse(s: &'a str) -> Result<Self> { escaped_parse(s) } pub fn parse_raw(s: &'a str) -> Result<Self> { raw_parse(s) } } impl<'a> crate::Vdf<'a> { pub fn parse(s: &'a str) -> Result<Self> { Ok(crate::Vdf::from(Vdf::parse(s)?)) } pub fn parse_raw(s: &'a str) -> Result<Self> { Ok(crate::Vdf::from(Vdf::parse_raw(s)?)) } } mod escaped { use super::*; #[derive(Parser)] #[grammar = "grammars/escaped.pest"] struct EscapedParser; pub type PestError = pest::error::Error<Rule>; common_parsing!(EscapedParser, Rule, true); } mod raw { use super::*; #[derive(Parser)] #[grammar = "grammars/raw.pest"] struct RawParser; pub type PestError = pest::error::Error<Rule>; common_parsing!(RawParser, Rule, false); }
#![allow(renamed_and_removed_lints)] #![allow(clippy::unknown_clippy_lints)] #![allow(clippy::upper_case_acronyms)] use pest::{iterators::Pair as PestPair, Parser}; use pest_derive::Parser; use std::borrow::Cow; use crate::{error::Result, Obj, PartialVdf as Vdf, Value}; macro_rules! common_parsing { ($parser:ty, $rule:ty, $parse_escaped:expr) => { pub fn parse<'a>(s: &'a str) -> Result<Vdf<'a>> { let mut full_grammar = <$parser>::parse(<$rule>::vdf, s)?; let mut bases = Vec::new(); loop { let pair = full_grammar.next().unwrap(); if let <$rule>::base_macro = pair.as_rule() { let base_path_string = pair.into_inner().next().unwrap(); let base_path = match base_path_string.as_rule() { <$rule>::quoted_raw_string => base_path_string.into_inner().next().unwrap(), <$rule>::unquoted_string => base_path_string, _ => unreachable!("Prevented by grammar"), } .as_str(); bases.push(Cow::from(base_path)); } else { let (key, value) = parse_pair(pair); return Ok(Vdf { key, value, bases }); } } } fn parse_pair(grammar_pair: PestPair<'_, $rule>) -> (Cow<'_, str>, Value<'_>) { if let <$rule>::pair = grammar_pair.as_rule() { let mut grammar_pair_innards = grammar_pair.into_inner(); let grammar_string = grammar_pair_innards.next().unwrap(); let key = parse_string(grammar_string); let grammar_value = grammar_pair_innards.next().unwrap(); let value = Value::from(grammar_value); (key, value) } else { unreachable!("Prevented by grammar"); } } fn parse_string(grammar_string: PestPair<'_, $rule>) -> Cow<'_, str> { match grammar_string.as_rule() { <$rule>::quoted_string => { let quoted_inner = grammar_string.into_inner().next().unwrap(); if $parse_escaped { parse_escaped_string(quoted_inner) } else { Cow::from(quoted_inner.as_str()) } } <$rule>::unquoted_string => { let s = grammar_string.as_str(); Cow::from(s) } _ => unreachable!("Prevented by grammar"), } } fn parse_escaped_string(inner: PestPair<'_, $rule>) -> Cow<'_, str> { let s = inner.as_str(); if s.contains('\\') { let mut escaped = 
String::with_capacity(s.len()); let mut it = s.chars(); while let Some(ch) = it.next() { if ch == '\\' { match it.next() { Some('n') => escaped.push('\n'), Some('r') => escaped.
rser)] #[grammar = "grammars/escaped.pest"] struct EscapedParser; pub type PestError = pest::error::Error<Rule>; common_parsing!(EscapedParser, Rule, true); } mod raw { use super::*; #[derive(Parser)] #[grammar = "grammars/raw.pest"] struct RawParser; pub type PestError = pest::error::Error<Rule>; common_parsing!(RawParser, Rule, false); }
push('\r'), Some('t') => escaped.push('\t'), Some('\\') => escaped.push('\\'), Some('\"') => escaped.push('\"'), _ => unreachable!("Prevented by grammar"), } } else { escaped.push(ch) } } Cow::from(escaped) } else { Cow::from(s) } } impl<'a> From<PestPair<'a, $rule>> for Value<'a> { fn from(grammar_value: PestPair<'a, $rule>) -> Self { match grammar_value.as_rule() { <$rule>::quoted_string | <$rule>::unquoted_string => { Self::Str(parse_string(grammar_value)) } <$rule>::obj => { let mut obj = Obj::new(); for grammar_pair in grammar_value.into_inner() { let (key, value) = parse_pair(grammar_pair); let entry = obj.entry(key).or_default(); (*entry).push(value); } Self::Obj(obj) } _ => unreachable!("Prevented by grammar"), } } } }; } pub use escaped::{parse as escaped_parse, PestError as EscapedPestError}; pub use raw::{parse as raw_parse, PestError as RawPestError}; impl<'a> Vdf<'a> { pub fn parse(s: &'a str) -> Result<Self> { escaped_parse(s) } pub fn parse_raw(s: &'a str) -> Result<Self> { raw_parse(s) } } impl<'a> crate::Vdf<'a> { pub fn parse(s: &'a str) -> Result<Self> { Ok(crate::Vdf::from(Vdf::parse(s)?)) } pub fn parse_raw(s: &'a str) -> Result<Self> { Ok(crate::Vdf::from(Vdf::parse_raw(s)?)) } } mod escaped { use super::*; #[derive(Pa
random
[ { "content": "/// Serialize the `value` into an IO stream of VDF text with a custom top level VDF key\n\n///\n\n/// # Errors\n\n///\n\n/// This will return an error if the input can't be represented with valid VDF\n\npub fn to_writer_with_key<W, T>(writer: &mut W, value: &T, key: &str) -> Result<()>\n\nwhere\n\n W: Write,\n\n T: Serialize,\n\n{\n\n _to_writer(writer, value, Some(key))\n\n}\n\n\n", "file_path": "keyvalues-serde/src/ser.rs", "rank": 0, "score": 241623.04620438826 }, { "content": "/// Attempts to serialize some input to VDF text with a custom top level VDF key\n\n///\n\n/// # Errors\n\n///\n\n/// This will return an error if the input can't be represented with valid VDF\n\npub fn to_string_with_key<T>(value: &T, key: &str) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buffer = Vec::new();\n\n to_writer_with_key(&mut buffer, value, key)?;\n\n let s = String::from_utf8(buffer).expect(\"Input was all valid UTF-8\");\n\n\n\n Ok(s)\n\n}\n\n\n\n// Expands out a list of types to a list of `serialize_<type>` methods\n\nmacro_rules! serialize_types_as_str {\n\n ( $( $types:ty ),* ) => {\n\n paste! 
{\n\n $(\n\n fn [<serialize_ $types >](self, v: $types) -> Result<()> {\n\n self.serialize_str(&v.to_string())\n\n }\n\n )*\n", "file_path": "keyvalues-serde/src/ser.rs", "rank": 1, "score": 214834.00894697563 }, { "content": "// Serialization process goes as follows:\n\n// value: &T\n\n// -> NaiveTokenStream\n\n// -> Vdf (fails on invalid VDF structure like nested sequences)\n\n// -> Formatted\n\n// Which is a bit of a long-winded process just to serialize some text, but it comes with\n\n// validation (NaiveTokenStream -> Vdf) and reuses portions from the parser (Vdf -> Formatted)\n\npub fn _to_writer<W, T>(writer: &mut W, value: &T, maybe_key: Option<&str>) -> Result<()>\n\nwhere\n\n W: Write,\n\n T: Serialize,\n\n{\n\n let mut serializer = Serializer::new();\n\n value.serialize(&mut serializer)?;\n\n\n\n if let Some(key) = maybe_key {\n\n match serializer.tokens.get(0) {\n\n // Replace the old key\n\n Some(NaiveToken::Str(_old_key)) => {\n\n serializer.tokens[0] = NaiveToken::Str(key.to_owned());\n\n }\n\n // Push on the key\n\n Some(_) => {\n\n serializer.tokens.insert(0, NaiveToken::Str(key.to_owned()));\n\n }\n\n None => {}\n\n }\n\n }\n\n\n\n let vdf = Vdf::try_from(&serializer.tokens)?;\n\n write!(writer, \"{}\", vdf)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/src/ser.rs", "rank": 2, "score": 210304.788164485 }, { "content": "pub fn render_throughput(c: &mut Criterion) {\n\n let vdf_text = read_app_info().unwrap();\n\n let vdf = Vdf::parse(&vdf_text).unwrap();\n\n\n\n let mut group = c.benchmark_group(\"render throughput\");\n\n group.throughput(Throughput::Bytes(vdf_text.len() as u64));\n\n group.bench_function(\"render\", |b| b.iter(|| vdf.to_string()));\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(throughput, parse_throughput, render_throughput);\n\ncriterion_main!(throughput);\n", "file_path": "keyvalues-parser/benches/parser.rs", "rank": 3, "score": 189557.19809286486 }, { "content": "pub fn parse_throughput(c: &mut Criterion) 
{\n\n let vdf_text = read_app_info().unwrap();\n\n\n\n let mut group = c.benchmark_group(\"parse throughput\");\n\n group.throughput(Throughput::Bytes(vdf_text.len() as u64));\n\n group.bench_function(\"parse\", |b| b.iter(|| Vdf::parse(black_box(&vdf_text))));\n\n group.finish();\n\n}\n\n\n", "file_path": "keyvalues-parser/benches/parser.rs", "rank": 4, "score": 189557.19809286486 }, { "content": "/// The same as [`from_str()`][from_str], but also returns the top level VDF key\n\npub fn from_str_with_key<'a, T: Deserialize<'a>>(s: &'a str) -> Result<(T, Key<'a>)> {\n\n let (mut deserializer, key) = Deserializer::new_with_key(s)?;\n\n let t = T::deserialize(&mut deserializer)?;\n\n\n\n if deserializer.is_empty() {\n\n Ok((t, key))\n\n } else {\n\n Err(Error::TrailingTokens)\n\n }\n\n}\n\n\n\n/// The struct that handles deserializing VDF into Rust structs\n\n///\n\n/// This typically doesn't need to be invoked directly when [`from_str()`][from_str] and\n\n/// [`from_str_with_key()`][from_str_with_key] can be used instead\n\n#[derive(Debug)]\n\npub struct Deserializer<'de> {\n\n tokens: Peekable<IntoIter<Token<'de>>>,\n\n}\n\n\n", "file_path": "keyvalues-serde/src/de/mod.rs", "rank": 5, "score": 185919.41191467436 }, { "content": "type ObjInnerPair<'a> = (Key<'a>, Vec<Value<'a>>);\n\n\n\n#[cfg_attr(test, derive(serde::Deserialize, serde::Serialize))]\n\n#[derive(Debug, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Obj<'a>(ObjInner<'a>);\n\n\n\nimpl<'a> Obj<'a> {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn into_inner(self) -> ObjInner<'a> {\n\n self.0\n\n }\n\n\n\n pub fn into_vdfs(self) -> IntoVdfs<'a> {\n\n IntoVdfs::new(self)\n\n }\n\n}\n\n\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 6, "score": 170445.7039772077 }, { "content": "fn get_version<'a>(controller_mappings: &'a Vdf<'a>) -> Option<&'a str> {\n\n controller_mappings\n\n .value\n\n .get_obj()?\n\n .get(\"version\")?\n\n .get(0)?\n\n 
.get_str()\n\n}\n\n\n", "file_path": "keyvalues-parser/examples/parse_mutate_render.rs", "rank": 7, "score": 160441.3803755931 }, { "content": "#[allow(dead_code)]\n\npub fn test_vdf_deserialization<'a, T>(vdf_text: &'a str, ideal_val: &T) -> BoxedResult<()>\n\nwhere\n\n T: fmt::Debug + PartialEq + Deserialize<'a>,\n\n{\n\n let deserialized_val: T = from_str(&vdf_text)?;\n\n assert_eq!(&deserialized_val, ideal_val, \"Failed deserializing\");\n\n Ok(())\n\n}\n\n\n\n// I'm too tired to be able to wrap my head around why just this one function is causing trouble\n", "file_path": "keyvalues-serde/tests/utils.rs", "rank": 8, "score": 159300.9789415183 }, { "content": "/// Serialize the `value` into an IO stream of VDF text\n\n///\n\n/// # Errors\n\n///\n\n/// This will return an error if the input can't be represented with valid VDF\n\npub fn to_writer<W, T>(writer: &mut W, value: &T) -> Result<()>\n\nwhere\n\n W: Write,\n\n T: Serialize,\n\n{\n\n _to_writer(writer, value, None)\n\n}\n\n\n", "file_path": "keyvalues-serde/src/ser.rs", "rank": 9, "score": 157997.29482493227 }, { "content": "#[allow(dead_code)]\n\npub fn test_vdf_serialization<T>(ideal_text: &str, val: &T) -> BoxedResult<()>\n\nwhere\n\n T: fmt::Debug + PartialEq + Serialize,\n\n{\n\n let val_text = to_string(val)?;\n\n assert_eq!(ideal_text, val_text, \"Failed serializing\");\n\n Ok(())\n\n}\n", "file_path": "keyvalues-serde/tests/utils.rs", "rank": 10, "score": 156587.15374208754 }, { "content": "fn write_str(writer: &mut impl Write, s: &str, render_type: RenderType) -> fmt::Result {\n\n writer.write_char('\"')?;\n\n\n\n match render_type {\n\n RenderType::Escaped => {\n\n for c in s.chars() {\n\n match c {\n\n '\\n' => writer.write_str(r\"\\n\"),\n\n '\\r' => writer.write_str(r\"\\r\"),\n\n '\\t' => writer.write_str(r\"\\t\"),\n\n '\\\"' => writer.write_str(r#\"\\\"\"#),\n\n '\\\\' => writer.write_str(r\"\\\\\"),\n\n reg => writer.write_char(reg),\n\n }?\n\n }\n\n }\n\n RenderType::Raw => 
writer.write_str(s)?,\n\n }\n\n\n\n writer.write_char('\"')\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 11, "score": 154673.9123813709 }, { "content": "// It doesn't really make sense to reserialize just the extracted content\n\npub fn ser(c: &mut Criterion) {\n\n let vdf_text = read_app_info().unwrap();\n\n let app_info_all: types::AppInfo = from_str_helper(&vdf_text);\n\n let ser_len = to_string_helper::<types::AppInfo>(&app_info_all).len();\n\n\n\n let mut group = c.benchmark_group(\"ser\");\n\n group.throughput(Throughput::Bytes(ser_len as u64));\n\n group.bench_function(\"all\", |b| {\n\n b.iter(|| to_string_helper::<types::AppInfo>(&app_info_all))\n\n });\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(throughput, de, ser);\n\ncriterion_main!(throughput);\n", "file_path": "keyvalues-serde/benches/ser_de.rs", "rank": 12, "score": 152476.540946181 }, { "content": "pub fn de(c: &mut Criterion) {\n\n let vdf_text = read_app_info().unwrap();\n\n\n\n let mut group = c.benchmark_group(\"de\");\n\n group.throughput(Throughput::Bytes(vdf_text.len() as u64));\n\n group.bench_function(\"all owned\", |b| {\n\n b.iter(|| from_str_helper::<types::AppInfo>(&vdf_text))\n\n });\n\n group.bench_function(\"all borrowed\", |b| {\n\n b.iter(|| from_str_helper::<types::AppInfoBorrow>(&vdf_text))\n\n });\n\n group.bench_function(\"extract single\", |b| {\n\n b.iter(|| from_str_helper::<types::AppInfoExtract>(&vdf_text))\n\n });\n\n group.finish();\n\n}\n\n\n", "file_path": "keyvalues-serde/benches/ser_de.rs", "rank": 13, "score": 152476.540946181 }, { "content": "fn update_version<'text>(controller_mappings: &mut Vdf<'text>, new_version: String) -> Option<()> {\n\n let version = controller_mappings\n\n .value\n\n .get_mut_obj()?\n\n .get_mut(\"version\")?\n\n .get_mut(0)?\n\n .get_mut_str()?\n\n .to_mut();\n\n\n\n *version = new_version;\n\n\n\n Some(())\n\n}\n\n\n", "file_path": "keyvalues-parser/examples/parse_mutate_render.rs", "rank": 14, "score": 
150708.72013043432 }, { "content": "pub fn from_reader_with_key<R: Read, T: DeserializeOwned>(mut rdr: R) -> Result<(T, String)> {\n\n let mut buffer = String::new();\n\n rdr.read_to_string(&mut buffer)?;\n\n\n\n from_str_with_key(&buffer).map(|(t, key)| (t, key.into_owned()))\n\n}\n\n\n", "file_path": "keyvalues-serde/src/de/mod.rs", "rank": 15, "score": 143780.45826795453 }, { "content": "#[test]\n\nfn invalid_vdf_obj_key() {\n\n let naive_token_stream = NaiveTokenStream(vec![\n\n NaiveToken::str(\"outer\"),\n\n NaiveToken::ObjBegin,\n\n NaiveToken::ObjBegin,\n\n NaiveToken::ObjEnd,\n\n NaiveToken::ObjEnd,\n\n ]);\n\n\n\n assert!(Vdf::try_from(&naive_token_stream).is_err());\n\n}\n\n\n", "file_path": "keyvalues-serde/src/tokens/tests.rs", "rank": 16, "score": 142776.04338785654 }, { "content": "/// Attempts to deserialize a string of VDF text to some type T\n\npub fn from_str<'a, T: Deserialize<'a>>(s: &'a str) -> Result<T> {\n\n let vals = from_str_with_key(s)?;\n\n Ok(vals.0)\n\n}\n\n\n", "file_path": "keyvalues-serde/src/de/mod.rs", "rank": 17, "score": 139771.1930106164 }, { "content": "pub fn read_asset_file(file_name: &str) -> BoxedResult<String> {\n\n let val = fs::read_to_string(Path::new(\"tests\").join(\"assets\").join(file_name))?;\n\n Ok(val)\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/utils.rs", "rank": 18, "score": 138755.23957387335 }, { "content": "type ObjInner<'a> = BTreeMap<Key<'a>, Vec<Value<'a>>>;\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 19, "score": 133845.84224499468 }, { "content": "/// Attempts to serialize some input to VDF text\n\n///\n\n/// # Errors\n\n///\n\n/// This will return an error if the input can't be represented with valid VDF\n\npub fn to_string<T>(value: &T) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buffer = Vec::new();\n\n to_writer(&mut buffer, value)?;\n\n let s = String::from_utf8(buffer).expect(\"Input was all valid UTF-8\");\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": 
"keyvalues-serde/src/ser.rs", "rank": 20, "score": 122181.94776409576 }, { "content": "// Mimics the behavior of the parse fuzzer test for regressions testing\n\nfn parse_fuzz_test(file_name: &str) -> BoxedResult<()> {\n\n let crash_file = Path::new(\"tests\").join(\"crash_outputs\").join(file_name);\n\n let contents = fs::read_to_string(crash_file)?;\n\n\n\n // This should be infallible unless the grammar changes in which case the test is no longer\n\n // valid\n\n let parsed = Vdf::parse(&contents).expect(\"Input has to be valid here\");\n\n let vdf_text = parsed.to_string();\n\n let reparsed = Vdf::parse(&vdf_text)?;\n\n assert_eq!(parsed, reparsed);\n\n\n\n Ok(())\n\n}\n\n\n\n// Generates a tests for each `name` that indicates both the test name and file name\n\nmacro_rules! parse_fuzzer_crash_infer_files {\n\n ( $( $name:ident ),* ) => {\n\n $(\n\n #[test]\n\n fn $name() -> BoxedResult<()> {\n\n parse_fuzz_test(stringify!($name))\n\n }\n\n )*\n\n };\n\n}\n\n\n\nparse_fuzzer_crash_infer_files!(crash_1, crash_2, crash_3);\n", "file_path": "keyvalues-parser/tests/fuzzer_crashes.rs", "rank": 21, "score": 121851.83001648431 }, { "content": "fn find_invalid_raw_char(s: &str) -> Option<char> {\n\n s.chars().find(|&c| c == '\"').to_owned()\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 22, "score": 121571.17591043272 }, { "content": "// Snapshots both parsing and re-rendering the text from a file\n\nfn snapshot_test_parse_and_render(file_name: &str) -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(file_name)?;\n\n let vdf = Vdf::parse(&vdf_text)?;\n\n assert_ron_snapshot!(vdf);\n\n\n\n let rendered = vdf.to_string();\n\n assert_snapshot!(rendered);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/tests.rs", "rank": 23, "score": 120127.43055970175 }, { "content": "fn snapshot_test_raw_parse_render(file_name: &str) -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(file_name)?;\n\n let vdf = 
Vdf::parse_raw(&vdf_text)?;\n\n assert_ron_snapshot!(vdf);\n\n\n\n let mut buf = String::new();\n\n vdf.render_raw(&mut buf)?;\n\n assert_snapshot!(buf);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/tests.rs", "rank": 24, "score": 118474.47320104839 }, { "content": "// Snapshots both parsing and re-rendering the text from a file\n\nfn snapshot_test_partial_parse_and_render(file_name: &str) -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(file_name)?;\n\n let vdf = PartialVdf::parse(&vdf_text)?;\n\n assert_ron_snapshot!(vdf);\n\n\n\n let rendered = vdf.to_string();\n\n assert_snapshot!(rendered);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/tests.rs", "rank": 25, "score": 118474.47320104839 }, { "content": "fn read_asset_file(file_name: &str) -> BoxedResult<String> {\n\n let val = fs::read_to_string(Path::new(\"tests\").join(\"assets\").join(file_name))?;\n\n Ok(val)\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/tests.rs", "rank": 26, "score": 118046.20092137693 }, { "content": "fn snapshot_test_partial_raw_parse_render(file_name: &str) -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(file_name)?;\n\n let vdf = PartialVdf::parse_raw(&vdf_text)?;\n\n assert_ron_snapshot!(vdf);\n\n\n\n let mut buf = String::new();\n\n vdf.render_raw(&mut buf)?;\n\n assert_snapshot!(buf);\n\n\n\n Ok(())\n\n}\n\n\n\n// Generates tests where the `name`s indicate the unit test name and the file without an extension\n\nmacro_rules! 
parse_test_generator {\n\n ( $test_type:ident, $( $name:ident ),* ) => {\n\n $(\n\n #[test]\n\n fn $name() -> BoxedResult<()> {\n\n ($test_type)(&format!(\"{}.vdf\", stringify!($name)))\n\n }\n", "file_path": "keyvalues-parser/src/text/tests.rs", "rank": 27, "score": 116888.60826102665 }, { "content": "fn write_pair<'a>(\n\n writer: &mut impl Write,\n\n num_indents: usize,\n\n key: &str,\n\n value: &Value<'a>,\n\n render_type: RenderType,\n\n) -> fmt::Result {\n\n // Write the indented key\n\n writer.write_str(&multiple_char('\\t', num_indents))?;\n\n write_str(writer, key, render_type)?;\n\n\n\n // Followed by the value\n\n if value.is_str() {\n\n writer.write_char('\\t')?;\n\n } else {\n\n writer.write_char('\\n')?;\n\n }\n\n value.write_indented(writer, num_indents, render_type)?;\n\n\n\n writer.write_char('\\n')\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 28, "score": 113647.31478538223 }, { "content": "fn write_obj<'a>(\n\n writer: &mut impl Write,\n\n num_indents: usize,\n\n obj: &Obj<'a>,\n\n render_type: RenderType,\n\n) -> fmt::Result {\n\n for (key, values) in obj.iter() {\n\n for value in values {\n\n write_pair(writer, num_indents, key, value, render_type)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl<'a> fmt::Display for PartialVdf<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self._render(f, RenderType::Raw)\n\n }\n\n}\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 29, "score": 113516.62473463849 }, { "content": "#[test]\n\nfn invalid_vdf_seq_key() {\n\n let naive_token_stream = NaiveTokenStream(vec![\n\n NaiveToken::str(\"outer\"),\n\n NaiveToken::ObjBegin,\n\n NaiveToken::SeqBegin,\n\n NaiveToken::SeqEnd,\n\n NaiveToken::ObjEnd,\n\n ]);\n\n\n\n assert!(Vdf::try_from(&naive_token_stream).is_err());\n\n}\n\n\n", "file_path": "keyvalues-serde/src/tokens/tests.rs", "rank": 30, "score": 111778.57432554619 }, { "content": "fn read_asset_file(file_name: &str) -> 
std::io::Result<String> {\n\n let asset_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"examples\")\n\n .join(file_name);\n\n fs::read_to_string(asset_path)\n\n}\n\n\n", "file_path": "keyvalues-parser/examples/parse_mutate_render.rs", "rank": 31, "score": 111229.0102989245 }, { "content": "#[test]\n\nfn str_when_wanting_obj() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"string_container.vdf\")?;\n\n let result: Result<Container<HashMap<String, String>>> = from_str(&vdf_text);\n\n assert!(result.is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/malformed.rs", "rank": 32, "score": 110563.7379351744 }, { "content": "#[test]\n\nfn obj_when_wanting_str() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"obj_container.vdf\")?;\n\n let result: Result<Container<String>> = from_str(&vdf_text);\n\n assert!(result.is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/malformed.rs", "rank": 33, "score": 110563.7379351744 }, { "content": "#[test]\n\nfn str_when_wanting_top_level_obj() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"top_level_string.vdf\")?;\n\n let result: Result<Container<String>> = from_str(&vdf_text);\n\n assert!(result.is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/malformed.rs", "rank": 34, "score": 106446.69274140101 }, { "content": "fn from_str_helper<'de, T>(s: &'de str) -> T\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n from_str(black_box(s)).unwrap()\n\n}\n\n\n", "file_path": "keyvalues-serde/benches/ser_de.rs", "rank": 35, "score": 99570.32226536826 }, { "content": "fn read_asset_file(file_name: &str) -> std::io::Result<String> {\n\n let asset_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"examples\")\n\n .join(file_name);\n\n fs::read_to_string(asset_path)\n\n}\n\n\n", "file_path": "keyvalues-serde/examples/controller_mappings.rs", "rank": 36, "score": 87339.51742506276 }, { "content": "pub fn from_reader<R: Read, T: 
DeserializeOwned>(rdr: R) -> Result<T> {\n\n from_reader_with_key(rdr).map(|(t, _)| t)\n\n}\n\n\n", "file_path": "keyvalues-serde/src/de/mod.rs", "rank": 37, "score": 82428.2586414847 }, { "content": "#[test]\n\nfn token_stream_from_vdf() {\n\n let s = r#\"\n\n\"Outer Key\"\n\n{\n\n \"Inner Key\" \"Inner Value\"\n\n \"Inner Key\"\n\n {\n\n }\n\n}\n\n \"#;\n\n let vdf = Vdf::parse(s).unwrap();\n\n let token_stream = TokenStream::from(vdf);\n\n assert_eq!(\n\n token_stream,\n\n TokenStream(vec![\n\n Token::Key(Cow::from(\"Outer Key\")),\n\n Token::ObjBegin,\n\n Token::Key(Cow::from(\"Inner Key\")),\n\n Token::SeqBegin,\n\n Token::Str(Cow::from(\"Inner Value\")),\n\n Token::ObjBegin,\n\n Token::ObjEnd,\n\n Token::SeqEnd,\n\n Token::ObjEnd,\n\n ])\n\n );\n\n}\n", "file_path": "keyvalues-serde/src/tokens/tests.rs", "rank": 38, "score": 82313.39127230697 }, { "content": "#[test]\n\nfn complex_vdfs_iteration() {\n\n let key1 = Cow::from(\"key1\");\n\n let key4 = Cow::from(\"key4\");\n\n let val1 = Value::Str(Cow::from(\"val1\"));\n\n let val2 = Value::Str(Cow::from(\"val2\"));\n\n let empty_obj = Value::Obj(Obj::new());\n\n\n\n let pairs = vec![\n\n (key1.clone(), vec![val1.clone(), val2.clone()]),\n\n (Cow::from(\"key2\"), Vec::new()),\n\n (Cow::from(\"key3\"), Vec::new()),\n\n (key4.clone(), vec![empty_obj.clone()]),\n\n ];\n\n\n\n let obj: Obj = pairs.into_iter().collect();\n\n let vdfs: Vec<_> = obj.into_vdfs().collect();\n\n\n\n assert_eq!(\n\n vdfs,\n\n vec![\n\n Vdf::new(key1.clone(), val1),\n\n Vdf::new(key1, val2),\n\n Vdf::new(key4, empty_obj),\n\n ]\n\n );\n\n}\n", "file_path": "keyvalues-parser/src/tests.rs", "rank": 39, "score": 82242.87589848746 }, { "content": "#[test]\n\nfn simple_vdfs_iteration() {\n\n let inner = BTreeMap::new();\n\n let obj = Obj(inner);\n\n let mut vdfs_iter = obj.into_vdfs();\n\n\n\n assert_eq!(vdfs_iter.next(), None);\n\n}\n\n\n", "file_path": "keyvalues-parser/src/tests.rs", "rank": 40, "score": 82242.87589848746 }, { "content": 
"#[test]\n\nfn invalid_vdf_nested_seq() {\n\n let naive_token_stream = NaiveTokenStream(vec![\n\n NaiveToken::str(\"outer\"),\n\n NaiveToken::ObjBegin,\n\n NaiveToken::str(\"nested sequence\"),\n\n NaiveToken::SeqBegin,\n\n NaiveToken::str(\"the calm before the storm\"),\n\n NaiveToken::SeqBegin,\n\n NaiveToken::SeqEnd,\n\n NaiveToken::SeqEnd,\n\n NaiveToken::ObjEnd,\n\n ]);\n\n\n\n assert!(Vdf::try_from(&naive_token_stream).is_err());\n\n}\n\n\n", "file_path": "keyvalues-serde/src/tokens/tests.rs", "rank": 41, "score": 80855.62382752352 }, { "content": "#[test]\n\nfn vdf_from_token_stream_basics() {\n\n let naive_token_stream = NaiveTokenStream(vec![\n\n NaiveToken::str(\"outer\"),\n\n NaiveToken::ObjBegin,\n\n NaiveToken::str(\"sequence start\"),\n\n NaiveToken::SeqBegin,\n\n NaiveToken::ObjBegin,\n\n NaiveToken::str(\"inner key\"),\n\n NaiveToken::str(\"inner val\"),\n\n NaiveToken::ObjEnd,\n\n NaiveToken::str(\"some other inner val\"),\n\n NaiveToken::SeqEnd,\n\n NaiveToken::ObjEnd,\n\n ]);\n\n\n\n let ideal = {\n\n let mut sequence_obj = Obj::new();\n\n sequence_obj.insert(\n\n Cow::from(\"inner key\"),\n\n vec![Value::Str(Cow::from(\"inner val\"))],\n", "file_path": "keyvalues-serde/src/tokens/tests.rs", "rank": 42, "score": 80855.62382752352 }, { "content": "#[test]\n\nfn borrowed_escaped_string() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"escaped_string.vdf\")?;\n\n let vdf: Container<Cow<str>> = from_str(&vdf_text)?;\n\n\n\n assert_eq!(vdf, Container::new(Cow::from(\"tab\\tseparated\")));\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 43, "score": 78722.91663163912 }, { "content": "fn read_app_info() -> Result<String, std::io::Error> {\n\n let vdf_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"assets\")\n\n .join(\"app_info.vdf\");\n\n fs::read_to_string(vdf_path)\n\n}\n\n\n", "file_path": "keyvalues-parser/benches/parser.rs", "rank": 44, "score": 78681.83393562782 }, { 
"content": "#[test]\n\nfn check_deserialization_key() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"hashmap_top_level.vdf\")?;\n\n let (_, key): (HashMap<u64, String>, Cow<str>) = from_str_with_key(&vdf_text)?;\n\n\n\n assert_eq!(key, \"Key\", \"Incorrect deserialization key\");\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 45, "score": 78501.99173598255 }, { "content": "fn multiple_char(c: char, amount: usize) -> String {\n\n std::iter::repeat(c).take(amount).collect()\n\n}\n\n\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 46, "score": 71605.89468791265 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let Args { vdf_file } = Args::parse();\n\n\n\n let contents = std::fs::read_to_string(vdf_file)?;\n\n match Vdf::parse(&contents) {\n\n Ok(contents) => {\n\n println!(\"{:#?}\", contents);\n\n }\n\n Err(err) => {\n\n eprintln!(\"Failed parsing with escaped characters: {:#?}\", err);\n\n\n\n match Vdf::parse_with_opts(\n\n &contents,\n\n Opts {\n\n parse_escaped_characters: false,\n\n },\n\n ) {\n\n Ok(contents) => {\n\n println!(\"{:#?}\", contents);\n\n }\n", "file_path": "parse-vdf-file/src/main.rs", "rank": 47, "score": 71062.77359790239 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let vdf_text = read_asset_file(\"parse_mutate_render.vdf\")?;\n\n let mut controller_mappings = Vdf::parse(&vdf_text)?;\n\n\n\n // Reading information from VDF:\n\n // This involves a lot of `Option`s so it's moved inside a function\n\n let version = get_version(&controller_mappings).expect(\"controller_mappings must have version\");\n\n println!(\"Old Controller Mappings Version: {}\", version);\n\n\n\n // Mutating information from VDF:\n\n // Updating the version\n\n update_version(&mut controller_mappings, \"3\".to_string())\n\n .expect(\"controller_mappings must have version\");\n\n\n\n // Render the VDF:\n\n // `Vdf` implements `Display` which also provides 
`.to_string()`\n\n println!(\"Updated Controller Mappings:\\n{}\", controller_mappings);\n\n assert_eq!(get_version(&controller_mappings), Some(\"3\"));\n\n\n\n Ok(())\n\n}\n", "file_path": "keyvalues-parser/examples/parse_mutate_render.rs", "rank": 48, "score": 68455.55162573911 }, { "content": "# _keyvalues-parser_\n\n\n\n`keyvalues-parser` uses [`pest`](https://lib.rs/crates/pest) to parse\n\n[VDF text v1 and v2](https://developer.valvesoftware.com/wiki/KeyValues)\n\nfiles to an untyped Rust structure to ease manipulation and navigation. The\n\nparser provides an untyped `Vdf` representation as well as a linear\n\n`TokenStream`\n\n\n\nThe library is primarily used in conjunction with\n\n[`keyvalues-serde`](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-serde)\n\nwhich provides a more ergonommic (yet more limiting) means of dealing with VDF\n\ntext\n\n\n\n## Installation\n\n\n\n**Note: this requires at least Rust `1.42.0`**\n\n\n\nJust add the library to your `Cargo.toml`\n\n\n\n```toml\n\n[dependencies]\n\nkeyvalues-parser = \"0.1.0\"\n\n```\n\n\n\n## Usage\n\n\n\n<!-- TODO: just use a badge for this. 
The link can very easily fall out of date -->\n\nThere is documentation available\n\n[here](https://docs.rs/keyvalues-parser/0.1.0/keyvalues_parser/) and there are\n\nexamples available in the\n\n[examples directory](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-parser/examples)\n\n\n\n### Quickstart\n\n\n\n`loginusers.vdf`\n\n\n\n```vdf\n\n\"users\"\n\n{\n\n \"12345678901234567\"\n\n {\n\n \"AccountName\" \"ACCOUNT_NAME\"\n\n \"PersonaName\" \"PERSONA_NAME\"\n\n \"RememberPassword\" \"1\"\n\n \"MostRecent\" \"1\"\n\n \"Timestamp\" \"1234567890\"\n\n }\n\n}\n\n```\n\n\n\n`main.rs`\n\n\n\n```rust\n\nuse keyvalues_parser::Vdf;\n\n\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let vdf_text = std::fs::read_to_string(\"loginusers.vdf\")?;\n\n let vdf = Vdf::parse(&vdf_text)?;\n\n assert_eq!(Some(\"12345678901234567\"), vdf.value.keys().next());\n\n\n\n Ok(())\n\n}\n\n```\n\n\n\n## Limitations\n\n\n\nVDF text is drastically underspecified. This leads to the following liberties\n\nbeing taken\n\n\n\n- Not respecting the ordering of key-value pairs, where the pairs are stored in a `BTreeMap` that sorts the values based on the key\n\n- Because of limitations in representing sequences, an empty `Vec` of values will be rendered as a missing keyvalue pair\n\n\n", "file_path": "keyvalues-parser/README.md", "rank": 49, "score": 51247.648908880954 }, { "content": "## Benchmarks\n\n\n\nA set of basic benchmarks can be found in the \n\n[benches directory](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-parser/benches)\n\n\n\nThese just test timing and throughput for both parsing and rendering of a\n\nfairly typical VDF file\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n - Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n - MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### 
Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall\n\nbe dual licensed as above, without any additional terms or conditions.\n", "file_path": "keyvalues-parser/README.md", "rank": 50, "score": 51229.855200462705 }, { "content": "#[test]\n\nfn non_finite_float_serialization_failure() {\n\n let vdf = Container::new(std::f32::NAN);\n\n if let Err(Error::NonFiniteFloat(f)) = to_string(&vdf) {\n\n assert!(f.is_nan());\n\n } else {\n\n panic!(\"Serialization should fail with NaN float\");\n\n }\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 51, "score": 49556.67314407679 }, { "content": "#[test]\n\nfn wants_too_many_members() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"string_container.vdf\")?;\n\n let result: Result<Pair> = from_str(&vdf_text);\n\n assert!(result.is_err());\n\n Ok(())\n\n}\n", "file_path": "keyvalues-serde/tests/malformed.rs", "rank": 52, "score": 48976.526965528654 }, { "content": "#[test]\n\nfn extract_only_some_members() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"multiple_members.vdf\")?;\n\n let vdf: Container<String> = from_str(&vdf_text)?;\n\n\n\n assert_eq!(vdf, Container::new(String::from(\"Value\")));\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 53, "score": 48976.526965528654 }, { "content": "#[test]\n\nfn incorrect_seq_length() -> BoxedResult<()> {\n\n let vdf_len_one = read_asset_file(\"string_container.vdf\")?;\n\n let len_two: Result<Container<(String, String)>> = from_str(&vdf_len_one);\n\n assert!(len_two.is_err());\n\n\n\n let vdf_len_two = read_asset_file(\"sequence_string_double.vdf\")?;\n\n let len_one: Result<Container<(String,)>> = from_str(&vdf_len_two);\n\n assert!(len_one.is_err());\n\n let len_three: Result<Container<(String, String, String)>> = from_str(&vdf_len_two);\n\n 
assert!(len_three.is_err());\n\n Ok(())\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct Pair {\n\n pub first: String,\n\n pub second: String,\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/malformed.rs", "rank": 54, "score": 48976.526965528654 }, { "content": "#[test]\n\nfn hashmap_top_level() -> BoxedResult<()> {\n\n let val = hashmap! {\n\n 0 => \"Foo\",\n\n 1 => \"Bar\",\n\n 2 => \"Baz\",\n\n };\n\n let vdf_text = read_asset_file(\"hashmap_top_level.vdf\")?;\n\n test_vdf_deserialization(&vdf_text, &val)?;\n\n\n\n // Using a hashmap on the top level has no way of indicating what the key should be so it must\n\n // be passed in separately\n\n let val_text = to_string_with_key(&val, \"Key\")?;\n\n assert_eq!(vdf_text, val_text, \"Failed serializing\");\n\n Ok(())\n\n}\n\n\n\n// Deserialization throws away the top level key, so `from_str_with_key` is needed to read it\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 55, "score": 48293.62551041589 }, { "content": "#[test]\n\nfn snapshot_writing_to_file() -> BoxedResult<()> {\n\n let vdf_struct = Container::new(123);\n\n let dir = TempDir::new(\"keyvalues-serde\")?;\n\n let file_path = dir.path().join(\"sample.vdf\");\n\n\n\n // Write a vdf to a file then verify it's correct\n\n let mut file = File::create(&file_path)?;\n\n to_writer(&mut file, &vdf_struct)?;\n\n let vdf_text = fs::read_to_string(&file_path)?;\n\n assert_snapshot!(vdf_text);\n\n\n\n // And the same with a custom key\n\n let mut file = File::create(&file_path)?;\n\n to_writer_with_key(&mut file, &vdf_struct, \"Custom\")?;\n\n let vdf_text = fs::read_to_string(&file_path)?;\n\n assert_snapshot!(vdf_text);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 56, "score": 48293.62551041589 }, { "content": "#[test]\n\nfn borrowed_string_is_borrowed() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"string_container.vdf\")?;\n\n let vdf: BorrowedString = from_str(&vdf_text)?;\n\n\n\n 
assert!(matches!(vdf.inner, Cow::Borrowed(_)));\n\n Ok(())\n\n}\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 57, "score": 48293.62551041589 }, { "content": "#[test]\n\nfn non_finite_float_deserialization_failure() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"subnormal_float.vdf\")?;\n\n if let Err(Error::NonFiniteFloat(f)) = from_str::<Container<f32>>(&vdf_text) {\n\n assert!(f.is_infinite());\n\n } else {\n\n panic!(\"Deserialization should fail with inf float\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 58, "score": 47018.32498220987 }, { "content": "#[test]\n\nfn non_normal_but_finite_float_serialization() -> BoxedResult<()> {\n\n let vdf_text = read_asset_file(\"zero_float.vdf\")?;\n\n let vdf: Container<f32> = from_str(&vdf_text)?;\n\n\n\n assert_eq!(vdf, Container::new(0.0f32));\n\n Ok(())\n\n}\n\n\n", "file_path": "keyvalues-serde/tests/special_cases.rs", "rank": 59, "score": 47018.32498220987 }, { "content": "fn to_string_helper<T>(t: &T) -> String\n\nwhere\n\n T: Serialize,\n\n{\n\n to_string(black_box(t)).unwrap()\n\n}\n\n\n", "file_path": "keyvalues-serde/benches/ser_de.rs", "rank": 60, "score": 45286.589077922385 }, { "content": "fn read_app_info() -> Result<String, std::io::Error> {\n\n let vdf_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"assets\")\n\n .join(\"app_info.vdf\");\n\n fs::read_to_string(vdf_path)\n\n}\n\n\n", "file_path": "keyvalues-serde/benches/ser_de.rs", "rank": 61, "score": 42609.37166330261 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let vdf_text = read_asset_file(\"controller_mappings.vdf\")?;\n\n\n\n // Deserialize the VDF file\n\n let mut mappings: ControllerMappings = keyvalues_serde::from_str(&vdf_text)?;\n\n println!(\"Deserialized representation:\");\n\n println!(\"{:#?}\", mappings);\n\n\n\n // Modify the VDF to your heart's content\n\n mappings.game = 
String::from(\"Custom layout\");\n\n\n\n // Serialize it back to VDF text\n\n let modified_text = keyvalues_serde::to_string(&mappings)?;\n\n println!(\"Reserialized representation:\");\n\n println!(\"{}\", modified_text);\n\n\n\n Ok(())\n\n}\n", "file_path": "keyvalues-serde/examples/controller_mappings.rs", "rank": 62, "score": 42475.43975921004 }, { "content": "use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};\n\nuse keyvalues_parser::Vdf;\n\n\n\nuse std::{fs, path::Path};\n\n\n", "file_path": "keyvalues-parser/benches/parser.rs", "rank": 63, "score": 40114.190597556575 }, { "content": "use clap::Parser;\n\nuse keyvalues_parser::{text::parse::Opts, Vdf};\n\n\n\n#[derive(Parser)]\n", "file_path": "parse-vdf-file/src/main.rs", "rank": 64, "score": 33827.53829387878 }, { "content": " Err(err) => {\n\n eprintln!(\"Failed parsing without escaped characters: {:#?}\", err);\n\n return Err(err.into());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "parse-vdf-file/src/main.rs", "rank": 65, "score": 33819.27231663515 }, { "content": "#[cfg_attr(test, derive(serde::Deserialize, serde::Serialize))]\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct PartialVdf<'a> {\n\n pub key: Key<'a>,\n\n pub value: Value<'a>,\n\n pub bases: Vec<Cow<'a, str>>,\n\n}\n\n\n\nimpl<'a> Vdf<'a> {\n\n /// Creates a [`Vdf`][Vdf] using a provided key and value\n\n ///\n\n /// ```\n\n /// use keyvalues_parser::{Vdf, Value};\n\n /// use std::borrow::Cow;\n\n ///\n\n /// let vdf = Vdf::new(Cow::from(\"Much Key\"), Value::Str(Cow::from(\"Such Wow\")));\n\n /// // prints\n\n /// // \"Much Key\" \"Such Wow\"\n\n /// println!(\"{}\", vdf);\n\n /// ```\n\n pub fn new(key: Key<'a>, value: Value<'a>) -> Self {\n\n Self { key, value }\n\n }\n\n}\n\n\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 66, "score": 33020.078805551944 }, { "content": " iter::FromIterator,\n\n ops::{Deref, DerefMut},\n\n};\n\n\n\npub mod 
error;\n\n#[cfg(test)]\n\nmod tests;\n\npub mod text;\n\n\n\n/// A Key is simply an alias for `Cow<str>`\n\npub type Key<'a> = Cow<'a, str>;\n\n\n\n/// A loosely typed representation of VDF text\n\n///\n\n/// `Vdf` is represented as a single [`Key`][Key] mapped to a single [`Value`][Value]\n\n///\n\n/// ## Parse\n\n///\n\n/// `Vdf`s will generally be created through the use of [`Vdf::parse()`][Vdf::parse] which takes a\n\n/// string representing VDF text and attempts to parse it to a `Vdf` representation.\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 67, "score": 33015.74786266424 }, { "content": " type Item = Vdf<'a>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // Iteration will pop the first pair off `current_entry` if it's set and then falls back to\n\n // reading in a new `current_entry` from `it`. If `it` is exhausted then we're done\n\n loop {\n\n match self.current_entry.take() {\n\n // There is a pair to return\n\n Some((key, mut values)) if !values.is_empty() => {\n\n let value = values.pop().expect(\"values isn't empty\");\n\n self.current_entry = Some((key.clone(), values));\n\n return Some(Vdf::new(key, value));\n\n }\n\n _ => match self.it.next() {\n\n // Store the next entry. 
Flip the values so that `pop`ing returns correct order\n\n Some((key, values)) => {\n\n self.current_entry = Some((key, values.into_iter().rev().collect()));\n\n }\n\n // Fin\n\n None => {\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 68, "score": 33015.51022718678 }, { "content": " ///\n\n /// ```\n\n /// use keyvalues_parser::{Obj, Value};\n\n ///\n\n /// let value = Value::Obj(Obj::new());\n\n /// assert_eq!(value.unwrap_obj(), Obj::new());\n\n /// ```\n\n ///\n\n /// ```should_panic\n\n /// use keyvalues_parser::Value;\n\n /// use std::borrow::Cow;\n\n ///\n\n /// let value = Value::Str(Cow::from(\"D'Oh\"));\n\n /// value.unwrap_obj(); // <-- panics\n\n /// ```\n\n pub fn unwrap_obj(self) -> Obj<'a> {\n\n self.expect_obj(\"Called `unwrap_obj` on a `Value::Str` variant\")\n\n }\n\n\n\n /// Refer to [Value::unwrap_str]. Same situation, but with a custom message\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 69, "score": 33015.04809892561 }, { "content": " return None;\n\n }\n\n },\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Enum representing all valid VDF values\n\n///\n\n/// VDF is composed of [`Key`s][Key] and their respective [`Value`s][Value] where this represents\n\n/// the latter. A value is either going to be a `Str(Cow<str>)`, or an `Obj(Obj)` that contains a\n\n/// list of keys and values.\n\n#[cfg_attr(test, derive(serde::Deserialize, serde::Serialize))]\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub enum Value<'a> {\n\n Str(Cow<'a, str>),\n\n Obj(Obj<'a>),\n\n}\n\n\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 70, "score": 33014.44442873327 }, { "content": "/// \"Inner Key\"\n\n/// {\n\n/// }\n\n/// }\n\n/// \"#;\n\n/// let mut parsed = Vdf::parse(vdf_text)?;\n\n///\n\n/// // Mutate: i.e. 
remove the last \"Inner Key\" pair\n\n/// parsed\n\n/// .value\n\n/// .get_mut_obj()\n\n/// .unwrap()\n\n/// .get_mut(\"Inner Key\")\n\n/// .unwrap()\n\n/// .pop();\n\n///\n\n/// // Render: prints\n\n/// // \"Outer Key\"\n\n/// // {\n\n/// // \"Inner Key\" \"Inner Value\"\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 71, "score": 33014.13964054304 }, { "content": " /// Gets the inner `&Obj` if this value is a `Value::Obj`\n\n pub fn get_obj(&self) -> Option<&Obj> {\n\n if let Self::Obj(obj) = self {\n\n Some(obj)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Gets the inner `&mut str` if this is a `Value::Str`\n\n pub fn get_mut_str(&mut self) -> Option<&mut Cow<'a, str>> {\n\n if let Self::Str(s) = self {\n\n Some(s)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Gets the inner `&mut Obj` if this is a `Value::Obj`\n\n pub fn get_mut_obj(&mut self) -> Option<&mut Obj<'a>> {\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 72, "score": 33014.097558384936 }, { "content": " /// assert_eq!(value.unwrap_str(), \"Sample text\");\n\n /// ```\n\n ///\n\n /// ```should_panic\n\n /// use keyvalues_parser::{Value, Obj};\n\n ///\n\n /// let value = Value::Obj(Obj::new());\n\n /// value.unwrap_str(); // <-- panics\n\n /// ```\n\n pub fn unwrap_str(self) -> Cow<'a, str> {\n\n self.expect_str(\"Called `unwrap_str` on a `Value::Obj` variant\")\n\n }\n\n\n\n /// Unwraps the [`Obj`][Obj] from the `Value::Obj`\n\n ///\n\n /// # Panics\n\n ///\n\n /// If the variant was `Value::Str`\n\n ///\n\n /// # Examples\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 73, "score": 33014.02373523063 }, { "content": "impl<'a> Value<'a> {\n\n /// Returns if the current value is the `Str` variant\n\n pub fn is_str(&self) -> bool {\n\n self.get_str().is_some()\n\n }\n\n\n\n /// Returns if the current value is the `Obj` variant\n\n pub fn is_obj(&self) -> bool {\n\n self.get_obj().is_some()\n\n }\n\n\n\n /// Gets the inner `&str` if this is a `Value::Str`\n\n pub fn 
get_str(&self) -> Option<&str> {\n\n if let Self::Str(s) = self {\n\n Some(s)\n\n } else {\n\n None\n\n }\n\n }\n\n\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 74, "score": 33013.42435389179 }, { "content": " fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\npub struct IntoVdfs<'a> {\n\n current_entry: Option<ObjInnerPair<'a>>,\n\n it: IntoIter<Key<'a>, Vec<Value<'a>>>,\n\n}\n\n\n\nimpl<'a> IntoVdfs<'a> {\n\n fn new(obj: Obj<'a>) -> Self {\n\n Self {\n\n current_entry: None,\n\n it: obj.into_inner().into_iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for IntoVdfs<'a> {\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 75, "score": 33012.644527402685 }, { "content": "//!\n\n//! # Limitations\n\n//!\n\n//! VDF text is drastically underspecified. This leads to the following liberties\n\n//! being taken\n\n//!\n\n//! - Not respecting the ordering of key-value pairs, where the pairs are stored in a `BTreeMap` that sorts the values based on the key\n\n//! - Because of limitations in representing sequences, an empty `Vec` of values will be rendered as a missing keyvalue pair\n\n//!\n\n//! # Benchmarks\n\n//!\n\n//! A set of basic benchmarks can be found in the\n\n//! [benches directory](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-parser/benches)\n\n//!\n\n//! These just test timing and throughput for both parsing and rendering of a\n\n//! 
fairly typical VDF file\n\n\n\nuse std::{\n\n borrow::Cow,\n\n collections::{btree_map::IntoIter, BTreeMap},\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 76, "score": 33012.61253432747 }, { "content": " if let Self::Obj(obj) = self {\n\n Some(obj)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Unwraps the `Cow<str>` from the `Value::Str`\n\n ///\n\n /// # Panics\n\n ///\n\n /// If the variant was `Value::Obj`\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use keyvalues_parser::Value;\n\n /// use std::borrow::Cow;\n\n ///\n\n /// let value = Value::Str(Cow::from(\"Sample text\"));\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 77, "score": 33012.388867100446 }, { "content": "/// // }\n\n/// println!(\"{}\", parsed);\n\n/// # Ok::<(), keyvalues_parser::error::Error>(())\n\n/// ```\n\n#[cfg_attr(test, derive(serde::Deserialize, serde::Serialize))]\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct Vdf<'a> {\n\n pub key: Key<'a>,\n\n pub value: Value<'a>,\n\n}\n\n\n\nimpl<'a> From<PartialVdf<'a>> for Vdf<'a> {\n\n fn from(partial: PartialVdf<'a>) -> Self {\n\n Self {\n\n key: partial.key,\n\n value: partial.value,\n\n }\n\n }\n\n}\n\n\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 78, "score": 33011.2962900383 }, { "content": "impl<'a> FromIterator<ObjInnerPair<'a>> for Obj<'a> {\n\n fn from_iter<T: IntoIterator<Item = ObjInnerPair<'a>>>(iter: T) -> Self {\n\n let mut inner = BTreeMap::new();\n\n for (key, values) in iter {\n\n inner.insert(key, values);\n\n }\n\n\n\n Self(inner)\n\n }\n\n}\n\n\n\nimpl<'a> Deref for Obj<'a> {\n\n type Target = ObjInner<'a>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<'a> DerefMut for Obj<'a> {\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 79, "score": 33011.21457342146 }, { "content": "//! \"RememberPassword\" \"1\"\n\n//! \"MostRecent\" \"1\"\n\n//! \"Timestamp\" \"1234567890\"\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! 
`main.rs`\n\n//!\n\n//! ```no_run\n\n//! use keyvalues_parser::Vdf;\n\n//!\n\n//! let vdf_text = std::fs::read_to_string(\"loginusers.vdf\")?;\n\n//! let vdf = Vdf::parse(&vdf_text)?;\n\n//! assert_eq!(\n\n//! \"12345678901234567\",\n\n//! vdf.value.unwrap_obj().keys().next().unwrap()\n\n//! );\n\n//! # Ok::<(), Box<dyn std::error::Error>>(())\n\n//! ```\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 80, "score": 33010.911168473314 }, { "content": "///\n\n/// ## Mutate\n\n///\n\n/// From there you can manipulate/extract from the representation as desired by using the standard\n\n/// conventions on the internal types (plain old `BTreeMap`s, `Vec`s, and `Cow`s all the way down)\n\n///\n\n/// ## Render\n\n///\n\n/// The `Vdf` can also be rendered back to its text form through its `Display` implementation\n\n///\n\n/// ## Example\n\n///\n\n/// ```\n\n/// use keyvalues_parser::Vdf;\n\n///\n\n/// // Parse\n\n/// let vdf_text = r#\"\n\n/// \"Outer Key\"\n\n/// {\n\n/// \"Inner Key\" \"Inner Value\"\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 81, "score": 33009.021261330905 }, { "content": " pub fn expect_str(self, msg: &str) -> Cow<'a, str> {\n\n if let Self::Str(s) = self {\n\n s\n\n } else {\n\n panic!(\"{}\", msg)\n\n }\n\n }\n\n\n\n /// Refer to [Value::unwrap_obj]. Same situation, but with a custom message\n\n pub fn expect_obj(self, msg: &str) -> Obj<'a> {\n\n if let Self::Obj(obj) = self {\n\n obj\n\n } else {\n\n panic!(\"{}\", msg)\n\n }\n\n }\n\n}\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 82, "score": 33008.52886214445 }, { "content": "//! `keyvalues-parser` uses [`pest`](https://lib.rs/crates/pest) to parse\n\n//! [VDF text v1 and v2](https://developer.valvesoftware.com/wiki/KeyValues)\n\n//! files to an untyped Rust structure to ease manipulation and navigation. The\n\n//! parser provides an untyped `Vdf` representation as well as a linear\n\n//! `TokenStream`\n\n//!\n\n//! 
The library is primarily used in conjunction with\n\n//! [`keyvalues-serde`](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-serde)\n\n//! which provides a more ergonommic (yet more limiting) means of dealing with VDF\n\n//! text\n\n//!\n\n//! # Installation\n\n//!\n\n//! **Note: this requires at least Rust `1.42.0`**\n\n//!\n\n//! Just add the library to your `Cargo.toml`\n\n//!\n\n//! ```toml\n\n//! [dependencies]\n\n//! keyvalues-parser = \"0.1.0\"\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 83, "score": 33007.28929140195 }, { "content": "use crate::{Obj, Value, Vdf};\n\n\n\nuse std::{borrow::Cow, collections::BTreeMap};\n\n\n\n#[test]\n", "file_path": "keyvalues-parser/src/tests.rs", "rank": 84, "score": 33007.18094101059 }, { "content": "//! All error information for parsing and rendering\n\n\n\n// This library supports an MSRV of 1.42.0 which is before the addition of\n\n// clippy::nonstandard_macro_braces. This lint is used within `thiserror` which in turn gets\n\n// expanded out here causing clippy to throw out an unknown lint warning which fails CI. Until this\n\n// gets resolved upstream I'm going to allow `unknown_clippy_lints` as a stopgap. Relevant:\n\n// https://github.com/dtolnay/thiserror/issues/140\n\n// https://github.com/dtolnay/thiserror/issues/141\n\n#![allow(renamed_and_removed_lints)]\n\n#![allow(clippy::unknown_clippy_lints)]\n\n\n\nuse thiserror::Error as ThisError;\n\n\n\nuse crate::text::parse::{EscapedPestError, RawPestError};\n\n\n\n/// Just a type alias for `Result` with a [`keyvalues::error::Error`][Error]\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// All possible errors when parsing or rendering VDF text\n\n///\n", "file_path": "keyvalues-parser/src/error.rs", "rank": 85, "score": 33002.21039117922 }, { "content": "//! ```\n\n//!\n\n//! # Usage\n\n//!\n\n//! There is documentation available\n\n//! [here](https://docs.rs/keyvalues-parser/0.1.0/keyvalues_parser/) and there are\n\n//! 
examples available in the\n\n//! [examples directory](https://github.com/LovecraftianHorror/vdf-rs/tree/main/keyvalues-parser/examples)\n\n//!\n\n//! ## Quickstart\n\n//!\n\n//! `loginusers.vdf`\n\n//!\n\n//! ```vdf\n\n//! \"users\"\n\n//! {\n\n//! \"12345678901234567\"\n\n//! {\n\n//! \"AccountName\" \"ACCOUNT_NAME\"\n\n//! \"PersonaName\" \"PERSONA_NAME\"\n", "file_path": "keyvalues-parser/src/lib.rs", "rank": 86, "score": 33001.18173800259 }, { "content": "/// Currently the two variants are parse errors which currently only occurs when `pest` encounters\n\n#[derive(ThisError, Clone, Debug, PartialEq)]\n\npub enum Error {\n\n #[error(\"Failed parsing input Error: {0}\")]\n\n EscapedParseError(#[from] EscapedPestError),\n\n #[error(\"Failed parsing input Error: {0}\")]\n\n RawParseError(#[from] RawPestError),\n\n #[error(\"Failed rendering input Error: {0}\")]\n\n RenderError(#[from] std::fmt::Error),\n\n #[error(\"Encountered invalid character in raw string: {invalid_char:?}\")]\n\n RawRenderError { invalid_char: char },\n\n}\n", "file_path": "keyvalues-parser/src/error.rs", "rank": 87, "score": 32998.86089861514 }, { "content": "#[derive(Parser)]\n\nstruct Args {\n\n vdf_file: String,\n\n}\n\n\n", "file_path": "parse-vdf-file/src/main.rs", "rank": 88, "score": 32958.57671628242 }, { "content": " ) -> fmt::Result {\n\n write_pair(writer, num_indents, &self.key, &self.value, render_type)\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for Value<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.write_indented(f, 0, RenderType::Escaped)\n\n }\n\n}\n\n\n\nimpl<'a> Value<'a> {\n\n fn write_indented(\n\n &self,\n\n writer: &mut impl Write,\n\n num_indents: usize,\n\n render_type: RenderType,\n\n ) -> fmt::Result {\n\n // Only `Obj` gets indented\n\n match self {\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 98, "score": 32150.952133185096 }, { "content": " writer.write_char('\\n')?;\n\n }\n\n\n\n write_pair(writer, 0, &self.key, 
&self.value, render_type)\n\n }\n\n\n\n fn find_invalid_raw_char(&self) -> Option<char> {\n\n find_invalid_raw_char(&self.key).or_else(|| self.value.find_invalid_raw_char())\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for Vdf<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.write_indented(f, 0, RenderType::Escaped)\n\n }\n\n}\n\n\n\nimpl<'a> Vdf<'a> {\n\n pub fn render(&self, writer: &mut impl Write) -> crate::error::Result<()> {\n\n write!(writer, \"{}\", self).map_err(Into::into)\n", "file_path": "keyvalues-parser/src/text/render.rs", "rank": 99, "score": 32150.873166218214 } ]
Rust
agent/test-agent/tests/mock.rs
webern/bottlerocket-test-system
6b523be719c79449784ae75b7bc18056d857aa89
/*! The purpose of this test is to demonstrate the mocking of a [`Client`] and a [`Bootstrap`] in order to test a [`Runner`] with the [`TestAgent`]. !*/ use async_trait::async_trait; use model::{Configuration, Outcome}; use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Display}; use std::path::PathBuf; use tempfile::{tempdir, TempDir}; use test_agent::{BootstrapData, Client, Runner}; use test_agent::{Spec, TestResults}; use tokio::time::{sleep, Duration}; struct MyRunner { _spec: Spec<MyConfig>, } #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)] struct MyConfig {} impl Configuration for MyConfig {} #[async_trait] impl Runner for MyRunner { type C = MyConfig; type E = String; async fn new(spec: Spec<Self::C>) -> Result<Self, Self::E> { Ok(Self { _spec: spec }) } async fn run(&mut self) -> Result<TestResults, Self::E> { println!("MyRunner::run"); for i in 1..=5 { println!("Hello {}", i); sleep(Duration::from_millis(50)).await; } Ok(TestResults { outcome: Outcome::Pass, ..TestResults::default() }) } async fn terminate(&mut self) -> Result<(), Self::E> { println!("MyRunner::terminate"); Ok(()) } } struct MockClient { results_dir: TempDir, results_file: TempDir, } #[async_trait] impl Client for MockClient { type E = String; async fn new(_: BootstrapData) -> Result<Self, Self::E> { Ok(Self { results_dir: tempdir().unwrap(), results_file: tempdir().unwrap(), }) } async fn spec<C>(&self) -> Result<Spec<C>, Self::E> where C: Configuration, { println!("MockClient::get"); Ok(Spec { name: "mock-test".into(), configuration: C::default(), secrets: Default::default(), results_dir: Default::default(), }) } async fn send_test_starting(&self) -> Result<(), Self::E> { println!("MockClient::send_test_starting"); Ok(()) } async fn send_test_done(&self, results: TestResults) -> Result<(), Self::E> { println!("MockClient::send_test_done: {:?}", results); Ok(()) } async fn send_test_results(&self, results: TestResults) -> Result<(), Self::E> { 
println!("MockClient::send_test_results: {:?}", results); Ok(()) } async fn send_error<E>(&self, error: E) -> Result<(), Self::E> where E: Debug + Display + Send + Sync, { println!("MockClient::send_error {}", error); Ok(()) } async fn keep_running(&self) -> Result<bool, Self::E> { Ok(false) } async fn results_directory(&self) -> Result<PathBuf, Self::E> { Ok(self.results_dir.path().to_path_buf()) } async fn results_file(&self) -> Result<PathBuf, Self::E> { Ok(self.results_file.path().join("result.tar.gz")) } async fn retries(&self) -> Result<u32, Self::E> { Ok(0) } } #[tokio::test] async fn mock_test() -> std::io::Result<()> { let mut agent_main = test_agent::TestAgent::<MockClient, MyRunner>::new(BootstrapData { test_name: String::from("hello-test"), }) .await .unwrap(); agent_main.run().await.unwrap(); assert!(std::path::Path::new(&agent_main.results_file().await.unwrap()).is_file()); Ok(()) }
/*! The purpose of this test is to demonstrate the mocking of a [`Client`] and a [`Bootstrap`] in order to test a [`Runner`] with the [`TestAgent`]. !*/ use async_trait::async_trait; use model::{Configuration, Outcome}; use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Display}; use std::path::PathBuf; use tempfile::{tempdir, TempDir}; use test_agent::{BootstrapData, Client, Runner}; use test_agent::{Spec, TestResults}; use tokio::time::{sleep, Duration}; struct MyRunner { _spec: Spec<MyConfig>, } #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)] struct MyConfig {} impl Configuration for MyConfig {} #[async_trait] impl Runner for MyRunner { type C = MyConfig; type E = String; async fn new(spec: Spec<Self::C>) -> Result<Self, Self::E> { Ok(Self { _spec: spec }) } async fn run(&mut self) -> Result<Tes
_main.run().await.unwrap(); assert!(std::path::Path::new(&agent_main.results_file().await.unwrap()).is_file()); Ok(()) }
tResults, Self::E> { println!("MyRunner::run"); for i in 1..=5 { println!("Hello {}", i); sleep(Duration::from_millis(50)).await; } Ok(TestResults { outcome: Outcome::Pass, ..TestResults::default() }) } async fn terminate(&mut self) -> Result<(), Self::E> { println!("MyRunner::terminate"); Ok(()) } } struct MockClient { results_dir: TempDir, results_file: TempDir, } #[async_trait] impl Client for MockClient { type E = String; async fn new(_: BootstrapData) -> Result<Self, Self::E> { Ok(Self { results_dir: tempdir().unwrap(), results_file: tempdir().unwrap(), }) } async fn spec<C>(&self) -> Result<Spec<C>, Self::E> where C: Configuration, { println!("MockClient::get"); Ok(Spec { name: "mock-test".into(), configuration: C::default(), secrets: Default::default(), results_dir: Default::default(), }) } async fn send_test_starting(&self) -> Result<(), Self::E> { println!("MockClient::send_test_starting"); Ok(()) } async fn send_test_done(&self, results: TestResults) -> Result<(), Self::E> { println!("MockClient::send_test_done: {:?}", results); Ok(()) } async fn send_test_results(&self, results: TestResults) -> Result<(), Self::E> { println!("MockClient::send_test_results: {:?}", results); Ok(()) } async fn send_error<E>(&self, error: E) -> Result<(), Self::E> where E: Debug + Display + Send + Sync, { println!("MockClient::send_error {}", error); Ok(()) } async fn keep_running(&self) -> Result<bool, Self::E> { Ok(false) } async fn results_directory(&self) -> Result<PathBuf, Self::E> { Ok(self.results_dir.path().to_path_buf()) } async fn results_file(&self) -> Result<PathBuf, Self::E> { Ok(self.results_file.path().join("result.tar.gz")) } async fn retries(&self) -> Result<u32, Self::E> { Ok(0) } } #[tokio::test] async fn mock_test() -> std::io::Result<()> { let mut agent_main = test_agent::TestAgent::<MockClient, MyRunner>::new(BootstrapData { test_name: String::from("hello-test"), }) .await .unwrap(); agent
random
[ { "content": "/// Print a value using `serde_json` `to_string_pretty` for types that implement Serialize.\n\npub fn json_display<T: Serialize>(object: T) -> String {\n\n serde_json::to_string_pretty(&object).unwrap_or_else(|e| format!(\"Serialization failed: {}\", e))\n\n}\n\n\n\n/// Implement `Display` using `serde_json` `to_string_pretty` for types that implement Serialize.\n\n#[macro_export]\n\nmacro_rules! impl_display_as_json {\n\n ($i:ident) => {\n\n impl std::fmt::Display for $i {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let s = serde_json::to_string_pretty(self)\n\n .unwrap_or_else(|e| format!(\"Serialization failed: {}\", e));\n\n std::fmt::Display::fmt(&s, f)\n\n }\n\n }\n\n };\n\n}\n\n\n\n/// Set up the config for aws calls using `aws_secret_name` if provided and `sts::assume_role`\n\n/// if a role arn is provided.\n", "file_path": "bottlerocket/agents/src/lib.rs", "rank": 3, "score": 166842.5399219283 }, { "content": "/// We need this to provide a default for serde.\n\nfn kind() -> String {\n\n String::from(\"kind\")\n\n}\n", "file_path": "selftest/src/test_settings.rs", "rank": 4, "score": 163056.71056924778 }, { "content": "fn resource_name_and_field_name(input: &str) -> Result<Option<(String, String)>> {\n\n let captures = match REGEX.captures(input) {\n\n None => return Ok(None),\n\n Some(some) => some,\n\n };\n\n let resource_name = captures\n\n .get(1)\n\n .context(error::ConfigResolutionSnafu {\n\n what: \"Resource name could not be extracted from capture.\".to_string(),\n\n })?\n\n .as_str();\n\n let field_name = captures\n\n .get(2)\n\n .context(error::ConfigResolutionSnafu {\n\n what: \"Resource value could not be extracted from capture.\".to_string(),\n\n })?\n\n .as_str();\n\n Ok(Some((resource_name.to_string(), field_name.to_string())))\n\n}\n\n\n", "file_path": "model/src/clients/resource_client.rs", "rank": 5, "score": 153809.02328844828 }, { "content": "#[test]\n\nfn test_pattern1() {\n\n let 
(resource_name, field_name) = resource_name_and_field_name(r\"${dup1.info}\")\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(resource_name, \"dup1\");\n\n assert_eq!(field_name, \"info\");\n\n assert!(resource_name_and_field_name(r\"hello\").unwrap().is_none());\n\n assert!(resource_name_and_field_name(r\"${hello}\").unwrap().is_none());\n\n assert!(resource_name_and_field_name(r\"foo${x.y}\")\n\n .unwrap()\n\n .is_none());\n\n assert!(resource_name_and_field_name(r\"${x.y}foo\")\n\n .unwrap()\n\n .is_none());\n\n assert!(resource_name_and_field_name(r\"foo${x.y}bar\")\n\n .unwrap()\n\n .is_none());\n\n assert!(resource_name_and_field_name(r\"${.x}\").unwrap().is_none());\n\n assert!(resource_name_and_field_name(r\"${x.}\").unwrap().is_none());\n\n assert!(resource_name_and_field_name(r\"${.}\").unwrap().is_none());\n", "file_path": "model/src/clients/resource_client.rs", "rank": 6, "score": 148805.7905936493 }, { "content": "struct ExampleTestRunner {\n\n config: ExampleConfig,\n\n}\n\n\n\n/// When implementing an actual [`Runner`], you may need some input in order to start the test.\n\n/// You would define that input in a struct which implements [`Configuration`].\n", "file_path": "agent/test-agent/examples/example_test_agent/main.rs", "rank": 7, "score": 146964.42786222845 }, { "content": "fn display_option(o: &Option<u64>) -> String {\n\n match o {\n\n Some(count) => format!(\"{}\", count),\n\n None => \"\".to_string(),\n\n }\n\n}\n", "file_path": "bottlerocket/testsys/src/status.rs", "rank": 8, "score": 144060.58620375083 }, { "content": "fn default_count() -> i32 {\n\n 1\n\n}\n\n\n\nimpl Configuration for EcsTestConfig {}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize, Default)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct VSphereVmConfig {\n\n /// The name of the OVA file used for the VSphere worker nodes.\n\n pub ova_name: String,\n\n\n\n /// TUF repository where the OVA file can be found\n\n pub tuf_repo: TufRepoConfig,\n\n\n\n /// The number 
of VMs to create. If no value is provided 2 VMs will be created.\n\n pub vm_count: Option<i32>,\n\n\n\n /// URL of the vCenter instance to connect to\n\n pub vcenter_host_url: String,\n", "file_path": "bottlerocket/types/src/agent_config.rs", "rank": 9, "score": 135580.56274790526 }, { "content": "/// Creates the labels that we will add to the test pod deployment.\n\nfn create_labels<S1, S2>(job_type: JobType, agent: S1, instance: S2) -> BTreeMap<String, String>\n\nwhere\n\n S1: AsRef<str>,\n\n S2: AsRef<str>,\n\n{\n\n [\n\n (APP_NAME, instance.as_ref()),\n\n (APP_INSTANCE, agent.as_ref()),\n\n (\n\n APP_COMPONENT,\n\n match job_type {\n\n JobType::TestAgent => TEST_AGENT,\n\n JobType::ResourceAgent => RESOURCE_AGENT,\n\n },\n\n ),\n\n (APP_PART_OF, TESTSYS),\n\n (APP_MANAGED_BY, CONTROLLER),\n\n (APP_CREATED_BY, CONTROLLER),\n\n ]\n\n .iter()\n\n .map(|(k, v)| ((*k).to_owned(), (*v).to_owned()))\n\n .collect()\n\n}\n\n\n", "file_path": "controller/src/job/job_builder.rs", "rank": 10, "score": 134358.7908248278 }, { "content": "#[test]\n\nfn test() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n let dir = tempdir.path();\n\n let key1 = \"piano\";\n\n let value1 = \"lake\";\n\n let key2 = \"bread\";\n\n let value2 = \"mall\";\n\n let secret_name = SecretName::new(\"poet\").unwrap();\n\n let secret_dir = dir.join(secret_name.as_str());\n\n fs::create_dir_all(&secret_dir).unwrap();\n\n fs::write(secret_dir.join(key1), &value1).unwrap();\n\n fs::write(secret_dir.join(key2), &value2).unwrap();\n\n let secrets = SecretsReader::new_custom_directory(&dir);\n\n let data = secrets.get_secret(&secret_name).unwrap();\n\n assert_eq!(\n\n String::from_utf8(data.get(key1).unwrap().to_owned()).unwrap(),\n\n value1\n\n );\n\n assert_eq!(\n\n String::from_utf8(data.get(key2).unwrap().to_owned()).unwrap(),\n\n value2\n\n );\n\n}\n", "file_path": "agent/agent-common/src/secrets.rs", "rank": 11, "score": 129485.97815424763 }, { "content": "fn parse_key_val(s: &str) -> 
Result<(String, String)> {\n\n let mut iter = s.splitn(2, '=');\n\n let key = iter\n\n .next()\n\n .context(error::ArgumentMissingSnafu { arg: s.to_string() })?;\n\n let value = iter\n\n .next()\n\n .context(error::ArgumentMissingSnafu { arg: s.to_string() })?;\n\n Ok((key.to_string(), value.to_string()))\n\n}\n", "file_path": "bottlerocket/testsys/src/add_secret_map.rs", "rank": 12, "score": 116213.63499273386 }, { "content": "use model::Configuration;\n\nuse resource_agent::clients::{AgentClient, ClientResult};\n\nuse resource_agent::provider::{ProviderError, Spec};\n\nuse resource_agent::{BootstrapData, ResourceAction};\n\n\n\n/// Create an [`AgentClient`] that does nothing so that we can test without Kubernetes.\n\npub(crate) struct MockAgentClient;\n\n\n\n#[async_trait::async_trait]\n\nimpl AgentClient for MockAgentClient {\n\n async fn new(_data: BootstrapData) -> ClientResult<Self> {\n\n Ok(Self {})\n\n }\n\n\n\n async fn send_init_error(&self, _action: ResourceAction, _error: &str) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n async fn get_spec<Config>(&self) -> ClientResult<Spec<Config>>\n\n where\n", "file_path": "agent/resource-agent/tests/mock/agent_client.rs", "rank": 13, "score": 115231.56912294628 }, { "content": "use agent_common::secrets::SecretData;\n\nuse model::{Configuration, SecretName};\n\nuse resource_agent::clients::{ClientResult, InfoClient};\n\nuse resource_agent::BootstrapData;\n\n\n\n/// Create an [`InfoClient`] that does nothing so that we can test without Kubernetes.\n\npub(crate) struct MockInfoClient {}\n\n\n\n#[async_trait::async_trait]\n\nimpl InfoClient for MockInfoClient {\n\n async fn new(_data: BootstrapData) -> ClientResult<Self> {\n\n Ok(Self {})\n\n }\n\n\n\n async fn get_info<Info>(&self) -> ClientResult<Info>\n\n where\n\n Info: Configuration,\n\n {\n\n Ok(Info::default())\n\n }\n", "file_path": "agent/resource-agent/tests/mock/info_client.rs", "rank": 14, "score": 115228.71344509519 }, { "content": " Config: 
Configuration,\n\n {\n\n Ok(Spec::default())\n\n }\n\n\n\n async fn get_created_resource<Resource>(&self) -> ClientResult<Option<Resource>>\n\n where\n\n Resource: Configuration,\n\n {\n\n Ok(Some(Resource::default()))\n\n }\n\n\n\n /// Notify Kubernetes that the creation of resources is starting.\n\n async fn send_create_starting(&self) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n async fn send_create_succeeded<Resource>(&self, _resource: Resource) -> ClientResult<()>\n\n where\n\n Resource: Configuration,\n", "file_path": "agent/resource-agent/tests/mock/agent_client.rs", "rank": 15, "score": 115209.0073033305 }, { "content": "\n\n async fn send_info<Info>(&self, _info: Info) -> ClientResult<()>\n\n where\n\n Info: Configuration,\n\n {\n\n Ok(())\n\n }\n\n\n\n async fn get_secret(&self, _secret_name: &SecretName) -> ClientResult<SecretData> {\n\n Ok(SecretData::default())\n\n }\n\n}\n", "file_path": "agent/resource-agent/tests/mock/info_client.rs", "rank": 16, "score": 115203.9659404954 }, { "content": " {\n\n Ok(())\n\n }\n\n\n\n /// Notify Kubernetes that the creation of resources failed and provide an error message.\n\n async fn send_create_failed(&self, _error: &ProviderError) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// Notify Kubernetes that the destruction of resources is starting.\n\n async fn send_destroy_starting(&self) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// Notify Kubernetes that the destruction of resources succeeded.\n\n async fn send_destroy_succeeded(&self) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// Notify Kubernetes that the destruction of resources failed and provide an error message.\n\n async fn send_destroy_failed(&self, _error: &ProviderError) -> ClientResult<()> {\n\n Ok(())\n\n }\n\n\n\n async fn get_keep_running(&self) -> ClientResult<bool> {\n\n Ok(false)\n\n }\n\n}\n", "file_path": "agent/resource-agent/tests/mock/agent_client.rs", "rank": 17, "score": 115200.54741649021 }, { "content": "#[derive(Debug, Clone, 
Default, Serialize, Deserialize)]\n\nstruct Nested {\n\n data: Value,\n\n}\n\n\n\nimpl Configuration for ExampleConfig {}\n\n\n\n#[async_trait]\n\nimpl test_agent::Runner for ExampleTestRunner {\n\n type C = ExampleConfig;\n\n type E = String;\n\n\n\n async fn new(spec: Spec<Self::C>) -> Result<Self, Self::E> {\n\n println!(\"Initializing example testsys agent...\");\n\n Ok(Self {\n\n config: spec.configuration,\n\n })\n\n }\n\n\n\n async fn run(&mut self) -> Result<TestResults, Self::E> {\n\n println!(\"ExampleTestRunner::run\");\n", "file_path": "agent/test-agent/examples/example_test_agent/main.rs", "rank": 18, "score": 109384.67302831315 }, { "content": "#[derive(Debug, Clone, Default, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ExampleConfig {\n\n person: String,\n\n hello_count: u32,\n\n hello_duration_milliseconds: u32,\n\n nested: Option<Nested>,\n\n}\n\n\n", "file_path": "agent/test-agent/examples/example_test_agent/main.rs", "rank": 19, "score": 107870.60546829904 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename = \"SCREAMING_SNAKE_CASE\")]\n\nstruct Inner {\n\n /// The path to the [kind] binary. Defaults to `kind` (i.e. by default the kind binary is\n\n /// expected to be found via `$PATH`).\n\n ///\n\n /// # Example\n\n ///\n\n /// ```text\n\n /// TESTSYS_SELFTEST_KIND_PATH=/wherever/kind\n\n /// ```\n\n ///\n\n /// [kind]: https://kind.sigs.k8s.io/\n\n #[serde(default = \"kind\")]\n\n kind_path: String,\n\n}\n\n\n\nlazy_static::lazy_static! 
{\n\n static ref TEST_SETTINGS: Inner =\n\n envy::prefixed(\"TESTSYS_SELFTEST_\")\n\n .from_env::<Inner>()\n\n .expect(\"Error parsing TestSettings environment variables\");\n\n}\n\n\n", "file_path": "selftest/src/test_settings.rs", "rank": 20, "score": 107088.09755553857 }, { "content": "#[test]\n\nfn secret_name_deserialize() {\n\n use serde_json::json;\n\n #[derive(Deserialize)]\n\n struct Something {\n\n foo: SecretName,\n\n }\n\n let bad_json = json!({ \"foo\": \"/\" });\n\n assert!(serde_json::from_value::<Something>(bad_json).is_err());\n\n let good_json = json!({ \"foo\": \"bar-baz\" });\n\n let deserialized = serde_json::from_value::<Something>(good_json).unwrap();\n\n assert_eq!(deserialized.foo.as_str(), \"bar-baz\");\n\n}\n", "file_path": "model/src/agent.rs", "rank": 21, "score": 104337.52699561692 }, { "content": "#[test]\n\nfn testsys_constants_macro_test() {\n\n assert_eq!(\"testsys.bottlerocket.aws\", testsys!());\n\n assert_eq!(\"testsys.bottlerocket.aws/v1\", API_VERSION);\n\n assert_eq!(\"testsys.bottlerocket.aws/foo\", testsys!(\"foo\"));\n\n}\n", "file_path": "model/src/constants.rs", "rank": 22, "score": 100953.68697183685 }, { "content": "#[test]\n\nfn k8s_version_valid() {\n\n use std::str::FromStr;\n\n let input = \"v1.21.3\";\n\n let k8s_version = K8sVersion::from_str(input).unwrap();\n\n assert_eq!(\"v1.21\", k8s_version.major_minor_with_v());\n\n assert_eq!(\"1.21\", k8s_version.major_minor_without_v());\n\n assert_eq!(\"v1.21.3\", k8s_version.full_version_with_v());\n\n assert_eq!(\"1.21.3\", k8s_version.full_version_without_v());\n\n}\n", "file_path": "bottlerocket/types/src/agent_config.rs", "rank": 23, "score": 100296.67524074382 }, { "content": "#[test]\n\nfn k8s_version_invalid() {\n\n let input = \"1.foo\";\n\n assert!(K8sVersion::parse(input).is_err())\n\n}\n\n\n", "file_path": "bottlerocket/types/src/agent_config.rs", "rank": 24, "score": 100296.67524074382 }, { "content": "pub fn integ_test_dependent_path() -> PathBuf {\n\n 
let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.pop();\n\n path.join(\"testsys/tests/data/integ-test-dependent.yaml\")\n\n}\n\n\n", "file_path": "bottlerocket/testsys/tests/data.rs", "rank": 25, "score": 98829.16793940472 }, { "content": "pub fn integ_test_depended_on_path() -> PathBuf {\n\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.pop();\n\n path.join(\"testsys/tests/data/integ-test-depended-on.yaml\")\n\n}\n\n\n", "file_path": "bottlerocket/testsys/tests/data.rs", "rank": 26, "score": 98829.16793940472 }, { "content": "/// `handle_reconciliation_error` is called when `reconcile` returns an error.\n\nfn handle_reconciliation_error(e: &ReconciliationError, _: Context) -> RequeueAction {\n\n error!(\"Reconciliation error: {}\", e);\n\n requeue()\n\n}\n", "file_path": "controller/src/test_controller/mod.rs", "rank": 27, "score": 96556.1743102588 }, { "content": "pub fn integ_test_resource_destruction_never_path() -> PathBuf {\n\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.pop();\n\n path.join(\"testsys/tests/data/resource-destruction-never.yaml\")\n\n}\n", "file_path": "bottlerocket/testsys/tests/data.rs", "rank": 28, "score": 96084.62959336619 }, { "content": "#[async_trait]\n\npub trait Client: Sized {\n\n /// The error type returned by this trait's functions.\n\n type E: Debug + Display + Send + Sync + 'static;\n\n\n\n /// Create a new instance of the `Client`. 
The [`TestAgent`] will instantiate the `Client` with\n\n /// this function after it obtains `BootstrapData`.\n\n async fn new(bootstrap_data: BootstrapData) -> Result<Self, Self::E>;\n\n\n\n /// Get the information needed by a test [`Runner`] from the k8s API.\n\n async fn spec<C>(&self) -> Result<Spec<C>, Self::E>\n\n where\n\n C: Configuration;\n\n\n\n /// Get the directory that the test's results are stored in.\n\n async fn results_directory(&self) -> Result<PathBuf, Self::E>;\n\n\n\n /// Get the file that the test's tar results should be stored in.\n\n async fn results_file(&self) -> Result<PathBuf, Self::E>;\n\n\n\n /// Determine if the pod should keep running after it has finished or encountered and error.\n", "file_path": "agent/test-agent/src/lib.rs", "rank": 29, "score": 95018.57113359807 }, { "content": "/// Defines the service account for an agent of type `agent_type`.\n\npub fn agent_service_account(agent_type: AgentType) -> ServiceAccount {\n\n ServiceAccount {\n\n metadata: ObjectMeta {\n\n name: Some(agent_type.service_account_name()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n annotations: Some(btreemap! 
{\n\n \"kubernetes.io/service-account.name\".to_string() => agent_type.service_account_name()\n\n }),\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "model/src/system/agent.rs", "rank": 30, "score": 92703.36582993488 }, { "content": "/// Defines the cluster role for an agent of type `agent_type`.\n\npub fn agent_cluster_role(agent_type: AgentType) -> ClusterRole {\n\n ClusterRole {\n\n metadata: ObjectMeta {\n\n name: Some(agent_type.role_name()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n },\n\n rules: Some(agent_type.policy_rules()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "model/src/system/agent.rs", "rank": 31, "score": 92703.36582993488 }, { "content": "fn env_vars(raw_vars: Vec<(&str, String)>) -> Vec<EnvVar> {\n\n raw_vars\n\n .into_iter()\n\n .map(|(name, value)| EnvVar {\n\n name: name.to_owned(),\n\n value: Some(value),\n\n value_from: None,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "controller/src/job/job_builder.rs", "rank": 32, "score": 92582.30931875229 }, { "content": "#[async_trait]\n\npub trait Runner: Sized + Send {\n\n /// Input that you need to initialize your test run.\n\n type C: Configuration;\n\n\n\n /// The error type returned by this trait's functions.\n\n type E: Debug + Display + Send + Sync + 'static;\n\n\n\n /// Creates a new instance of the `Runner`.\n\n async fn new(spec: Spec<Self::C>) -> Result<Self, Self::E>;\n\n\n\n /// Runs the test(s) and returns when they are done. 
If the tests cannot be completed, returns\n\n /// an error.\n\n async fn run(&mut self) -> Result<TestResults, Self::E>;\n\n\n\n /// Rerun a failed test.\n\n async fn rerun_failed(\n\n &mut self,\n\n _prev_test_result: &TestResults,\n\n ) -> Result<TestResults, Self::E> {\n\n info!(\"Tried to rerun test, but no retry method was defined.\");\n", "file_path": "agent/test-agent/src/lib.rs", "rank": 33, "score": 92116.99364472195 }, { "content": "/// Defines the cluster role binding for an agent of type `agent_type`.\n\npub fn agent_cluster_role_binding(agent_type: AgentType) -> ClusterRoleBinding {\n\n ClusterRoleBinding {\n\n metadata: ObjectMeta {\n\n name: Some(agent_type.binding_name()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n },\n\n role_ref: RoleRef {\n\n kind: \"ClusterRole\".to_string(),\n\n name: agent_type.role_name(),\n\n api_group: \"rbac.authorization.k8s.io\".to_string(),\n\n },\n\n subjects: Some(vec![Subject {\n\n kind: \"ServiceAccount\".to_string(),\n\n name: agent_type.service_account_name(),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n }]),\n\n }\n\n}\n", "file_path": "model/src/system/agent.rs", "rank": 34, "score": 90199.85338240625 }, { "content": "pub trait AllowNotFound<T, E>\n\nwhere\n\n E: HttpStatusCode + Display,\n\n{\n\n /// When an operation returns a `Result`, sometimes it is ok if that result is a `404`. For\n\n /// example, if you are deleting something and if it is fine for the object you were trying to\n\n /// delete to not exist.\n\n ///\n\n /// In this case you can call `.is_found()` to transform the `Result` into a `bool` with the\n\n /// following logic:\n\n ///\n\n /// Returns `Ok(true)` if the the result was `Ok`. Returns `Ok(false)` if the result was `Err`\n\n /// but the error was a `404`. 
Returns `Err(e)` for any error that is not a `404`.\n\n ///\n\n /// If you want to log the error or do anything else with it in the case of a `404` then you can\n\n /// do so in `handle_not_found`.\n\n ///\n\n #[allow(clippy::wrong_self_convention)]\n\n fn allow_not_found<O>(self, handle_not_found: O) -> std::result::Result<Option<T>, E>\n\n where\n", "file_path": "model/src/clients/http_status_code.rs", "rank": 40, "score": 89273.46626060386 }, { "content": " use std::fmt::Debug;\n\n\n\n const CLUSTER_NAME: &str = \"test-client\";\n\n const TEST_NAME: &str = \"my-test\";\n\n\n\n #[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\n #[serde(rename_all = \"camelCase\")]\n\n struct TestConfig {\n\n field_a: u64,\n\n field_b: u64,\n\n }\n\n\n\n impl Configuration for TestConfig {}\n\n\n\n const TEST_CONFIG: TestConfig = TestConfig {\n\n field_a: 13,\n\n field_b: 14,\n\n };\n\n\n\n #[tokio::test]\n", "file_path": "model/src/clients/test_client.rs", "rank": 41, "score": 89141.99961078537 }, { "content": "\n\n fn api(&self) -> &Api<Self::Crd> {\n\n &self.api\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"integ\")]\n\nmod test {\n\n use super::*;\n\n use crate::constants::NAMESPACE;\n\n use crate::{Agent, Configuration, TestSpec};\n\n use k8s_openapi::api::core::v1::Namespace;\n\n use k8s_openapi::apiextensions_apiserver::pkg::apis::apiextensions::v1::CustomResourceDefinition;\n\n use k8s_openapi::apimachinery::pkg::apis::meta::v1::ObjectMeta;\n\n use kube::api::PostParams;\n\n use kube::core::object::HasStatus;\n\n use kube::CustomResourceExt;\n\n use selftest::Cluster;\n\n use serde::{Deserialize, Serialize};\n", "file_path": "model/src/clients/test_client.rs", "rank": 42, "score": 89134.01873847505 }, { "content": "use super::error::Result;\n\nuse crate::clients::crd_client::JsonPatch;\n\nuse crate::clients::CrdClient;\n\nuse crate::{AgentStatus, TaskState, Test, TestResults, TestStatus};\n\nuse kube::Api;\n\n\n\n/// An API Client for TestSys 
Test CRD objects.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n///# use model::clients::{CrdClient, TestClient};\n\n///# async fn no_run() {\n\n/// let test_client = TestClient::new().await.unwrap();\n\n/// let test = test_client.get(\"my-test\").await.unwrap();\n\n///# }\n\n/// ```\n\n#[derive(Clone)]\n\npub struct TestClient {\n\n api: Api<Test>,\n", "file_path": "model/src/clients/test_client.rs", "rank": 43, "score": 89130.86716913582 }, { "content": " None,\n\n tokio::time::Duration::from_secs(10),\n\n )\n\n .await\n\n .unwrap();\n\n let tc = TestClient::new_from_k8s_client(cluster.k8s_client().await.unwrap());\n\n\n\n tc.create(Test {\n\n metadata: ObjectMeta {\n\n name: Some(TEST_NAME.into()),\n\n ..ObjectMeta::default()\n\n },\n\n spec: TestSpec {\n\n agent: Agent {\n\n name: \"my-agent\".into(),\n\n image: \"foo:v0.1.0\".into(),\n\n configuration: Some(TEST_CONFIG.into_map().unwrap()),\n\n ..Agent::default()\n\n },\n\n ..TestSpec::default()\n", "file_path": "model/src/clients/test_client.rs", "rank": 44, "score": 89130.2956992271 }, { "content": " async fn test() {\n\n let cluster = Cluster::new(CLUSTER_NAME).unwrap();\n\n let k8s_client = cluster.k8s_client().await.unwrap();\n\n let ns_api: Api<Namespace> = Api::all(k8s_client.clone());\n\n ns_api\n\n .create(&PostParams::default(), &crate::system::testsys_namespace())\n\n .await\n\n .unwrap();\n\n cluster\n\n .wait_for_object::<Namespace>(NAMESPACE, None, tokio::time::Duration::from_secs(10))\n\n .await\n\n .unwrap();\n\n let crd_api: Api<CustomResourceDefinition> = Api::all(k8s_client.clone());\n\n crd_api\n\n .create(&PostParams::default(), &Test::crd())\n\n .await\n\n .unwrap();\n\n cluster\n\n .wait_for_object::<CustomResourceDefinition>(\n\n \"tests.testsys.bottlerocket.aws\",\n", "file_path": "model/src/clients/test_client.rs", "rank": 45, "score": 89127.39210361279 }, { "content": "}\n\n\n\nimpl TestClient {\n\n /// Mark the TestSys [`Test`] as ok to delete by setting the `keep_running`\n\n /// 
flag to false\n\n pub async fn send_keep_running<S>(&self, name: S, keep_running: bool) -> Result<Test>\n\n where\n\n S: AsRef<str> + Send,\n\n {\n\n self.patch(\n\n name,\n\n vec![JsonPatch::new_replace_operation(\n\n \"/spec/agent/keepRunning\",\n\n keep_running,\n\n )],\n\n \"set 'keep running'\",\n\n )\n\n .await\n\n }\n\n\n", "file_path": "model/src/clients/test_client.rs", "rank": 46, "score": 89127.00257953988 }, { "content": " JsonPatch::new_add_operation(\"/status/agent/taskState\", TaskState::Error),\n\n JsonPatch::new_add_operation(\"/status/agent/error\", error),\n\n ],\n\n \"send agent error\",\n\n )\n\n .await\n\n }\n\n}\n\n\n\nimpl CrdClient for TestClient {\n\n type Crd = Test;\n\n type CrdStatus = TestStatus;\n\n\n\n fn new_from_api(api: Api<Self::Crd>) -> Self {\n\n Self { api }\n\n }\n\n\n\n fn kind(&self) -> &'static str {\n\n \"test\"\n\n }\n", "file_path": "model/src/clients/test_client.rs", "rank": 47, "score": 89126.03361089107 }, { "content": " /// Get the TestSys [`Test`]'s `status.agent` field.\n\n pub async fn get_agent_status<S>(&self, name: S) -> Result<AgentStatus>\n\n where\n\n S: AsRef<str> + Send,\n\n {\n\n Ok(self.get(name).await?.status.unwrap_or_default().agent)\n\n }\n\n\n\n pub async fn send_resource_error(&self, test_name: &str, error: &str) -> Result<Test> {\n\n self.patch_status(\n\n test_name,\n\n vec![JsonPatch::new_add_operation(\n\n \"/status/controller/resourceError\",\n\n error,\n\n )],\n\n \"send resource error\",\n\n )\n\n .await\n\n }\n\n\n", "file_path": "model/src/clients/test_client.rs", "rank": 48, "score": 89120.7077254528 }, { "content": " )\n\n .await\n\n }\n\n\n\n pub async fn send_test_completed(&self, name: &str, results: TestResults) -> Result<Test> {\n\n self.patch_status(\n\n name,\n\n vec![\n\n JsonPatch::new_add_operation(\"/status/agent/taskState\", TaskState::Completed),\n\n JsonPatch::new_add_operation(\"/status/agent/results/-\", results),\n\n ],\n\n \"send test completion results\",\n\n )\n\n 
.await\n\n }\n\n\n\n pub async fn send_agent_error(&self, name: &str, error: &str) -> Result<Test> {\n\n self.patch_status(\n\n name,\n\n vec![\n", "file_path": "model/src/clients/test_client.rs", "rank": 49, "score": 89117.52445175787 }, { "content": " pub async fn send_agent_task_state(&self, name: &str, task_state: TaskState) -> Result<Test> {\n\n self.patch_status(\n\n name,\n\n vec![JsonPatch::new_add_operation(\n\n \"/status/agent/taskState\",\n\n task_state,\n\n )],\n\n \"send agent task state\",\n\n )\n\n .await\n\n }\n\n\n\n pub async fn send_test_results(&self, name: &str, results: TestResults) -> Result<Test> {\n\n self.patch_status(\n\n name,\n\n vec![JsonPatch::new_add_operation(\n\n \"/status/agent/results/-\",\n\n results,\n\n )],\n\n \"send test results\",\n", "file_path": "model/src/clients/test_client.rs", "rank": 50, "score": 89117.42950527072 }, { "content": " .await\n\n .unwrap();\n\n assert!(matches!(\n\n tc.get(TEST_NAME).await.unwrap().agent_status().task_state,\n\n TaskState::Running\n\n ));\n\n\n\n tc.send_resource_error(TEST_NAME, \"something bad happened\")\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n tc.get(TEST_NAME)\n\n .await\n\n .unwrap()\n\n .status()\n\n .cloned()\n\n .unwrap()\n\n .controller\n\n .resource_error\n\n .unwrap(),\n", "file_path": "model/src/clients/test_client.rs", "rank": 51, "score": 89116.54208223594 }, { "content": " \"something bad happened\"\n\n );\n\n\n\n tc.send_agent_error(TEST_NAME, \"something terrible happened\")\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n tc.get(TEST_NAME)\n\n .await\n\n .unwrap()\n\n .status()\n\n .cloned()\n\n .unwrap()\n\n .agent\n\n .error\n\n .unwrap(),\n\n \"something terrible happened\"\n\n );\n\n assert!(matches!(\n\n tc.get(TEST_NAME).await.unwrap().agent_status().task_state,\n\n TaskState::Error\n\n ));\n\n }\n\n}\n", "file_path": "model/src/clients/test_client.rs", "rank": 52, "score": 89116.44094361295 }, { "content": " },\n\n ..Test::default()\n\n })\n\n .await\n\n 
.unwrap();\n\n\n\n tc.initialize_status(TEST_NAME).await.unwrap();\n\n\n\n // If status is already initialized, it should be an error to do so again.\n\n assert!(tc.initialize_status(TEST_NAME).await.is_err());\n\n\n\n tc.send_agent_task_state(TEST_NAME, TaskState::Error)\n\n .await\n\n .unwrap();\n\n assert!(matches!(\n\n tc.get(TEST_NAME).await.unwrap().agent_status().task_state,\n\n TaskState::Error\n\n ));\n\n\n\n tc.send_agent_task_state(TEST_NAME, TaskState::Running)\n", "file_path": "model/src/clients/test_client.rs", "rank": 53, "score": 89113.37096170321 }, { "content": "pub(crate) mod mock;\n\n\n\nuse mock::agent_client::MockAgentClient;\n\nuse mock::info_client::MockInfoClient;\n\nuse mock::{InstanceCreator, InstanceDestroyer};\n\nuse resource_agent::{Agent, BootstrapData, ResourceAction, Types};\n\nuse std::marker::PhantomData;\n\n\n\n/// This test demonstrates the the use of mock clients so that [`Create`] and [`Destroy`] implementations can be tested\n\n/// in the absence of Kubernetes.\n\n#[tokio::test]\n\nasync fn mock_test() {\n\n let types = Types {\n\n info_client: PhantomData::<MockInfoClient>::default(),\n\n agent_client: PhantomData::<MockAgentClient>::default(),\n\n };\n\n\n\n let agent = Agent::new(\n\n types,\n\n BootstrapData {\n", "file_path": "agent/resource-agent/tests/mock_test.rs", "rank": 54, "score": 88060.79471613417 }, { "content": " resource_name: \"some-instances\".to_string(),\n\n action: ResourceAction::Create,\n\n },\n\n InstanceCreator {},\n\n InstanceDestroyer {},\n\n )\n\n .await\n\n .unwrap();\n\n\n\n agent.run().await.unwrap();\n\n\n\n let types = Types {\n\n info_client: PhantomData::<MockInfoClient>::default(),\n\n agent_client: PhantomData::<MockAgentClient>::default(),\n\n };\n\n\n\n let agent = Agent::new(\n\n types,\n\n BootstrapData {\n\n resource_name: \"some-instances\".to_string(),\n", "file_path": "agent/resource-agent/tests/mock_test.rs", "rank": 55, "score": 88046.86632894663 }, { "content": " action: 
ResourceAction::Destroy,\n\n },\n\n InstanceCreator {},\n\n InstanceDestroyer {},\n\n )\n\n .await\n\n .unwrap();\n\n agent.run().await.unwrap();\n\n}\n", "file_path": "agent/resource-agent/tests/mock_test.rs", "rank": 56, "score": 88025.28490372258 }, { "content": "/// The private error type for the default [`Bootstrap`].\n\n#[derive(Debug, Snafu)]\n\npub(crate) enum InnerError {\n\n #[snafu(display(\"Unable to read environment variable: '{}': {}\", key, source))]\n\n EnvRead {\n\n key: String,\n\n source: std::env::VarError,\n\n },\n\n}\n\n\n\nimpl BootstrapData {\n\n pub fn from_env() -> Result<BootstrapData, BootstrapError> {\n\n Ok(BootstrapData {\n\n test_name: std::env::var(ENV_TEST_NAME).context(EnvReadSnafu { key: ENV_TEST_NAME })?,\n\n })\n\n }\n\n}\n", "file_path": "agent/test-agent/src/bootstrap.rs", "rank": 57, "score": 81002.34491534752 }, { "content": "/*!\n\n\n\nThe `bootstrap` module defines a struct and function for getting the necessary information from the\n\ncontainer environment to construct the [`Agent`] and all of its parts.\n\n\n\n!*/\n\n\n\nuse model::constants::ENV_TEST_NAME;\n\nuse snafu::{ResultExt, Snafu};\n\n\n\n/// Data that is read from the TestPod's container environment and filesystem.\n\npub struct BootstrapData {\n\n /// The name of the TestSys Test.\n\n pub test_name: String,\n\n}\n\n\n\n/// The public error type for the default [`Bootstrap`].\n\n#[derive(Debug, Snafu)]\n\npub struct BootstrapError(InnerError);\n\n\n", "file_path": "agent/test-agent/src/bootstrap.rs", "rank": 58, "score": 81000.33650942406 }, { "content": "\n\n#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CreatedInstances {\n\n instance_ids: Vec<String>,\n\n}\n\n\n\nimpl Configuration for CreatedInstances {}\n\n\n\n#[async_trait::async_trait]\n\nimpl Create for InstanceCreator {\n\n type Config = InstanceConfig;\n\n type Info = Memo;\n\n type Resource = CreatedInstances;\n\n\n\n 
async fn create<I>(\n\n &self,\n\n spec: Spec<Self::Config>,\n\n client: &I,\n\n ) -> ProviderResult<Self::Resource>\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 59, "score": 78849.49135697666 }, { "content": "/// InstanceDestroyer pretends to destroy instances for the sake demonstrating a mock resource\n\n/// provider.\n\npub(crate) struct InstanceDestroyer {}\n\n\n\n#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Memo {\n\n information: String,\n\n}\n\n\n\nimpl Configuration for Memo {}\n\n\n\n#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct InstanceConfig {\n\n num_instances: u32,\n\n instance_type: String,\n\n}\n\n\n\nimpl Configuration for InstanceConfig {}\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 60, "score": 78847.4203965342 }, { "content": "/*!\n\n\n\nThis test module provides mock implementations of the [`AgentClient`] and [`InfoClient`] that\n\ndemonstrate what can be done to test without Kubernetes.\n\n\n\nAlso provided here are a very simple mock implementations of the [`Create`] and [`Destroy`] traits.\n\n\n\n!*/\n\n\n\npub(crate) mod agent_client;\n\npub(crate) mod info_client;\n\n\n\nuse model::Configuration;\n\nuse resource_agent::clients::InfoClient;\n\nuse resource_agent::provider::{Create, Destroy, ProviderError, ProviderResult, Resources, Spec};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// InstanceCreator pretends to create instances for the sake demonstrating a mock resource provider.\n\npub(crate) struct InstanceCreator {}\n\n\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 61, "score": 78842.83002775414 }, { "content": " where\n\n I: InfoClient,\n\n {\n\n client\n\n .send_info(Memo {\n\n information: format!(\"Create {} instances\", spec.configuration.num_instances),\n\n })\n\n .await\n\n .map_err(|e| 
ProviderError::new_with_source(Resources::Clear, e))?;\n\n Ok(CreatedInstances {\n\n instance_ids: vec![\"123\".to_string(), \"456\".to_string()],\n\n })\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Destroy for InstanceDestroyer {\n\n type Config = InstanceConfig;\n\n type Info = Memo;\n\n type Resource = CreatedInstances;\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 62, "score": 78831.0268809497 }, { "content": "\n\n async fn destroy<I>(\n\n &self,\n\n _spec: Option<Spec<Self::Config>>,\n\n resource: Option<Self::Resource>,\n\n client: &I,\n\n ) -> ProviderResult<()>\n\n where\n\n I: InfoClient,\n\n {\n\n let resource = match resource {\n\n Some(some) => some,\n\n None => {\n\n return Err(ProviderError::new_with_context(\n\n Resources::Unknown,\n\n \"Resource was 'None', unable to destroy resources.\",\n\n ));\n\n }\n\n };\n\n\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 63, "score": 78825.2762802319 }, { "content": " for instance_id in resource.instance_ids {\n\n client\n\n .send_info(Memo {\n\n information: format!(\"Destroying instance '{}'\", instance_id),\n\n })\n\n .await\n\n .map_err(|e| ProviderError::new_with_source(Resources::Clear, e))?;\n\n }\n\n\n\n client\n\n .send_info(Memo {\n\n information: \"Done destroying resources\".into(),\n\n })\n\n .await\n\n .map_err(|e| ProviderError::new_with_source(Resources::Clear, e))?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "agent/resource-agent/tests/mock/mod.rs", "rank": 64, "score": 78811.60815925148 }, { "content": "\n\n async fn new(bootstrap_data: BootstrapData) -> Result<Self, Self::E> {\n\n Ok(Self {\n\n client: TestClient::new().await.context(K8sSnafu)?,\n\n name: bootstrap_data.test_name,\n\n results_dir: TempDir::new().context(TempDirCreateSnafu)?,\n\n })\n\n }\n\n\n\n async fn keep_running(&self) -> Result<bool, Self::E> {\n\n let test_data = self.client.get(&self.name).await.context(K8sSnafu)?;\n\n Ok(test_data.spec.agent.keep_running)\n\n 
}\n\n\n\n async fn retries(&self) -> Result<u32, Self::E> {\n\n let test_data = self.client.get(&self.name).await.context(K8sSnafu)?;\n\n Ok(test_data.spec.retries.unwrap_or_default())\n\n }\n\n\n\n async fn spec<C>(&self) -> Result<Spec<C>, Self::E>\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 65, "score": 78061.25876097969 }, { "content": "\n\n Ok(Spec {\n\n name: self.name.clone(),\n\n configuration,\n\n secrets: test_data.spec.agent.secrets.unwrap_or_default(),\n\n results_dir: self.results_dir.path().to_path_buf(),\n\n })\n\n }\n\n\n\n async fn send_test_starting(&self) -> Result<(), Self::E> {\n\n self.client\n\n .send_agent_task_state(&self.name, TaskState::Running)\n\n .await\n\n .context(K8sSnafu)?;\n\n Ok(())\n\n }\n\n\n\n async fn send_test_results(&self, results: TestResults) -> Result<(), Self::E> {\n\n self.client\n\n .send_test_results(&self.name, results)\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 66, "score": 78056.91476422807 }, { "content": "use crate::{BootstrapData, Client, DefaultClient, Spec, TestResults};\n\nuse async_trait::async_trait;\n\nuse model::clients::{CrdClient, ResourceClient, TestClient};\n\nuse model::constants::TESTSYS_RESULTS_FILE;\n\nuse model::{Configuration, TaskState};\n\nuse serde_json::Value;\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::fmt::{Debug, Display};\n\nuse std::path::PathBuf;\n\nuse tempfile::TempDir;\n\n\n\n/// The public error type for the default [`Client`].\n\n#[derive(Debug, Snafu)]\n\npub struct ClientError(InnerError);\n\n\n\n/// The private error type for the default [`Client`].\n\n#[derive(Debug, Snafu)]\n\npub(crate) enum InnerError {\n\n /// Any error when using the k8s client will have a descriptive error message. 
The user of\n\n /// `DefaultClient` is in a better position to provide context than we are, so we forward the\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 67, "score": 78053.67957293738 }, { "content": " /// error message.\n\n #[snafu(display(\"{}\", source))]\n\n K8s { source: model::clients::Error },\n\n\n\n #[snafu(display(\"Unable to deserialize test configuration: {}\", source))]\n\n Deserialization { source: serde_json::Error },\n\n\n\n #[snafu(display(\"Unable to create resource client: {}\", source))]\n\n ResourceClientCreate { source: model::clients::Error },\n\n\n\n #[snafu(display(\"Unable to resolve config templates: {}\", source))]\n\n ResolveConfig { source: model::clients::Error },\n\n\n\n #[snafu(display(\"An error occured while creating a `TempDir`: {}\", source))]\n\n TempDirCreate { source: std::io::Error },\n\n}\n\n\n\n#[async_trait]\n\nimpl Client for DefaultClient {\n\n type E = ClientError;\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 68, "score": 78053.37290118416 }, { "content": " .await\n\n .context(K8sSnafu)?;\n\n Ok(())\n\n }\n\n\n\n async fn send_test_done(&self, results: TestResults) -> Result<(), Self::E> {\n\n self.client\n\n .send_test_completed(&self.name, results)\n\n .await\n\n .context(K8sSnafu)?;\n\n Ok(())\n\n }\n\n\n\n async fn send_error<E>(&self, error: E) -> Result<(), Self::E>\n\n where\n\n E: Debug + Display + Send + Sync,\n\n {\n\n self.client\n\n .send_agent_error(&self.name, &error.to_string())\n\n .await\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 69, "score": 78050.80301198407 }, { "content": " where\n\n C: Configuration,\n\n {\n\n let test_data = self.client.get(&self.name).await.context(K8sSnafu)?;\n\n\n\n let raw_config = match test_data.spec.agent.configuration {\n\n Some(serde_map) => serde_map,\n\n None => Default::default(),\n\n };\n\n\n\n let resource_client = ResourceClient::new()\n\n .await\n\n .context(ResourceClientCreateSnafu)?;\n\n let 
resolved_config = resource_client\n\n .resolve_templated_config(raw_config)\n\n .await\n\n .context(ResolveConfigSnafu)?;\n\n\n\n let configuration =\n\n serde_json::from_value(Value::Object(resolved_config)).context(DeserializationSnafu)?;\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 70, "score": 78047.39825742775 }, { "content": " .context(K8sSnafu)?;\n\n Ok(())\n\n }\n\n\n\n async fn results_directory(&self) -> Result<PathBuf, Self::E> {\n\n return Ok(self.results_dir.path().to_path_buf());\n\n }\n\n\n\n async fn results_file(&self) -> Result<PathBuf, Self::E> {\n\n return Ok(PathBuf::from(TESTSYS_RESULTS_FILE));\n\n }\n\n}\n", "file_path": "agent/test-agent/src/k8s_client.rs", "rank": 71, "score": 78034.72342996448 }, { "content": "#[derive(Serialize)]\n\nstruct Results {\n\n #[serde(skip_serializing)]\n\n width: u16,\n\n results: Vec<ResultRow>,\n\n #[serde(skip_serializing)]\n\n min_name_width: u16,\n\n #[serde(skip_serializing)]\n\n min_object_width: u16,\n\n #[serde(skip_serializing)]\n\n min_state_width: u16,\n\n #[serde(skip_serializing)]\n\n min_passed_width: u16,\n\n #[serde(skip_serializing)]\n\n min_skipped_width: u16,\n\n #[serde(skip_serializing)]\n\n min_failed_width: u16,\n\n}\n\n\n\nimpl Default for Results {\n\n fn default() -> Self {\n", "file_path": "bottlerocket/testsys/src/status.rs", "rank": 72, "score": 67332.36712051947 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Args {\n\n /// Set logging verbosity [trace|debug|info|warn|error]. If the environment variable `RUST_LOG`\n\n /// is present, it overrides the default logging behavior. See https://docs.rs/env_logger/latest\n\n #[structopt(long = \"log-level\", default_value = \"info\")]\n\n log_level: LevelFilter,\n\n /// Path to the kubeconfig file. 
Also can be passed with the KUBECONFIG environment variable.\n\n #[structopt(long = \"kubeconfig\")]\n\n kubeconfig: Option<PathBuf>,\n\n #[structopt(subcommand)]\n\n command: Command,\n\n}\n\n\n", "file_path": "bottlerocket/testsys/src/main.rs", "rank": 73, "score": 67332.34408555207 }, { "content": "fn main() {\n\n // Re-run this build script if the model changes.\n\n println!(\"cargo:rerun-if-changed=../client/src/model\");\n\n println!(\"cargo:rerun-if-changed=../client/src/system\");\n\n // Re-run the yaml generation if these variables change\n\n println!(\"cargo:rerun-if-env-changed=TESTSYS_CONTROLLER_IMAGE\");\n\n println!(\"cargo:rerun-if-env-changed=TESTSYS_CONTROLLER_IMAGE_PULL_SECRET\");\n\n\n\n let path = PathBuf::from(YAMLGEN_DIR)\n\n .join(\"deploy\")\n\n .join(\"testsys-crd.yaml\");\n\n let mut testsys_crd = File::create(&path).unwrap();\n\n\n\n let path = PathBuf::from(YAMLGEN_DIR)\n\n .join(\"deploy\")\n\n .join(\"testsys-controller.yaml\");\n\n let mut testsys_controller = File::create(&path).unwrap();\n\n\n\n let path = PathBuf::from(YAMLGEN_DIR)\n\n .join(\"deploy\")\n", "file_path": "yamlgen/build.rs", "rank": 74, "score": 67317.13380606602 }, { "content": "#[derive(Tabled, Default, Clone, Serialize)]\n\nstruct ResultRow {\n\n #[header(\"NAME\")]\n\n name: String,\n\n #[header(\"TYPE\")]\n\n object_type: String,\n\n #[header(\"STATE\")]\n\n state: String,\n\n #[header(\"PASSED\")]\n\n #[field(display_with = \"display_option\")]\n\n passed: Option<u64>,\n\n #[header(\"SKIPPED\")]\n\n #[field(display_with = \"display_option\")]\n\n skipped: Option<u64>,\n\n #[header(\"FAILED\")]\n\n #[field(display_with = \"display_option\")]\n\n failed: Option<u64>,\n\n}\n\n\n", "file_path": "bottlerocket/testsys/src/status.rs", "rank": 75, "score": 66314.84274071328 }, { "content": "#[derive(Serialize)]\n\nstruct DockerConfigAuth {\n\n auth: String,\n\n}\n\n\n\nimpl DockerConfigJson {\n\n pub(crate) fn new(username: &str, password: &str, registry: &str) -> 
DockerConfigJson {\n\n let mut auths = HashMap::new();\n\n let auth = base64::encode(format!(\"{}:{}\", username, password));\n\n auths.insert(registry.to_string(), DockerConfigAuth { auth });\n\n DockerConfigJson { auths }\n\n }\n\n}\n", "file_path": "bottlerocket/testsys/src/k8s.rs", "rank": 76, "score": 65329.00610977838 }, { "content": "/// Extract the value of `RUST_LOG` if it exists, otherwise log this crate at\n\n/// `DEFAULT_LEVEL_FILTER`.\n\nfn init_logger() {\n\n match std::env::var(env_logger::DEFAULT_FILTER_ENV).ok() {\n\n Some(_) => {\n\n // RUST_LOG exists; env_logger will use it.\n\n Builder::from_default_env().init();\n\n }\n\n None => {\n\n // RUST_LOG does not exist; use default log level for this crate only.\n\n Builder::new()\n\n .filter(Some(env!(\"CARGO_CRATE_NAME\")), DEFAULT_LEVEL_FILTER)\n\n .filter(Some(\"model\"), DEFAULT_LEVEL_FILTER)\n\n .init();\n\n }\n\n }\n\n}\n", "file_path": "controller/src/main.rs", "rank": 77, "score": 65119.76017697329 }, { "content": "#[test]\n\nfn good_secret_name_2() {\n\n let input = \"0-1_foO\";\n\n let secret_name = SecretName::new(input).unwrap();\n\n assert_eq!(secret_name.as_str(), input);\n\n}\n\n\n", "file_path": "model/src/agent.rs", "rank": 78, "score": 64101.27113238517 }, { "content": "#[test]\n\nfn good_secret_name_1() {\n\n let input = \"-\";\n\n let secret_name = SecretName::new(input).unwrap();\n\n assert_eq!(secret_name.as_str(), input);\n\n}\n\n\n", "file_path": "model/src/agent.rs", "rank": 79, "score": 64101.27113238517 }, { "content": "#[test]\n\nfn bad_secret_name_1() {\n\n let input = \"bad/name/1\";\n\n assert!(SecretName::new(input).err().is_some())\n\n}\n\n\n", "file_path": "model/src/agent.rs", "rank": 80, "score": 64101.27113238517 }, { "content": "#[test]\n\nfn bad_secret_name_2() {\n\n let input = \"\";\n\n assert!(SecretName::new(input).err().is_some())\n\n}\n\n\n", "file_path": "model/src/agent.rs", "rank": 81, "score": 64101.27113238517 }, { "content": "/// Extract the value 
of `RUST_LOG` if it exists, otherwise log this crate at\n\n/// `DEFAULT_LEVEL_FILTER`.\n\nfn init_logger() {\n\n match std::env::var(env_logger::DEFAULT_FILTER_ENV).ok() {\n\n Some(_) => {\n\n // RUST_LOG exists; env_logger will use it.\n\n Builder::from_default_env().init();\n\n }\n\n None => {\n\n // RUST_LOG does not exist; use default log level for this crate only.\n\n Builder::new()\n\n .filter(Some(env!(\"CARGO_CRATE_NAME\")), LevelFilter::Trace)\n\n .filter(Some(\"resource_agent\"), LevelFilter::Trace)\n\n .filter(Some(\"model\"), LevelFilter::Trace)\n\n .init();\n\n }\n\n }\n\n}\n", "file_path": "agent/resource-agent/examples/duplicator_resource_agent/main.rs", "rank": 82, "score": 60488.92727634893 }, { "content": "/// Extract the value of `RUST_LOG` if it exists, otherwise log this crate at\n\n/// `DEFAULT_LEVEL_FILTER`.\n\nfn init_logger() {\n\n match std::env::var(env_logger::DEFAULT_FILTER_ENV).ok() {\n\n Some(_) => {\n\n // RUST_LOG exists; env_logger will use it.\n\n Builder::from_default_env().init();\n\n }\n\n None => {\n\n // RUST_LOG does not exist; use default log level for this crate only.\n\n Builder::new()\n\n .filter(Some(env!(\"CARGO_CRATE_NAME\")), LevelFilter::Trace)\n\n .filter(Some(\"resource_agent\"), LevelFilter::Trace)\n\n .filter(Some(\"model\"), LevelFilter::Trace)\n\n .init();\n\n }\n\n }\n\n}\n", "file_path": "agent/resource-agent/examples/example_resource_agent/main.rs", "rank": 83, "score": 60488.92727634893 }, { "content": "/// Retrieve the results from a sonobuoy test and convert them into `TestResults`.\n\npub fn results_sonobuoy(\n\n kubeconfig_path: &str,\n\n sonobuoy_config: &SonobuoyConfig,\n\n results_dir: &Path,\n\n) -> Result<TestResults, error::Error> {\n\n let kubeconfig_arg = vec![\"--kubeconfig\", kubeconfig_path];\n\n\n\n info!(\"Running sonobuoy retrieve\");\n\n let results_filepath = results_dir.join(SONOBUOY_RESULTS_FILENAME);\n\n let status = Command::new(\"/usr/bin/sonobuoy\")\n\n 
.args(kubeconfig_arg.to_owned())\n\n .arg(\"retrieve\")\n\n .arg(\"--filename\")\n\n .arg(results_filepath.as_os_str())\n\n .status()\n\n .context(error::SonobuoyProcessSnafu)?;\n\n ensure!(status.success(), error::SonobuoyRunSnafu);\n\n\n\n info!(\"Sonobuoy testing has completed, printing results\");\n\n let sonobuoy_results_exist_status = Command::new(\"/usr/bin/sonobuoy\")\n", "file_path": "bottlerocket/agents/src/sonobuoy.rs", "rank": 84, "score": 60096.30567944676 }, { "content": "/// Defines the testsys-controller deployment\n\npub fn controller_deployment(\n\n controller_image: String,\n\n image_pull_secret: Option<String>,\n\n) -> Deployment {\n\n let image_pull_secrets =\n\n image_pull_secret.map(|secret| vec![LocalObjectReference { name: Some(secret) }]);\n\n\n\n Deployment {\n\n metadata: ObjectMeta {\n\n labels: Some(\n\n btreemap! {\n\n APP_COMPONENT => \"controller\",\n\n APP_MANAGED_BY => \"testsys\",\n\n APP_PART_OF => \"testsys\",\n\n LABEL_COMPONENT => \"controller\",\n\n }\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect(),\n\n ),\n", "file_path": "model/src/system/controller.rs", "rank": 85, "score": 60091.327398942216 }, { "content": "/// If the command was successful (exit code zero), returns the command's `stdout`. Otherwise\n\n/// returns a provider error.\n\n/// - `output`: the `Output` object from a `std::process::Command`\n\n/// - `hint`: the command that was executed, e.g. 
`echo hello world`\n\n/// - `resources`: whether or not resources will be leftover if this command failed\n\npub fn provider_error_for_cmd_output(\n\n output: Output,\n\n hint: &str,\n\n resources: Resources,\n\n) -> ProviderResult<String> {\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n if output.status.success() {\n\n Ok(stdout.to_string())\n\n } else {\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n let code = output.status.code().unwrap_or(-1);\n\n Err(ProviderError::new_with_context(\n\n resources,\n\n format!(\n\n \"Error running '{}', exit code {}\\nstderr:\\n{}\\nstdout:\\n{}\",\n\n hint, code, stderr, stdout\n\n ),\n\n ))\n\n }\n\n}\n", "file_path": "bottlerocket/agents/src/lib.rs", "rank": 86, "score": 58288.24448112518 }, { "content": "/// Defines the testsys K8S namespace\n\npub fn testsys_namespace() -> Namespace {\n\n Namespace {\n\n metadata: ObjectMeta {\n\n labels: Some(btreemap! {\n\n \"name\".to_string() => \"testsys\".to_string()\n\n }),\n\n name: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n },\n\n spec: None,\n\n status: None,\n\n }\n\n}\n", "file_path": "model/src/system/namespace.rs", "rank": 87, "score": 56807.21371231359 }, { "content": "/// Initialize the logger with the value passed by `--log-level` (or its default) when the\n\n/// `RUST_LOG` environment variable is not present. 
If present, the `RUST_LOG` environment variable\n\n/// overrides `--log-level`/`level`.\n\nfn init_logger(level: LevelFilter) {\n\n match std::env::var(env_logger::DEFAULT_FILTER_ENV).ok() {\n\n Some(_) => {\n\n // RUST_LOG exists; env_logger will use it.\n\n Builder::from_default_env().init();\n\n }\n\n None => {\n\n // RUST_LOG does not exist; use default log level for this crate only.\n\n Builder::new()\n\n .filter(Some(env!(\"CARGO_CRATE_NAME\")), level)\n\n .init();\n\n }\n\n }\n\n}\n", "file_path": "bottlerocket/testsys/src/main.rs", "rank": 88, "score": 55931.22871526412 }, { "content": "/// Defines the testsys-controller service account\n\npub fn controller_service_account() -> ServiceAccount {\n\n ServiceAccount {\n\n metadata: ObjectMeta {\n\n name: Some(TESTSYS_CONTROLLER_SERVICE_ACCOUNT.to_string()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n annotations: Some(btreemap! {\n\n \"kubernetes.io/service-account.name\".to_string() => TESTSYS_CONTROLLER_SERVICE_ACCOUNT.to_string()\n\n }),\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "model/src/system/controller.rs", "rank": 89, "score": 55086.87234609046 }, { "content": "/// Defines the testsys-controller cluster role\n\npub fn controller_cluster_role() -> ClusterRole {\n\n ClusterRole {\n\n metadata: ObjectMeta {\n\n name: Some(TESTSYS_CONTROLLER_CLUSTER_ROLE.to_string()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n },\n\n rules: Some(vec![\n\n PolicyRule {\n\n api_groups: Some(vec![TESTSYS.to_string()]),\n\n resources: Some(vec![\"tests\".to_string(), \"tests/status\".to_string()]),\n\n verbs: vec![\n\n \"create\",\n\n \"delete\",\n\n \"deletecollection\",\n\n \"get\",\n\n \"list\",\n\n \"patch\",\n\n \"update\",\n\n \"watch\",\n", "file_path": "model/src/system/controller.rs", "rank": 90, "score": 55086.87234609046 }, { "content": "fn is_pod_running(pod: &Pod) -> bool {\n\n pod.status\n\n .as_ref()\n\n .and_then(|s| 
s.phase.as_ref().map(|s| s == \"Running\"))\n\n .unwrap_or(false)\n\n}\n", "file_path": "selftest/src/cluster.rs", "rank": 91, "score": 54918.617664791156 }, { "content": "/// Defines the testsys-controller cluster role binding\n\npub fn controller_cluster_role_binding() -> ClusterRoleBinding {\n\n ClusterRoleBinding {\n\n metadata: ObjectMeta {\n\n name: Some(\"testsys-controller-role-binding\".to_string()),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n },\n\n role_ref: RoleRef {\n\n api_group: \"rbac.authorization.k8s.io\".to_string(),\n\n kind: \"ClusterRole\".to_string(),\n\n name: TESTSYS_CONTROLLER_CLUSTER_ROLE.to_string(),\n\n },\n\n subjects: Some(vec![Subject {\n\n kind: \"ServiceAccount\".to_string(),\n\n name: TESTSYS_CONTROLLER_SERVICE_ACCOUNT.to_string(),\n\n namespace: Some(NAMESPACE.to_string()),\n\n ..Default::default()\n\n }]),\n\n }\n\n}\n\n\n", "file_path": "model/src/system/controller.rs", "rank": 92, "score": 53515.7978245822 }, { "content": "#[async_trait::async_trait]\n\npub trait CrdClient: Sized {\n\n type Crd: kube::Resource<DynamicType = ()>\n\n + Serialize\n\n + DeserializeOwned\n\n + Debug\n\n + Clone\n\n + Send\n\n + Sync\n\n + CrdExt;\n\n type CrdStatus: Serialize + Default + Send;\n\n\n\n // The following need to be implemented which allows the rest of the functions to have\n\n // default implementations.\n\n\n\n fn new_from_api(api: Api<Self::Crd>) -> Self;\n\n fn kind(&self) -> &'static str;\n\n fn api(&self) -> &Api<Self::Crd>;\n\n\n\n async fn new() -> Result<Self> {\n\n let k8s_client = kube::Client::try_default()\n", "file_path": "model/src/clients/crd_client.rs", "rank": 93, "score": 51067.26794470971 }, { "content": "/// The `Configuration` trait is for structs that can be used for custom data, which is represented\n\n/// in a CRD model like this:\n\n///\n\n/// ```yaml\n\n/// configuration:\n\n/// additionalProperties: true\n\n/// nullable: true\n\n/// type: object\n\n/// ```\n\n///\n\n/// The 
traits aggregated by the `Configuration` trait are typical of \"plain old data\" types and\n\n/// provide a way for clients to strongly type this data which is otherwise unconstrained by the\n\n/// API.\n\n///\n\npub trait Configuration:\n\n Serialize + DeserializeOwned + Clone + Debug + Default + Send + Sync + Sized + 'static\n\n{\n\n /// Convert the `Configuration` object to a serde `Map`.\n\n fn into_map(self) -> Result<Map<String, Value>> {\n\n match self.into_value()? {\n\n Value::Object(map) => Ok(map),\n\n _ => Err(error::ConfigWrongValueTypeSnafu {}.build().into()),\n\n }\n\n }\n\n\n\n /// Convert the `Configuration` object to a serde `Value`.\n\n fn into_value(self) -> Result<Value> {\n\n Ok(serde_json::to_value(self).context(error::ConfigSerializationSnafu)?)\n\n }\n\n\n\n /// Deserialize the `Configuration` object from a serde `Map`.\n\n fn from_map(map: Map<String, Value>) -> Result<Self> {\n\n Self::from_value(Value::Object(map))\n\n }\n\n\n\n /// Deserialize the `Configuration` object from a serde `Value`.\n\n fn from_value(value: Value) -> Result<Self> {\n\n Ok(serde_json::from_value(value).context(error::ConfigDeserializationSnafu)?)\n\n }\n\n}\n", "file_path": "model/src/configuration.rs", "rank": 94, "score": 50593.741198410506 }, { "content": "/// When something goes wrong, we need to let the controller know whether or not we have existing\n\n/// robots out there that need to be destroyed. 
We can do this by checking our `ProductionMemo`.\n\nfn resources_situation(memo: &ProductionMemo) -> Resources {\n\n if memo.existing_robot_ids.is_empty() {\n\n Resources::Clear\n\n } else {\n\n Resources::Remaining\n\n }\n\n}\n", "file_path": "agent/resource-agent/examples/example_resource_agent/provider.rs", "rank": 95, "score": 50186.77496053844 }, { "content": "#[async_trait::async_trait]\n\npub trait AgentClient: Sized {\n\n /// Create a new `AgentClient`.\n\n async fn new(data: BootstrapData) -> ClientResult<Self>;\n\n\n\n /// If there is a problem during the `Agent::new` function, this will be used to send the error.\n\n async fn send_init_error(&self, action: ResourceAction, error: &str) -> ClientResult<()>;\n\n\n\n /// Get the resource specifications for this resource provider.\n\n async fn get_spec<Config>(&self) -> ClientResult<Spec<Config>>\n\n where\n\n Config: Configuration;\n\n\n\n /// Get the resource that this resource provider created. `None` if it hasn't been created.\n\n async fn get_created_resource<Resource>(&self) -> ClientResult<Option<Resource>>\n\n where\n\n Resource: Configuration;\n\n\n\n /// Notify Kubernetes that the creation of resources is starting.\n\n async fn send_create_starting(&self) -> ClientResult<()>;\n\n\n", "file_path": "agent/resource-agent/src/clients/agent_client.rs", "rank": 96, "score": 49986.44704027444 }, { "content": "fn volumes(agent: &Agent) -> Option<Vec<Volume>> {\n\n let secrets = agent.secret_names();\n\n if secrets.is_empty() {\n\n return None;\n\n }\n\n Some(\n\n secrets\n\n .iter()\n\n .map(|&name| Volume {\n\n name: name.as_str().into(),\n\n secret: Some(SecretVolumeSource {\n\n secret_name: Some(name.as_str().into()),\n\n ..SecretVolumeSource::default()\n\n }),\n\n ..Volume::default()\n\n })\n\n .collect(),\n\n )\n\n}\n", "file_path": "controller/src/job/job_builder.rs", "rank": 97, "score": 49562.97880611895 }, { "content": "\n\n #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, Eq, 
PartialEq)]\n\n #[serde(rename_all = \"camelCase\")]\n\n struct CreatedRobot {\n\n instance_id: u64,\n\n }\n\n\n\n impl Configuration for CreatedRobot {}\n\n\n\n const CREATED_ROBOT: CreatedRobot = CreatedRobot {\n\n instance_id: 123456,\n\n };\n\n\n\n #[tokio::test]\n\n async fn test() {\n\n let cluster = Cluster::new(CLUSTER_NAME).unwrap();\n\n let k8s_client = cluster.k8s_client().await.unwrap();\n\n let ns_api: Api<Namespace> = Api::all(k8s_client.clone());\n\n ns_api\n\n .create(&PostParams::default(), &crate::system::testsys_namespace())\n", "file_path": "model/src/clients/resource_client.rs", "rank": 98, "score": 49387.43003964259 }, { "content": " struct AgentInfo {\n\n field_a: String,\n\n field_b: u64,\n\n }\n\n\n\n impl Configuration for AgentInfo {}\n\n\n\n #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, Eq, PartialEq)]\n\n #[serde(rename_all = \"camelCase\")]\n\n struct RobotRequest {\n\n robot_lucky_number: u64,\n\n robot_unlucky_number: u64,\n\n }\n\n\n\n impl Configuration for RobotRequest {}\n\n\n\n const ROBOT_REQUEST: RobotRequest = RobotRequest {\n\n robot_lucky_number: 7,\n\n robot_unlucky_number: 13,\n\n };\n", "file_path": "model/src/clients/resource_client.rs", "rank": 99, "score": 49382.152000817674 } ]
Rust
demo/src/features/mesh/jobs/write.rs
jlowry/rafx
515aeeae3e57e1c520e320316197326294fc099e
use rafx::render_feature_write_job_prelude::*; use rafx::api::RafxPrimitiveTopology; use rafx::framework::{VertexDataLayout, VertexDataSetLayout}; use serde::{Deserialize, Serialize}; #[derive(Copy, Clone, Debug, Serialize, Deserialize, Default)] #[repr(C)] pub struct MeshVertex { pub position: [f32; 3], pub normal: [f32; 3], pub tangent: [f32; 4], pub tex_coord: [f32; 2], } lazy_static::lazy_static! { pub static ref MESH_VERTEX_LAYOUT : VertexDataSetLayout = { use rafx::api::RafxFormat; VertexDataLayout::build_vertex_layout(&MeshVertex::default(), |builder, vertex| { builder.add_member(&vertex.position, "POSITION", RafxFormat::R32G32B32_SFLOAT); builder.add_member(&vertex.normal, "NORMAL", RafxFormat::R32G32B32_SFLOAT); builder.add_member(&vertex.tangent, "TANGENT", RafxFormat::R32G32B32A32_SFLOAT); builder.add_member(&vertex.tex_coord, "TEXCOORD", RafxFormat::R32G32_SFLOAT); }).into_set(RafxPrimitiveTopology::TriangleList) }; } use super::ExtractedFrameNodeMeshData; use rafx::api::{RafxIndexBufferBinding, RafxIndexType, RafxVertexBufferBinding}; use rafx::framework::{DescriptorSetArc, MaterialPassResource, ResourceArc}; use rafx::nodes::{FrameNodeIndex, PerViewNode}; struct PreparedSubmitNodeMeshData { material_pass_resource: ResourceArc<MaterialPassResource>, per_view_descriptor_set: DescriptorSetArc, per_material_descriptor_set: Option<DescriptorSetArc>, per_instance_descriptor_set: DescriptorSetArc, frame_node_index: FrameNodeIndex, mesh_part_index: usize, } impl std::fmt::Debug for PreparedSubmitNodeMeshData { fn fmt( &self, f: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { f.debug_struct("PreparedSubmitNodeMeshData") .field("frame_node_index", &self.frame_node_index) .field("mesh_part_index", &self.mesh_part_index) .finish() } } pub struct MeshWriteJob { extracted_frame_node_mesh_data: Vec<Option<ExtractedFrameNodeMeshData>>, prepared_submit_node_mesh_data: Vec<PreparedSubmitNodeMeshData>, } impl MeshWriteJob { pub fn new() -> Self { MeshWriteJob { 
extracted_frame_node_mesh_data: Default::default(), prepared_submit_node_mesh_data: Default::default(), } } pub fn push_submit_node( &mut self, view_node: &PerViewNode, per_view_descriptor_set: DescriptorSetArc, per_material_descriptor_set: Option<DescriptorSetArc>, per_instance_descriptor_set: DescriptorSetArc, mesh_part_index: usize, material_pass_resource: ResourceArc<MaterialPassResource>, ) -> usize { let submit_node_index = self.prepared_submit_node_mesh_data.len(); self.prepared_submit_node_mesh_data .push(PreparedSubmitNodeMeshData { material_pass_resource: material_pass_resource.clone(), per_view_descriptor_set, per_material_descriptor_set, per_instance_descriptor_set, frame_node_index: view_node.frame_node_index(), mesh_part_index, }); submit_node_index } pub fn set_extracted_frame_node_mesh_data( &mut self, extracted_frame_node_mesh_data: Vec<Option<ExtractedFrameNodeMeshData>>, ) { self.extracted_frame_node_mesh_data = extracted_frame_node_mesh_data; } } impl WriteJob for MeshWriteJob { fn render_element( &self, write_context: &mut RenderJobWriteContext, _view: &RenderView, render_phase_index: RenderPhaseIndex, index: SubmitNodeId, ) -> RafxResult<()> { profiling::scope!(super::RENDER_ELEMENT_SCOPE_NAME); let command_buffer = &write_context.command_buffer; let render_node_data = &self.prepared_submit_node_mesh_data[index as usize]; let frame_node_data: &ExtractedFrameNodeMeshData = self.extracted_frame_node_mesh_data [render_node_data.frame_node_index as usize] .as_ref() .unwrap(); let mesh_part = &frame_node_data.mesh_asset.inner.mesh_parts [render_node_data.mesh_part_index] .as_ref() .unwrap(); let pipeline = write_context .resource_context .graphics_pipeline_cache() .get_or_create_graphics_pipeline( render_phase_index, &render_node_data.material_pass_resource, &write_context.render_target_meta, &*MESH_VERTEX_LAYOUT, )?; command_buffer.cmd_bind_pipeline(&pipeline.get_raw().pipeline)?; render_node_data .per_view_descriptor_set .bind(command_buffer)?; 
if let Some(per_material_descriptor_set) = &render_node_data.per_material_descriptor_set { per_material_descriptor_set.bind(command_buffer).unwrap(); } render_node_data .per_instance_descriptor_set .bind(command_buffer)?; command_buffer.cmd_bind_vertex_buffers( 0, &[RafxVertexBufferBinding { buffer: &frame_node_data .mesh_asset .inner .vertex_buffer .get_raw() .buffer, byte_offset: mesh_part.vertex_buffer_offset_in_bytes as u64, }], )?; command_buffer.cmd_bind_index_buffer(&RafxIndexBufferBinding { buffer: &frame_node_data .mesh_asset .inner .index_buffer .get_raw() .buffer, byte_offset: mesh_part.index_buffer_offset_in_bytes as u64, index_type: RafxIndexType::Uint16, })?; command_buffer.cmd_draw_indexed( mesh_part.index_buffer_size_in_bytes / 2, 0, 0, )?; Ok(()) } fn feature_debug_name(&self) -> &'static str { super::render_feature_debug_name() } fn feature_index(&self) -> RenderFeatureIndex { super::render_feature_index() } }
use rafx::render_feature_write_job_prelude::*; use rafx::api::RafxPrimitiveTopology; use rafx::framework::{VertexDataLayout, VertexDataSetLayout}; use serde::{Deserialize, Serialize}; #[derive(Copy, Clone, Debug, Serialize, Deserialize, Default)] #[repr(C)] pub struct MeshVertex { pub position: [f32; 3], pub normal: [f32; 3], pub tangent: [f32; 4], pub tex_coord: [f32; 2], } lazy_static::lazy_static! { pub static ref MESH_VERTEX_LAYOUT : VertexDataSetLayout = { use rafx::api::RafxFormat; VertexDataLayout::build_vertex_layout(&MeshVertex::default(), |builder, vertex| { builder.add_member(&vertex.position, "POSITION", RafxFormat::R32G32B32_SFLOAT); builder.add_member(&vertex.normal, "NORMAL", RafxFormat::R32G32B32_SFLOAT); builder.add_member(&vertex.tangent, "TANGENT", RafxFormat::R32G32B32A32_SFLOAT); builder.add_member(&vertex.tex_coord, "TEXCOORD", RafxFormat::R32G32_SFLOAT); }).into_set(RafxPrimitiveTopology::TriangleList) }; } use super::ExtractedFrameNodeMeshData; use rafx::api::{RafxIndexBufferBinding, RafxIndexType, RafxVertexBufferBinding}; use rafx::framework::{DescriptorSetArc, MaterialPassResource, ResourceArc}; use rafx::nodes::{FrameNodeIndex, PerViewNode}; struct PreparedSubmitNodeMeshData { material_pass_resource: ResourceArc<MaterialPassResource>, per_view_descriptor_set: DescriptorSetArc, per_material_descriptor_set: Option<DescriptorSetArc>, per_instance_descriptor_set: DescriptorSetArc, frame_node_index: FrameNodeIndex, mesh_part_index: usize, } impl std::fmt::Debug for PreparedSubmitNodeMeshData { fn fmt( &self, f: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { f.debug_struct("PreparedSubmitNodeMeshData") .field("frame_node_index", &self.frame_node_index) .field("mesh_part_index", &self.mesh_part_index) .finish() } } pub struct MeshWriteJob { extracted_frame_node_mesh_data: Vec<Option<ExtractedFrameNodeMeshData>>, prepared_submit_node_mesh_data: Vec<PreparedSubmitNodeMeshData>, } impl MeshWriteJob { pub fn new() -> Self { MeshWriteJob { 
extracted_frame_node_mesh_data: Default::default(), prepared_submit_node_mesh_data: Default::default(), } } pub fn push_submit_node( &mut self, view_node: &PerViewNode, per_view_descriptor_set: DescriptorSetArc, per_material_descriptor_set: Option<DescriptorSetArc>, per_instance_descriptor_set: DescriptorSetArc, mesh_part_index: usize, material_pass_resource: ResourceArc<MaterialPassResource>, ) -> usize { let submit_node_index = self.prepared_submit_node_mesh_data.len(); self.prepared_submit_node_mesh_data .push(PreparedSubmitNodeMeshData { material_pass_resource: material_pass_resource.clone(), per_view_descriptor_set, per_material_descriptor_set, per_instance_descriptor_set, frame_node_index: view_node.frame_node_index(), mesh_part_index, }); submit_node_index }
} impl WriteJob for MeshWriteJob { fn render_element( &self, write_context: &mut RenderJobWriteContext, _view: &RenderView, render_phase_index: RenderPhaseIndex, index: SubmitNodeId, ) -> RafxResult<()> { profiling::scope!(super::RENDER_ELEMENT_SCOPE_NAME); let command_buffer = &write_context.command_buffer; let render_node_data = &self.prepared_submit_node_mesh_data[index as usize]; let frame_node_data: &ExtractedFrameNodeMeshData = self.extracted_frame_node_mesh_data [render_node_data.frame_node_index as usize] .as_ref() .unwrap(); let mesh_part = &frame_node_data.mesh_asset.inner.mesh_parts [render_node_data.mesh_part_index] .as_ref() .unwrap(); let pipeline = write_context .resource_context .graphics_pipeline_cache() .get_or_create_graphics_pipeline( render_phase_index, &render_node_data.material_pass_resource, &write_context.render_target_meta, &*MESH_VERTEX_LAYOUT, )?; command_buffer.cmd_bind_pipeline(&pipeline.get_raw().pipeline)?; render_node_data .per_view_descriptor_set .bind(command_buffer)?; if let Some(per_material_descriptor_set) = &render_node_data.per_material_descriptor_set { per_material_descriptor_set.bind(command_buffer).unwrap(); } render_node_data .per_instance_descriptor_set .bind(command_buffer)?; command_buffer.cmd_bind_vertex_buffers( 0, &[RafxVertexBufferBinding { buffer: &frame_node_data .mesh_asset .inner .vertex_buffer .get_raw() .buffer, byte_offset: mesh_part.vertex_buffer_offset_in_bytes as u64, }], )?; command_buffer.cmd_bind_index_buffer(&RafxIndexBufferBinding { buffer: &frame_node_data .mesh_asset .inner .index_buffer .get_raw() .buffer, byte_offset: mesh_part.index_buffer_offset_in_bytes as u64, index_type: RafxIndexType::Uint16, })?; command_buffer.cmd_draw_indexed( mesh_part.index_buffer_size_in_bytes / 2, 0, 0, )?; Ok(()) } fn feature_debug_name(&self) -> &'static str { super::render_feature_debug_name() } fn feature_index(&self) -> RenderFeatureIndex { super::render_feature_index() } }
pub fn set_extracted_frame_node_mesh_data( &mut self, extracted_frame_node_mesh_data: Vec<Option<ExtractedFrameNodeMeshData>>, ) { self.extracted_frame_node_mesh_data = extracted_frame_node_mesh_data; }
function_block-full_function
[ { "content": "pub fn default_daemon() -> distill::daemon::AssetDaemon {\n\n use crate::assets::*;\n\n\n\n distill::daemon::AssetDaemon::default()\n\n .with_importer(\"sampler\", SamplerImporter)\n\n .with_importer(\"material\", MaterialImporter)\n\n .with_importer(\"materialinstance\", MaterialInstanceImporter)\n\n .with_importer(\"compute\", ComputePipelineImporter)\n\n .with_importer(\"cookedshaderpackage\", ShaderImporterCooked)\n\n .with_importer(\"png\", ImageImporter)\n\n .with_importer(\"jpg\", ImageImporter)\n\n .with_importer(\"jpeg\", ImageImporter)\n\n .with_importer(\"tga\", ImageImporter)\n\n .with_importer(\"bmp\", ImageImporter)\n\n .with_importer(\"basis\", BasisImageImporter)\n\n}\n", "file_path": "rafx-assets/src/distill_impl/mod.rs", "rank": 0, "score": 230337.676184978 }, { "content": "fn recursive_modify_user_type<F: Fn(&mut UserType) -> bool>(\n\n user_types: &mut FnvHashMap<String, UserType>,\n\n type_name: &str,\n\n f: &F,\n\n) {\n\n let user_type = user_types.get_mut(type_name);\n\n let recurse = if let Some(user_type) = user_type {\n\n (f)(user_type)\n\n } else {\n\n // for now skip types we don't recognize\n\n return;\n\n };\n\n\n\n if recurse {\n\n if let Some(fields) = user_types.get(type_name).map(|x| x.fields.clone()) {\n\n for field in &*fields {\n\n recursive_modify_user_type(user_types, &field.type_name, f);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 1, "score": 222516.95648905967 }, { "content": "pub fn push_view_indexed_value<T: Clone>(\n\n vec: &mut Vec<Option<T>>,\n\n view_index: RenderViewIndex,\n\n value: T,\n\n) {\n\n // Grow the array if necessary\n\n vec.resize(vec.len().max(view_index as usize + 1), None);\n\n vec[view_index as usize] = Some(value);\n\n}\n\n\n\n// pub struct FeatureCommandWriterSet {\n\n// prepare_jobs: Vec<Box<dyn FeatureCommandWriter>>,\n\n// }\n\n\n\npub struct PreparedRenderData {\n\n feature_writers: Vec<Option<Box<dyn WriteJob>>>,\n\n submit_nodes: 
MergedFrameSubmitNodes,\n\n}\n\n\n\nimpl PreparedRenderData {\n", "file_path": "rafx-framework/src/nodes/jobs/write.rs", "rank": 2, "score": 215173.62244893826 }, { "content": "//\n\n// Static functions\n\n//\n\npub fn handle_load_result<AssetT: Clone>(\n\n load_op: AssetLoadOp,\n\n loaded_asset: RafxResult<AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n result_tx: Sender<AssetT>,\n\n) {\n\n match loaded_asset {\n\n Ok(loaded_asset) => {\n\n asset_lookup.set_uncommitted(load_op.load_handle(), loaded_asset.clone());\n\n result_tx.send(loaded_asset).unwrap();\n\n load_op.complete()\n\n }\n\n Err(err) => {\n\n load_op.error(err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 3, "score": 212761.67188202176 }, { "content": "fn random_color(rng: &mut impl Rng) -> Vec3 {\n\n let r = rng.gen_range(0.2, 1.0);\n\n let g = rng.gen_range(0.2, 1.0);\n\n let b = rng.gen_range(0.2, 1.0);\n\n let v = Vec3::new(r, g, b);\n\n v.normalize()\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 4, "score": 211149.87752146527 }, { "content": "pub fn rendering_destroy(resources: &mut Resources) -> RafxResult<()> {\n\n // Destroy these first\n\n {\n\n {\n\n let swapchain_helper = resources.remove::<RafxSwapchainHelper>().unwrap();\n\n let mut asset_manager = resources.get_mut::<AssetManager>().unwrap();\n\n let game_renderer = resources.get::<Renderer>().unwrap();\n\n SwapchainHandler::destroy_swapchain(\n\n swapchain_helper,\n\n &mut *asset_manager,\n\n &*game_renderer,\n\n )?;\n\n }\n\n\n\n resources.remove::<Renderer>();\n\n\n\n #[cfg(feature = \"use-imgui\")]\n\n {\n\n use crate::features::imgui::ImGuiRendererPlugin;\n\n ImGuiRendererPlugin::legion_destroy(resources);\n", "file_path": "demo/src/init.rs", "rank": 5, "score": 210130.19350559515 }, { "content": "// https://graphics.stanford.edu/~seander/bithacks.html#RoundUpPowerOf2\n\nfn next_power_of_2(mut v: usize) -> usize {\n\n v -= 1;\n\n v |= v >> 1;\n\n v |= v >> 
2;\n\n v |= v >> 4;\n\n v |= v >> 8;\n\n v |= v >> 16;\n\n v |= v >> 32;\n\n v += 1;\n\n v\n\n}\n\n\n\n// Structs can be used in one of these three ways. The usage will determine the memory layout\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 6, "score": 205280.1640580434 }, { "content": "pub fn slice_size_in_bytes<T>(slice: &[T]) -> usize {\n\n let range = slice.as_ptr_range();\n\n (range.end as *const u8 as usize) - (range.start as *const u8 as usize)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime<T>(value: &T) -> &'static T {\n\n std::mem::transmute(value)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime_mut<T>(value: &mut T) -> &'static mut T {\n\n std::mem::transmute(value)\n\n}\n", "file_path": "rafx-base/src/memory.rs", "rank": 7, "score": 183680.67535602697 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct PositionColorVertex {\n\n position: [f32; 2],\n\n color: [f32; 3],\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 8, "score": 183110.2785423807 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct PositionColorVertex {\n\n position: [f32; 2],\n\n color: [f32; 3],\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 9, "score": 183110.2785423807 }, { "content": "pub fn vertex_buffer_adjusted_buffer_index(binding: u32) -> NSUInteger {\n\n // Argument buffers will be 0-4\n\n // vertex buffers will be 30 - n\n\n (30 - binding) as _\n\n}\n\n\n\npub(crate) fn resource_type_mtl_data_type(resource_type: RafxResourceType) -> Option<MTLDataType> {\n\n if resource_type.intersects(\n\n RafxResourceType::UNIFORM_BUFFER\n\n | RafxResourceType::BUFFER\n\n | RafxResourceType::BUFFER_READ_WRITE,\n\n ) {\n\n Some(MTLDataType::Pointer)\n\n } else if resource_type\n\n .intersects(RafxResourceType::TEXTURE | RafxResourceType::TEXTURE_READ_WRITE)\n\n {\n\n Some(MTLDataType::Texture)\n\n } else if resource_type.intersects(RafxResourceType::SAMPLER) {\n\n 
Some(MTLDataType::Sampler)\n\n } else {\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 10, "score": 174881.68365051312 }, { "content": "// A simple loader that just deserializes data\n\nstruct DefaultAssetLoader<AssetDataT>\n\nwhere\n\n AssetDataT: TypeUuid + Send + for<'a> serde::Deserialize<'a> + 'static,\n\n{\n\n phantom_data: PhantomData<AssetDataT>,\n\n}\n\n\n\nimpl<AssetDataT> Default for DefaultAssetLoader<AssetDataT>\n\nwhere\n\n AssetDataT: TypeUuid + Send + for<'a> serde::Deserialize<'a> + 'static,\n\n{\n\n fn default() -> Self {\n\n DefaultAssetLoader {\n\n phantom_data: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<AssetDataT> DynAssetLoader<AssetDataT> for DefaultAssetLoader<AssetDataT>\n\nwhere\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 11, "score": 169533.07680070266 }, { "content": "fn parse_ron_or_default<'de, T: Default + Deserialize<'de>>(data: &'de str) -> Result<T, String> {\n\n if !data.is_empty() {\n\n ron::de::from_str(&data)\n\n .map_err(|e| format!(\"Failed to parse annotation data. 
It should be an instance of '{}' encoded as RON.\\n Error: '{}'\\n Annotation Data: '{}'\", core::any::type_name::<T>(), e, data))\n\n } else {\n\n Ok(Default::default())\n\n }\n\n}\n\n\n\n#[derive(Default, Debug)]\n\npub(crate) struct StructAnnotations {\n\n pub(crate) export: Option<ExportAnnotation>,\n\n}\n\n\n\nimpl StructAnnotations {\n\n fn new(annotations: &[AnnotationText]) -> Result<Self, String> {\n\n let mut parsed_annotations = StructAnnotations::default();\n\n\n\n for annotation in annotations {\n\n let mut position = 0;\n", "file_path": "rafx-shader-processor/src/parse_declarations.rs", "rank": 12, "score": 168303.68760351243 }, { "content": "pub fn logging_init() {\n\n #[cfg(not(debug_assertions))]\n\n let log_level = log::LevelFilter::Info;\n\n #[cfg(debug_assertions)]\n\n let log_level = log::LevelFilter::Debug;\n\n\n\n // Setup logging\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_module(\n\n \"rafx_assets::resources::descriptor_sets\",\n\n log::LevelFilter::Info,\n\n )\n\n .filter_module(\"rafx_framework::nodes\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_framework::visibility\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_assets::graph\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::graph\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::resources\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::graph::graph_plan\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_api\", log::LevelFilter::Debug)\n", "file_path": "demo/src/main.rs", "rank": 13, "score": 163039.09751712432 }, { "content": "pub fn rendering_init(\n\n resources: &mut Resources,\n\n sdl2_window: &sdl2::video::Window,\n\n asset_source: AssetSource,\n\n) -> RafxResult<()> {\n\n resources.insert(StaticVisibilityNodeSet::default());\n\n resources.insert(DynamicVisibilityNodeSet::default());\n\n resources.insert(ViewportsResource::default());\n\n\n\n 
MeshRendererPlugin::legion_init(resources);\n\n SpriteRendererPlugin::legion_init(resources);\n\n SkyboxRendererPlugin::legion_init(resources);\n\n TileLayerRendererPlugin::legion_init(resources);\n\n Debug3DRendererPlugin::legion_init(resources);\n\n TextRendererPlugin::legion_init(resources);\n\n\n\n //\n\n // Create the api. GPU programming is fundamentally unsafe, so all rafx APIs should be\n\n // considered unsafe. However, rafx APIs are only gated by unsafe if they can cause undefined\n\n // behavior on the CPU for reasons other than interacting with the GPU.\n", "file_path": "demo/src/init.rs", "rank": 14, "score": 163039.09751712432 }, { "content": "// Texture must be in COPY_SRC state\n\n// After this call, it will be in COPY_DST state\n\n// Vulkan requires this on a graphics queue. Metal allows this on any queue.\n\npub fn generate_mipmaps(\n\n command_buffer: &RafxCommandBuffer,\n\n _texture: &RafxTexture,\n\n) -> RafxResult<()> {\n\n match command_buffer {\n\n #[cfg(feature = \"rafx-vulkan\")]\n\n RafxCommandBuffer::Vk(inner) => generate_mipmaps_vk(inner, _texture),\n\n #[cfg(feature = \"rafx-metal\")]\n\n RafxCommandBuffer::Metal(inner) => generate_mipmaps_metal(inner, _texture),\n\n #[cfg(any(\n\n feature = \"rafx-empty\",\n\n not(any(feature = \"rafx-metal\", feature = \"rafx-vulkan\"))\n\n ))]\n\n RafxCommandBuffer::Empty(_) => unimplemented!(),\n\n }\n\n}\n\n\n\n// This custom path for metal can be removed after I implement cmd_blit\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 15, "score": 158931.15845785447 }, { "content": "// This function is a little more complex to use than enqueue_load_images but can support cubemaps\n\n// We create a layer for each layer_image_assignment, and copy from the decoded_image\n\n// at the index matching the assignment\n\npub fn enqueue_load_image(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> 
Result<RafxTexture, RafxUploadError> {\n\n // All images must have identical mip level count, sizes, etc.\n\n #[cfg(debug_assertions)]\n\n image_data.verify_state();\n\n\n\n //\n\n // Determine the total amount of data we need to upload and verify there is enough space\n\n //\n\n let bytes_required = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as u64);\n\n\n\n let has_space_available = upload.has_space_available(\n\n bytes_required as usize,\n\n IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as usize,\n\n 1,\n\n );\n", "file_path": "rafx-assets/src/image_upload.rs", "rank": 16, "score": 157000.50372808645 }, { "content": "pub fn load_image_blocking(\n\n device_context: &RafxDeviceContext,\n\n transfer_queue: &RafxQueue,\n\n dst_queue: &RafxQueue,\n\n upload_buffer_max_size: u64,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> Result<RafxTexture, RafxUploadError> {\n\n let total_size = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT);\n\n if upload_buffer_max_size < total_size {\n\n Err(RafxUploadError::BufferFull)?;\n\n }\n\n\n\n let mut upload = RafxTransferUpload::new(\n\n device_context,\n\n transfer_queue,\n\n dst_queue,\n\n upload_buffer_max_size,\n\n )?;\n\n\n\n let texture = enqueue_load_image(device_context, &mut upload, image_data, params)?;\n\n\n\n upload.block_until_upload_complete()?;\n\n\n\n Ok(texture)\n\n}\n", "file_path": "rafx-assets/src/image_upload.rs", "rank": 17, "score": 156995.18767116478 }, { "content": "pub fn round_size_up_to_alignment_u64(\n\n size: u64,\n\n required_alignment: u64,\n\n) -> u64 {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 18, "score": 156995.18767116478 }, { "content": "pub fn round_size_up_to_alignment_u32(\n\n size: u32,\n\n required_alignment: u32,\n\n) -> u32 {\n\n assert!(required_alignment > 0);\n\n ((size + 
required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 19, "score": 156995.18767116478 }, { "content": "pub fn enqueue_load_buffer(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n // transfer_queue_family_index: u32,\n\n // dst_queue_family_index: u32,\n\n data: &[u8],\n\n) -> Result<RafxBuffer, RafxUploadError> {\n\n // Arbitrary, not sure if there is any requirement\n\n const REQUIRED_ALIGNMENT: usize = 16;\n\n\n\n // Push data into the staging buffer\n\n let offset = upload.push(data, REQUIRED_ALIGNMENT)?;\n\n let size = data.len() as u64;\n\n\n\n // Allocate a GPU buffer\n\n let dst_buffer = device_context.create_buffer(&RafxBufferDef {\n\n size,\n\n memory_usage: RafxMemoryUsage::GpuOnly,\n\n queue_type: upload.dst_queue().queue_type(),\n\n resource_type: RafxResourceType::VERTEX_BUFFER | RafxResourceType::INDEX_BUFFER,\n", "file_path": "rafx-assets/src/buffer_upload.rs", "rank": 20, "score": 156995.18767116478 }, { "content": "/// Creates a right-handed perspective projection matrix with [0,1] depth range.\n\npub fn perspective_rh(\n\n fov_y_radians: f32,\n\n aspect_ratio: f32,\n\n z_near: f32,\n\n z_far: f32,\n\n) -> glam::Mat4 {\n\n debug_assert!(z_near > 0.0 && z_far > 0.0);\n\n let (sin_fov, cos_fov) = (0.5 * fov_y_radians).sin_cos();\n\n let h = cos_fov / sin_fov;\n\n let w = h / aspect_ratio;\n\n let r = z_far / (z_near - z_far);\n\n glam::Mat4::from_cols(\n\n glam::Vec4::new(w, 0.0, 0.0, 0.0),\n\n glam::Vec4::new(0.0, h, 0.0, 0.0),\n\n glam::Vec4::new(0.0, 0.0, r, -1.0),\n\n glam::Vec4::new(0.0, 0.0, r * z_near, 0.0),\n\n )\n\n}\n\n\n", "file_path": "demo/src/features/mesh/shadow_map_resource.rs", "rank": 21, "score": 155131.97022529252 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to 
create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Demo\", 900, 600)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "demo/src/init.rs", "rank": 22, "score": 154268.28108133783 }, { "content": "pub fn do_find_supported_format(\n\n instance: &ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n candidates: &[RafxFormat],\n\n image_tiling: vk::ImageTiling,\n\n features: vk::FormatFeatureFlags,\n\n) -> Option<RafxFormat> {\n\n for &candidate in candidates {\n\n let props = unsafe {\n\n instance.get_physical_device_format_properties(physical_device, candidate.into())\n\n };\n\n\n\n let is_supported = match image_tiling {\n\n vk::ImageTiling::LINEAR => (props.linear_tiling_features & features) == features,\n\n vk::ImageTiling::OPTIMAL => (props.optimal_tiling_features & features) == features,\n\n _ => unimplemented!(),\n\n };\n\n\n\n if is_supported {\n\n return Some(candidate);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/device_context.rs", "rank": 23, "score": 153337.4806769156 }, { "content": "pub fn create_font_texture_with_ranges(\n\n font_data: &[u8],\n\n character_ranges_to_include: &[(u32, u32)],\n\n size: f32,\n\n margin: u32,\n\n) -> Option<FontTextureWithMeta> {\n\n // let character_ranges_to_include = vec![\n\n // (32, 128),\n\n // //(0x4e00, 0x5FCC)\n\n // ];\n\n\n\n let mut characters_to_include = vec![];\n\n\n\n //\n\n // Iterate codepoints in the font and find the characters within the given ranges\n\n //\n\n let face = ttf_parser::Face::from_slice(font_data, 0).unwrap();\n\n\n\n for subtable in face.character_mapping_subtables() {\n\n subtable.codepoints(|codepoint| {\n", "file_path": "demo/src/assets/font/font_cooking.rs", "rank": 24, "score": 153337.4806769156 }, { "content": "pub 
fn parse_shader_source_recursive(\n\n file_to_process: &FileToProcess,\n\n declarations: &mut Vec<DeclarationText>,\n\n included_files: &mut FnvHashSet<PathBuf>,\n\n) -> Result<(), String> {\n\n log::trace!(\"parse_shader_source_recursive {:?}\", file_to_process);\n\n let resolved_include = super::include_impl(\n\n &file_to_process.path,\n\n file_to_process.include_type,\n\n &file_to_process.requested_from,\n\n file_to_process.include_depth,\n\n )?;\n\n\n\n if included_files.contains(&resolved_include.resolved_path) {\n\n return Ok(());\n\n }\n\n\n\n included_files.insert(resolved_include.resolved_path.clone());\n\n\n\n let mut resolved_file_paths = file_to_process.clone();\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 25, "score": 153337.4806769156 }, { "content": "/// The max number of mip levels an image can have given its size\n\npub fn mip_level_max_count_for_image_size(\n\n width: u32,\n\n height: u32,\n\n) -> u32 {\n\n let max_dimension = std::cmp::max(width, height);\n\n (max_dimension as f32).log2().floor() as u32 + 1\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 26, "score": 151607.9851752167 }, { "content": "pub fn blend_state_to_create_info(\n\n blend_state: &RafxBlendState,\n\n color_attachment_count: usize,\n\n) -> RafxBlendStateVkCreateInfo {\n\n let mut blend_attachments_states = vec![];\n\n\n\n blend_state.verify(color_attachment_count);\n\n\n\n if let Some(first_attachment) = blend_state.render_target_blend_states.first() {\n\n for attachment_index in 0..color_attachment_count {\n\n let attachment_state = if blend_state\n\n .render_target_mask\n\n .intersects(RafxBlendStateTargets::from_bits(1 << attachment_index).unwrap())\n\n {\n\n if blend_state.independent_blend {\n\n blend_state_render_target_to_create_info(\n\n &blend_state.render_target_blend_states[attachment_index],\n\n )\n\n } else {\n\n blend_state_render_target_to_create_info(first_attachment)\n", "file_path": 
"rafx-api/src/backends/vulkan/internal/util.rs", "rank": 27, "score": 151607.9851752167 }, { "content": "pub fn resource_type_to_descriptor_type(\n\n resource_type: RafxResourceType\n\n) -> Option<vk::DescriptorType> {\n\n match resource_type {\n\n RafxResourceType::SAMPLER => Some(vk::DescriptorType::SAMPLER),\n\n RafxResourceType::TEXTURE => Some(vk::DescriptorType::SAMPLED_IMAGE),\n\n RafxResourceType::UNIFORM_BUFFER => Some(vk::DescriptorType::UNIFORM_BUFFER),\n\n RafxResourceType::TEXTURE_READ_WRITE => Some(vk::DescriptorType::STORAGE_IMAGE),\n\n RafxResourceType::BUFFER => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::INPUT_ATTACHMENT => Some(vk::DescriptorType::INPUT_ATTACHMENT),\n\n RafxResourceType::TEXEL_BUFFER => Some(vk::DescriptorType::UNIFORM_TEXEL_BUFFER),\n\n RafxResourceType::TEXEL_BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_TEXEL_BUFFER),\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n Some(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/internal/util.rs", "rank": 28, "score": 151607.9851752167 }, { "content": "pub fn get_descriptor_set_element_write(\n\n descriptor_set_layout: &ResourceArc<DescriptorSetLayoutResource>,\n\n key: &DescriptorSetElementKey,\n\n) -> Option<DescriptorSetElementWrite> {\n\n for binding in &descriptor_set_layout\n\n .get_raw()\n\n .descriptor_set_layout_def\n\n .bindings\n\n {\n\n let element_count = binding.resource.element_count_normalized() as usize;\n\n if key.dst_binding != binding.resource.binding || key.array_index >= element_count {\n\n continue;\n\n }\n\n\n\n return Some(DescriptorSetElementWrite {\n\n has_immutable_sampler: binding.immutable_samplers.is_some(),\n\n descriptor_type: binding.resource.resource_type,\n\n image_info: DescriptorSetWriteElementImage::default(),\n\n buffer_info: 
DescriptorSetWriteElementBuffer::default(),\n\n });\n\n }\n\n\n\n None\n\n}\n", "file_path": "rafx-framework/src/resources/descriptor_sets/mod.rs", "rank": 29, "score": 149940.0155287742 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 30, "score": 148674.603300399 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 31, "score": 148674.603300399 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to 
create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 32, "score": 148674.603300399 }, { "content": "#[cfg(feature = \"use-imgui\")]\n\npub fn create_font_atlas_image_view(\n\n imgui_font_atlas_data: ImguiFontAtlasData,\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n dyn_resource_allocator: &DynResourceAllocatorSet,\n\n) -> RafxResult<ResourceArc<ImageViewResource>> {\n\n let image_data = GpuImageData::new_simple(\n\n imgui_font_atlas_data.width,\n\n imgui_font_atlas_data.height,\n\n GpuImageDataColorSpace::Linear.rgba8(),\n\n imgui_font_atlas_data.data,\n\n );\n\n\n\n let texture = image_upload::enqueue_load_image(\n\n device_context,\n\n upload,\n\n &image_data,\n\n ImageUploadParams {\n\n generate_mips: false,\n\n ..Default::default()\n\n },\n\n )\n\n .map_err(|x| Into::<RafxError>::into(x))?;\n\n\n\n let image = dyn_resource_allocator.insert_texture(texture);\n\n\n\n Ok(dyn_resource_allocator.insert_image_view(&image, None)?)\n\n}\n", "file_path": "demo/src/features/imgui/internal/imgui_font_atlas.rs", "rank": 33, "score": 148336.26615618137 }, { "content": "pub trait RenderGraphGenerator: 'static + Send {\n\n fn generate_render_graph(\n\n &self,\n\n asset_manager: &AssetManager,\n\n swapchain_image: ResourceArc<ImageViewResource>,\n\n main_view: RenderView,\n\n extract_resources: &ExtractResources,\n\n render_resources: &RenderResources,\n\n ) -> RafxResult<PreparedRenderGraph>;\n\n}\n", "file_path": "rafx-renderer/src/render_graph_generator.rs", "rank": 34, "score": 148147.9023902206 }, { "content": "pub fn color_render_target_binding_mtl_store_op(\n\n color_binding: &RafxColorRenderTargetBinding\n\n) -> MTLStoreAction {\n\n let resolve = color_binding.resolve_target.is_some()\n\n && color_binding.resolve_store_op == RafxStoreOp::Store;\n\n if color_binding.store_op == RafxStoreOp::Store {\n\n if resolve {\n\n 
MTLStoreAction::StoreAndMultisampleResolve\n\n } else {\n\n MTLStoreAction::Store\n\n }\n\n } else {\n\n if resolve {\n\n MTLStoreAction::MultisampleResolve\n\n } else {\n\n MTLStoreAction::DontCare\n\n }\n\n }\n\n}\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 35, "score": 146775.97244244296 }, { "content": "pub fn create_uninitialized_write_set_for_layout(\n\n layout: &DescriptorSetLayout\n\n) -> DescriptorSetWriteSet {\n\n let mut write_set = DescriptorSetWriteSet::default();\n\n for binding in &layout.bindings {\n\n for array_index in 0..binding.resource.element_count_normalized() {\n\n let element_write = DescriptorSetElementWrite {\n\n has_immutable_sampler: binding.immutable_samplers.is_some(),\n\n descriptor_type: binding.resource.resource_type,\n\n image_info: DescriptorSetWriteElementImage::default(),\n\n buffer_info: DescriptorSetWriteElementBuffer::default(),\n\n };\n\n\n\n let key = DescriptorSetElementKey {\n\n dst_binding: binding.resource.binding as u32,\n\n array_index: array_index as usize,\n\n };\n\n\n\n write_set.elements.insert(key, element_write);\n\n }\n\n }\n\n\n\n write_set\n\n}\n", "file_path": "rafx-framework/src/resources/descriptor_sets/descriptor_write_set.rs", "rank": 36, "score": 146775.97244244296 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", 800, 600)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n", "file_path": "rafx/examples/nodes_api_design/nodes_api_design.rs", "rank": 37, "score": 145277.1381522576 }, { "content": "pub fn run(args: &DemoArgs) -> RafxResult<()> {\n\n 
#[cfg(feature = \"profile-with-tracy\")]\n\n profiling::tracy_client::set_thread_name(\"Main Thread\");\n\n #[cfg(feature = \"profile-with-optick\")]\n\n profiling::optick::register_thread(\"Main Thread\");\n\n\n\n let mut scene_manager = SceneManager::default();\n\n\n\n let mut resources = Resources::default();\n\n resources.insert(TimeState::new());\n\n resources.insert(RenderOptions::default_2d());\n\n resources.insert(DebugUiState::default());\n\n\n\n let asset_source = if let Some(packfile) = &args.packfile {\n\n AssetSource::Packfile(packfile.to_path_buf())\n\n } else {\n\n AssetSource::Daemon {\n\n external_daemon: args.external_daemon,\n\n daemon_args: args.daemon_args.clone().into(),\n\n }\n", "file_path": "demo/src/lib.rs", "rank": 38, "score": 143085.77404779545 }, { "content": "/// Any data that can be stored in the ResourceMap must implement this trait. There is a blanket\n\n/// implementation provided for all compatible types\n\npub trait Resource: Downcast + Send + Sync + 'static {}\n\n\n\nimpl<T> Resource for T where T: Downcast + Send + Sync {}\n\n\n\n// Used for downcastic\n\nmod __resource_mopafy_scope {\n\n #![allow(clippy::all)]\n\n\n\n use super::Resource;\n\n\n\n downcast_rs::impl_downcast!(Resource);\n\n}\n\n\n\n/// Builder for creating a ResourceMap\n\npub struct ResourceMapBuilder {\n\n /// The ResourceMap being built\n\n resource_map: ResourceMap,\n\n}\n\n\n\nimpl ResourceMapBuilder {\n", "file_path": "rafx-base/src/resource_map.rs", "rank": 39, "score": 142863.11377653183 }, { "content": "#[profiling::function]\n\nfn update_main_view_2d(viewports_resource: &mut ViewportsResource) {\n\n let main_camera_phase_mask = RenderPhaseMaskBuilder::default()\n\n .add_render_phase::<DepthPrepassRenderPhase>()\n\n .add_render_phase::<OpaqueRenderPhase>()\n\n .add_render_phase::<TransparentRenderPhase>()\n\n .add_render_phase::<UiRenderPhase>()\n\n .build();\n\n\n\n let main_camera_feature_mask = RenderFeatureMaskBuilder::default()\n\n 
.add_render_feature::<ImGuiRenderFeature>()\n\n .add_render_feature::<SkyboxRenderFeature>()\n\n .add_render_feature::<SpriteRenderFeature>()\n\n .add_render_feature::<TextRenderFeature>()\n\n .build();\n\n\n\n const CAMERA_Z: f32 = 1000.0;\n\n\n\n // Round to a whole number\n\n let mut eye = Vec3::new(0., 0., CAMERA_Z);\n\n\n", "file_path": "demo/src/scenes/rafxmark_scene.rs", "rank": 40, "score": 142326.98171621974 }, { "content": "fn rust_header(rust_code: &mut Vec<String>) {\n\n rust_code.push(\"// This code is auto-generated by the shader processor.\\n\\n\".to_string());\n\n\n\n rust_code.push(\"#[allow(unused_imports)]\\n\".to_string());\n\n rust_code.push(\"use rafx_framework::RafxResult;\\n\\n\".to_string());\n\n rust_code.push(\"#[allow(unused_imports)]\\n\".to_string());\n\n rust_code.push(\"use rafx_framework::{ResourceArc, ImageViewResource, DynDescriptorSet, DescriptorSetAllocator, DescriptorSetInitializer, DescriptorSetArc, DescriptorSetWriter, DescriptorSetWriterContext, DescriptorSetBindings};\\n\\n\".to_string());\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 41, "score": 140515.23436234723 }, { "content": "pub fn init_distill_daemon(connect_string: String) -> AssetResource {\n\n let rpc_loader = RpcIO::new(connect_string).unwrap();\n\n let loader = Loader::new(Box::new(rpc_loader));\n\n let resolver = Box::new(DefaultIndirectionResolver);\n\n AssetResource::new(loader, resolver)\n\n}\n\n\n", "file_path": "rafx-renderer/src/daemon.rs", "rank": 42, "score": 137893.8193512771 }, { "content": "pub fn handle_commit_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_commit_requests() {\n\n log::trace!(\n\n \"commit asset {:?} {}\",\n\n request.load_handle,\n\n core::any::type_name::<AssetDataT>()\n\n );\n\n asset_lookup.commit(request.load_handle);\n\n }\n\n}\n\n\n", "file_path": 
"rafx-assets/src/assets/asset_type_handler.rs", "rank": 43, "score": 136644.1253893902 }, { "content": "pub fn handle_free_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_free_requests() {\n\n log::trace!(\n\n \"free asset {:?} {}\",\n\n request.load_handle,\n\n core::any::type_name::<AssetDataT>()\n\n );\n\n asset_lookup.free(request.load_handle);\n\n }\n\n}\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 44, "score": 136644.1253893902 }, { "content": "// Used to catch asset changes and upload them to the GPU (or some other system)\n\npub trait ResourceLoader<AssetDataT, AssetT>: 'static + Send\n\nwhere\n\n AssetDataT: for<'a> serde::Deserialize<'a>,\n\n AssetT: 'static + Send,\n\n{\n\n fn update_asset(\n\n &mut self,\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n asset: AssetDataT,\n\n ) -> ResourceLoadResult<AssetT>;\n\n\n\n fn commit_asset_version(\n\n &mut self,\n\n load_handle: LoadHandle,\n\n );\n\n\n\n fn free(\n\n &mut self,\n\n load_handle: LoadHandle,\n\n );\n\n}\n", "file_path": "rafx-assets/src/resource_loader.rs", "rank": 45, "score": 136285.76357259776 }, { "content": "pub fn any_as_bytes<T: Copy>(data: &T) -> &[u8] {\n\n let ptr: *const T = data;\n\n let ptr = ptr as *const u8;\n\n let slice: &[u8] = unsafe { std::slice::from_raw_parts(ptr, std::mem::size_of::<T>()) };\n\n\n\n slice\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 46, "score": 135708.42550578187 }, { "content": "#[derive(Clone, Debug, Copy)]\n\nstruct QuadVertex {\n\n pub pos: [f32; 3],\n\n pub tex_coord: [f32; 2],\n\n}\n\n\n\n/// Static data the represents a \"unit\" quad\n\nconst QUAD_VERTEX_LIST: [QuadVertex; 4] = [\n\n // Top Right\n\n QuadVertex {\n\n pos: [0.5, 0.5, 0.0],\n\n tex_coord: [1.0, 0.0],\n\n },\n\n // Top Left\n\n QuadVertex {\n\n pos: [-0.5, 0.5, 0.0],\n\n tex_coord: [0.0, 0.0],\n\n },\n\n // 
Bottom Right\n\n QuadVertex {\n\n pos: [0.5, -0.5, 0.0],\n", "file_path": "demo/src/features/sprite/jobs/prepare.rs", "rank": 47, "score": 135253.53887000922 }, { "content": "struct InProgressUploadDebugInfo {\n\n upload_id: usize,\n\n start_time: std::time::Instant,\n\n size: u64,\n\n image_count: usize,\n\n buffer_count: usize,\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/upload.rs", "rank": 48, "score": 133363.6720707022 }, { "content": "struct ViewPacketBuilder {\n\n inner: Mutex<ViewPacketBuilderInner>,\n\n}\n\n\n\nimpl ViewPacketBuilder {\n\n pub fn new(feature_count: RenderFeatureCount) -> Self {\n\n let view_nodes = (0..feature_count).map(|_| Vec::new()).collect();\n\n\n\n let inner = Mutex::new(ViewPacketBuilderInner { view_nodes });\n\n\n\n ViewPacketBuilder { inner }\n\n }\n\n\n\n pub fn append_view_node(\n\n &self,\n\n handle: GenericRenderNodeHandle,\n\n frame_node_index: u32,\n\n ) {\n\n let mut guard = self.inner.lock().unwrap();\n\n guard.view_nodes[handle.render_feature_index() as usize].push(PerViewNode {\n", "file_path": "rafx-framework/src/nodes/frame_packet.rs", "rank": 49, "score": 133286.20563207928 }, { "content": "pub fn what_to_bind(element_write: &DescriptorSetElementWrite) -> WhatToBind {\n\n let mut what = WhatToBind::default();\n\n\n\n // See https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkWriteDescriptorSet.html\n\n match element_write.descriptor_type {\n\n RafxResourceType::SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n }\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n what.bind_images = true;\n\n }\n\n RafxResourceType::TEXTURE => {\n\n what.bind_images = true;\n\n }\n\n RafxResourceType::UNIFORM_BUFFER => {\n\n what.bind_buffers = true;\n\n }\n\n RafxResourceType::BUFFER => {\n\n what.bind_buffers = true;\n\n }\n\n _ => unimplemented!(),\n\n }\n\n\n\n what\n\n}\n\n\n", "file_path": 
"rafx-framework/src/resources/descriptor_sets/mod.rs", "rank": 50, "score": 133227.89749387 }, { "content": "fn process_input(event_pump: &mut sdl2::EventPump) -> bool {\n\n use sdl2::event::Event;\n\n use sdl2::keyboard::Keycode;\n\n\n\n for event in event_pump.poll_iter() {\n\n //log::trace!(\"{:?}\", event);\n\n match event {\n\n //\n\n // Halt if the user requests to close the window\n\n //\n\n Event::Quit { .. } => return false,\n\n\n\n //\n\n // Close if the escape key is hit\n\n //\n\n Event::KeyDown {\n\n keycode: Some(keycode),\n\n keymod: _modifiers,\n\n ..\n\n } => {\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 51, "score": 132649.7771176952 }, { "content": "fn process_input(event_pump: &mut sdl2::EventPump) -> bool {\n\n use sdl2::event::Event;\n\n use sdl2::keyboard::Keycode;\n\n\n\n for event in event_pump.poll_iter() {\n\n //log::trace!(\"{:?}\", event);\n\n match event {\n\n //\n\n // Halt if the user requests to close the window\n\n //\n\n Event::Quit { .. } => return false,\n\n\n\n //\n\n // Close if the escape key is hit\n\n //\n\n Event::KeyDown {\n\n keycode: Some(keycode),\n\n keymod: _modifiers,\n\n ..\n\n } => {\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 52, "score": 132649.7771176952 }, { "content": "fn process_input(event_pump: &mut sdl2::EventPump) -> bool {\n\n use sdl2::event::Event;\n\n use sdl2::keyboard::Keycode;\n\n\n\n for event in event_pump.poll_iter() {\n\n //log::trace!(\"{:?}\", event);\n\n match event {\n\n //\n\n // Halt if the user requests to close the window\n\n //\n\n Event::Quit { .. 
} => return false,\n\n\n\n //\n\n // Close if the escape key is hit\n\n //\n\n Event::KeyDown {\n\n keycode: Some(keycode),\n\n keymod: _modifiers,\n\n ..\n\n } => {\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 53, "score": 132649.7771176952 }, { "content": "fn generate_struct(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n type_name: &str,\n\n user_type: &UserType,\n\n layout: MemoryLayout,\n\n) -> Result<GenerateStructResult, String> {\n\n //println!(\"Generate struct {}\", type_name);\n\n\n\n let mut members = Vec::default();\n\n\n\n let mut pad_var_count = 0;\n\n\n\n let struct_name = get_rust_type_name_non_array(builtin_types, user_types, &type_name, layout)?;\n\n\n\n let mut gpu_offset = 0;\n\n let mut rust_offset = 0;\n\n for f in &*user_type.fields {\n\n //\n\n // Determine the alignment and size of this type using GPU layout\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 54, "score": 132345.8848214455 }, { "content": "fn rust_structs(\n\n rust_code: &mut Vec<String>,\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n) -> Result<Vec<GenerateStructResult>, String> {\n\n let mut structs = Vec::default();\n\n for (type_name, user_type) in user_types {\n\n if user_type.export_uniform_layout {\n\n let s = generate_struct(\n\n &builtin_types,\n\n &user_types,\n\n type_name,\n\n user_type,\n\n MemoryLayout::Std140,\n\n )?;\n\n rust_code.push(s.generate_struct_code());\n\n rust_code.push(s.generate_struct_default_code());\n\n structs.push(s);\n\n }\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 55, "score": 132345.8848214455 }, { "content": "pub fn create_demo_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(DemoExtractJob::default())\n\n}\n\n\n\n//\n\n// This is boiler-platish\n\n//\n\npub struct DemoRenderNode {\n\n pub position: glam::Vec3,\n\n pub alpha: 
f32,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DemoRenderNodeHandle(pub DropSlabKey<DemoRenderNode>);\n\n\n\nimpl DemoRenderNodeHandle {\n\n pub fn as_raw_generic_handle(&self) -> GenericRenderNodeHandle {\n\n GenericRenderNodeHandle::new(\n\n <DemoRenderFeature as RenderFeature>::feature_index(),\n\n self.0.index(),\n", "file_path": "rafx/examples/nodes_api_design/demo_feature/mod.rs", "rank": 56, "score": 131775.89835623762 }, { "content": "fn present_mode_priority(swapchain_def: &RafxSwapchainDef) -> &'static [VkPresentMode] {\n\n if swapchain_def.enable_vsync {\n\n &VSYNC_ON_PRESENT_MODES[..]\n\n } else {\n\n &VSYNC_OFF_PRESENT_MODES[..]\n\n }\n\n}\n\n\n\n/// Represents a vulkan swapchain that can be rebuilt as needed\n\npub struct RafxSwapchainVulkan {\n\n device_context: RafxDeviceContextVulkan,\n\n swapchain: ManuallyDrop<RafxSwapchainVulkanInstance>,\n\n swapchain_def: RafxSwapchainDef,\n\n #[allow(dead_code)]\n\n last_image_suboptimal: bool,\n\n swapchain_images: Vec<RafxSwapchainImage>,\n\n surface: vk::SurfaceKHR,\n\n surface_loader: Arc<khr::Surface>,\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/swapchain.rs", "rank": 57, "score": 131462.63532660485 }, { "content": "struct ViewPacketBuilderInner {\n\n view_nodes: Vec<Vec<PerViewNode>>,\n\n}\n\n\n", "file_path": "rafx-framework/src/nodes/frame_packet.rs", "rank": 58, "score": 131419.18522066274 }, { "content": "//TODO: Maybe the frame_node_assignments needs to be a heap of bitfields, sorted by render node,\n\n// a bit per view to indicate it's present in the view\n\nstruct FramePacketBuilderInner {\n\n // O(1) lookup for if the render node is already inserted into the per frame node list\n\n // index by feature index, then render object index\n\n frame_node_assignments: Vec<Vec<i32>>,\n\n\n\n // A builder per view\n\n view_packet_builders: Vec<Option<ViewPacketBuilder>>,\n\n\n\n // All frame nodes, grouped by feature index\n\n frame_nodes: Vec<Vec<PerFrameNode>>,\n\n}\n\n\n\npub struct 
FramePacketBuilder {\n\n inner: Mutex<FramePacketBuilderInner>,\n\n}\n\n\n\nimpl FramePacketBuilder {\n\n pub fn new() -> Self {\n\n let feature_count = RenderRegistry::registered_feature_count();\n\n\n", "file_path": "rafx-framework/src/nodes/frame_packet.rs", "rank": 59, "score": 131419.18522066274 }, { "content": "fn element_count(array_sizes: &[usize]) -> usize {\n\n let mut element_count = 1;\n\n for x in array_sizes {\n\n element_count *= x;\n\n }\n\n\n\n element_count\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 60, "score": 130879.7978277674 }, { "content": "/// Implement to customize how PoolAllocator resets and destroys pools\n\npub trait PooledResourceImpl {\n\n fn reset(&mut self) -> RafxResult<()>;\n\n}\n\n\n", "file_path": "rafx-framework/src/resources/pool.rs", "rank": 61, "score": 130562.04490494546 }, { "content": "fn add_light_debug_draw(\n\n resources: &Resources,\n\n world: &World,\n\n) {\n\n let mut debug_draw = resources.get_mut::<DebugDraw3DResource>().unwrap();\n\n\n\n let mut query = <Read<DirectionalLightComponent>>::query();\n\n for light in query.iter(world) {\n\n let light_from = light.direction * -10.0;\n\n let light_to = glam::Vec3::ZERO;\n\n\n\n debug_draw.add_line(light_from, light_to, light.color);\n\n }\n\n\n\n let mut query = <(Read<PositionComponent>, Read<PointLightComponent>)>::query();\n\n for (position, light) in query.iter(world) {\n\n debug_draw.add_sphere(position.position, 0.25, light.color, 12);\n\n }\n\n\n\n let mut query = <(Read<PositionComponent>, Read<SpotLightComponent>)>::query();\n", "file_path": "demo/src/scenes/mod.rs", "rank": 62, "score": 130438.27618125002 }, { "content": "struct AssetState<A> {\n\n version: u32,\n\n asset_uuid: AssetUuid,\n\n asset: A,\n\n}\n\n\n\n// A strongly typed storage for a single asset type\n\npub struct Storage<AssetT: TypeUuid + Send> {\n\n refop_sender: Sender<RefOp>,\n\n assets: HashMap<LoadHandle, AssetState<AssetT>>,\n\n uncommitted: 
HashMap<LoadHandle, UncommittedAssetState<AssetT>>,\n\n loader: Box<dyn DynAssetLoader<AssetT>>,\n\n indirection_table: IndirectionTable,\n\n}\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 63, "score": 130346.80146748883 }, { "content": "pub fn init_distill_packfile(pack_file: &std::path::Path) -> AssetResource {\n\n let packfile = std::fs::File::open(pack_file).unwrap();\n\n let packfile_loader = PackfileReader::new(packfile).unwrap();\n\n let loader = Loader::new(Box::new(packfile_loader));\n\n let resolver = Box::new(DefaultIndirectionResolver);\n\n AssetResource::new(loader, resolver)\n\n}\n", "file_path": "rafx-renderer/src/daemon.rs", "rank": 64, "score": 129146.91727130921 }, { "content": "#[profiling::function]\n\nfn finish_load_image(\n\n asset_manager: &mut AssetManager,\n\n texture: RafxTexture,\n\n) -> RafxResult<ImageAsset> {\n\n let image = asset_manager.resources().insert_image(texture);\n\n\n\n let image_view = asset_manager\n\n .resources()\n\n .get_or_create_image_view(&image, None)?;\n\n\n\n Ok(ImageAsset { image, image_view })\n\n}\n", "file_path": "rafx-assets/src/assets/image/assets.rs", "rank": 65, "score": 128529.45279741404 }, { "content": "#[profiling::function]\n\nfn finish_load_buffer(\n\n asset_manager: &mut AssetManager,\n\n buffer: RafxBuffer,\n\n) -> RafxResult<BufferAsset> {\n\n let buffer = asset_manager.resources().insert_buffer(buffer);\n\n\n\n Ok(BufferAsset { buffer })\n\n}\n", "file_path": "rafx-assets/src/assets/buffer/assets.rs", "rank": 66, "score": 128529.45279741404 }, { "content": "fn range_of_line_at_position(\n\n code: &[char],\n\n position: usize,\n\n) -> Range<usize> {\n\n let mut begin_of_line = position;\n\n let mut end_of_line = position;\n\n\n\n for i in position..code.len() {\n\n end_of_line = i + 1;\n\n if code[i] == '\\n' {\n\n break;\n\n }\n\n }\n\n\n\n if position > 0 {\n\n for i in (0..=position - 1).rev() {\n\n if code[i] == '\\n' {\n\n break;\n\n }\n\n\n", "file_path": 
"rafx-shader-processor/src/parse_source.rs", "rank": 67, "score": 128525.9741677662 }, { "content": "fn pop_comments_up_to_position(\n\n comments: &mut VecDeque<CommentText>,\n\n position: usize,\n\n) -> Vec<CommentText> {\n\n let mut result = Vec::default();\n\n\n\n while let Some(comment) = comments.front() {\n\n if comment.position < position {\n\n result.push(comments.pop_front().unwrap());\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DeclarationText {\n\n pub text: Vec<char>,\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 68, "score": 128525.9741677662 }, { "content": "#[test]\n\nfn test_extract_resources() {\n\n let mut resources = ResourceRefMap::default();\n\n let mut x: i32 = 50;\n\n resources.insert(&mut x);\n\n\n\n {\n\n let mut x_borrowed = resources.fetch_mut::<i32>();\n\n assert_eq!(*x_borrowed, 50);\n\n *x_borrowed += 10;\n\n }\n\n\n\n assert_eq!(x, 60);\n\n}\n", "file_path": "rafx-base/src/resource_ref_map.rs", "rank": 69, "score": 128515.86532262052 }, { "content": "pub fn parse_glsl(file_path: &Path) -> Result<ShaderText, String> {\n\n let first_file = FileToProcess {\n\n path: file_path.to_path_buf(),\n\n include_type: IncludeType::Relative,\n\n requested_from: PathBuf::new(),\n\n include_depth: 0,\n\n };\n\n\n\n let mut included_files = FnvHashSet::<PathBuf>::default();\n\n included_files.insert(file_path.to_path_buf());\n\n let mut declarations = Vec::default();\n\n\n\n let content = std::fs::read_to_string(file_path)\n\n .map_err(|e| format!(\"Could not read file {:?}: {:?}\", file_path, e))?;\n\n let code: Vec<char> = content.chars().collect();\n\n parse_shader_source_text(&first_file, &mut declarations, &mut included_files, &code)?;\n\n\n\n Ok(ShaderText { declarations })\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 70, "score": 127645.03850023332 }, { "content": "pub fn run(args: &ShaderProcessorArgs) -> Result<(), Box<dyn Error>> {\n\n 
log::trace!(\"Shader processor args: {:#?}\", args);\n\n\n\n if let Some(glsl_file) = &args.glsl_file {\n\n //\n\n // Handle a single file given via --glsl_file. In this mode, the output files are explicit\n\n //\n\n log::info!(\"Processing file {:?}\", glsl_file);\n\n\n\n //\n\n // Try to determine what kind of shader this is from the file name\n\n //\n\n let shader_kind = shader_kind_from_args(args)\n\n .or_else(|| deduce_default_shader_kind_from_path(glsl_file))\n\n .unwrap_or(shaderc::ShaderKind::InferFromSource);\n\n\n\n //\n\n // Process this shader and write to output files\n\n //\n\n process_glsl_shader(\n", "file_path": "rafx-shader-processor/src/lib.rs", "rank": 71, "score": 126836.68064335076 }, { "content": "pub fn create_font_texture_with_characters<'a, IterT: Iterator<Item = &'a char>>(\n\n font: &fontdue::Font,\n\n characters: IterT,\n\n size: f32,\n\n margin: u32,\n\n) -> Option<FontTextureWithMeta> {\n\n let mut rasterized_data = FnvHashMap::default();\n\n let mut rects_to_place = rectangle_pack::GroupedRectsToPlace::<char, ()>::new();\n\n\n\n for &c in characters {\n\n let (metrics, data) = font.rasterize(c, size);\n\n rects_to_place.push_rect(\n\n c,\n\n None,\n\n rectangle_pack::RectToInsert::new(\n\n metrics.width as u32 + (margin * 2),\n\n metrics.height as u32 + (margin * 2),\n\n 1,\n\n ),\n\n );\n", "file_path": "demo/src/assets/font/font_cooking.rs", "rank": 72, "score": 125334.80187227487 }, { "content": "struct UncommittedAssetState<A: Send> {\n\n version: u32,\n\n asset_uuid: AssetUuid,\n\n result: UpdateAssetResult<A>,\n\n}\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 73, "score": 124185.13274622697 }, { "content": "// Binding type determines memory layout that gets used\n\nfn determine_memory_layout(binding_struct_type: StructBindingType) -> MemoryLayout {\n\n match binding_struct_type {\n\n StructBindingType::Uniform => MemoryLayout::Std140,\n\n StructBindingType::Buffer => MemoryLayout::Std430,\n\n 
StructBindingType::PushConstant => MemoryLayout::Std430,\n\n }\n\n}\n\n\n\n// Memory layouts we have to deal with (C = repr(C))\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 74, "score": 123387.82235448467 }, { "content": "// This holds the allocator and the frame on which it was \"borrowed\" from the allocator manager\n\nstruct DescriptorSetAllocatorRefInner {\n\n allocator: Box<DescriptorSetAllocator>,\n\n checkout_frame: u64,\n\n}\n\n\n\n// A borrowed allocator that returns itself when it is dropped. It is expected that these borrows\n\n// are short (i.e. within a single frame). Holding an allocator over multiple frames can delay\n\n// releasing descriptor sets that have been dropped.\n\npub struct DescriptorSetAllocatorRef {\n\n // This should never be None. We always allocate this to a non-none value and we don't clear\n\n // it until the drop handler\n\n allocator: Option<DescriptorSetAllocatorRefInner>,\n\n drop_tx: Sender<DescriptorSetAllocatorRefInner>,\n\n}\n\n\n\nimpl DescriptorSetAllocatorRef {\n\n fn new(\n\n allocator: DescriptorSetAllocatorRefInner,\n\n drop_tx: Sender<DescriptorSetAllocatorRefInner>,\n\n ) -> Self {\n", "file_path": "rafx-framework/src/resources/descriptor_sets/descriptor_set_allocator_manager.rs", "rank": 75, "score": 123129.13422855029 }, { "content": "struct PoolResourceInFlight<T: PooledResourceImpl> {\n\n pool: T,\n\n live_until_frame: Wrapping<u32>,\n\n}\n\n\n\n/// This handles waiting for N frames to pass before resetting the pool. \"Restting\" could mean\n\n/// different things depending on the resource. This allocator also has a callback for allocating\n\n/// new pools for use. 
A maximum pool count should be provided so that an unbounded leak of pools\n\n/// can be detected.\n\npub struct PooledResourceAllocator<T: PooledResourceImpl> {\n\n device_context: RafxDeviceContext,\n\n\n\n // Allocates a new pool\n\n allocate_fn: Box<PoolResourceAllocatorAllocFn<T>>,\n\n\n\n // We are assuming that all pools can survive for the same amount of time so the data in\n\n // this VecDeque will naturally be orderered such that things that need to be reset sooner\n\n // are at the front\n\n in_flight_pools: VecDeque<PoolResourceInFlight<T>>,\n\n\n", "file_path": "rafx-framework/src/resources/pool.rs", "rank": 76, "score": 122451.4883966688 }, { "content": "#[profiling::function]\n\nfn opaque_render_phase_sort_submit_nodes(mut submit_nodes: Vec<SubmitNode>) -> Vec<SubmitNode> {\n\n // Sort by feature\n\n log::trace!(\n\n \"Sort phase {}\",\n\n OpaqueRenderPhase::render_phase_debug_name()\n\n );\n\n submit_nodes.sort_unstable_by(|a, b| a.feature_index().cmp(&b.feature_index()));\n\n\n\n submit_nodes\n\n}\n\n\n\n//\n\n// SDL2 helpers\n\n//\n\npub struct Sdl2Systems {\n\n pub context: sdl2::Sdl,\n\n pub video_subsystem: sdl2::VideoSubsystem,\n\n pub window: sdl2::video::Window,\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 77, "score": 121672.9238960217 }, { "content": "#[profiling::function]\n\nfn opaque_render_phase_sort_submit_nodes(mut submit_nodes: Vec<SubmitNode>) -> Vec<SubmitNode> {\n\n // Sort by feature\n\n log::trace!(\n\n \"Sort phase {}\",\n\n OpaqueRenderPhase::render_phase_debug_name()\n\n );\n\n submit_nodes.sort_unstable_by(|a, b| a.feature_index().cmp(&b.feature_index()));\n\n\n\n submit_nodes\n\n}\n\n\n\n//\n\n// SDL2 helpers\n\n//\n\npub struct Sdl2Systems {\n\n pub context: sdl2::Sdl,\n\n pub video_subsystem: sdl2::VideoSubsystem,\n\n pub window: sdl2::video::Window,\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 78, "score": 121672.9238960217 }, { 
"content": "#[profiling::function]\n\nfn ui_render_phase_sort_submit_nodes(mut submit_nodes: Vec<SubmitNode>) -> Vec<SubmitNode> {\n\n // Sort by feature\n\n log::trace!(\"Sort phase {}\", UiRenderPhase::render_phase_debug_name());\n\n submit_nodes.sort_unstable_by(|a, b| a.feature_index().cmp(&b.feature_index()));\n\n\n\n submit_nodes\n\n}\n", "file_path": "demo/src/phases/ui_render_phase.rs", "rank": 79, "score": 121672.9238960217 }, { "content": "#[profiling::function]\n\nfn shadow_map_render_phase_sort_submit_nodes(mut submit_nodes: Vec<SubmitNode>) -> Vec<SubmitNode> {\n\n // Sort by distance from camera front to back\n\n log::trace!(\n\n \"Sort phase {}\",\n\n ShadowMapRenderPhase::render_phase_debug_name()\n\n );\n\n submit_nodes.sort_unstable_by(|a, b| a.distance().partial_cmp(&b.distance()).unwrap());\n\n\n\n submit_nodes\n\n}\n", "file_path": "demo/src/phases/shadow_map_render_phase.rs", "rank": 80, "score": 119235.50994704157 }, { "content": "// Used to dynamic dispatch into a storage, supports checked downcasting\n\npub trait DynAssetStorage: Downcast + Send {\n\n fn update_asset(\n\n &mut self,\n\n loader_info: &dyn LoaderInfoProvider,\n\n data: &[u8],\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n version: u32,\n\n ) -> Result<(), Box<dyn Error + Send + 'static>>;\n\n fn commit_asset_version(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n fn free(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 81, "score": 118847.57494921269 }, { "content": "fn format_array_sizes(sizes: &[usize]) -> String {\n\n let mut s = String::default();\n\n for size in sizes {\n\n s += &format!(\"[{}]\", size);\n\n }\n\n\n\n s\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 82, "score": 118314.69373170054 }, { "content": "fn try_parse_struct(code: &[char]) -> Result<Option<ParseStructResult>, String> {\n\n let mut position = 
0;\n\n\n\n // Consume the struct keyword. If it's missing, assume this isn't a struct and return None\n\n let consumed = crate::parse_source::try_consume_identifier(code, &mut position);\n\n if consumed.is_none() || consumed.unwrap() != \"struct\" {\n\n return Ok(None);\n\n }\n\n\n\n // Consume the name of the struct and all whitespace to the opening {\n\n crate::parse_source::skip_whitespace(code, &mut position);\n\n let type_name =\n\n crate::parse_source::try_consume_identifier(code, &mut position).ok_or(format!(\n\n \"Expected name of struct while parsing struct:\\n{}\",\n\n crate::parse_source::characters_to_string(&code)\n\n ))?;\n\n\n\n crate::parse_source::skip_whitespace(code, &mut position);\n\n let fields = try_parse_fields(code, &mut position)?.ok_or(format!(\n\n \"Expected {{ while parsing struct:\\n{}\",\n", "file_path": "rafx-shader-processor/src/parse_declarations.rs", "rank": 83, "score": 117883.1054396098 }, { "content": "// Implements loading logic (i.e. turning bytes into an asset. 
The asset may contain runtime-only\n\n// data and may be created asynchronously\n\npub trait DynAssetLoader<AssetT>: Send\n\nwhere\n\n AssetT: TypeUuid + 'static + Send,\n\n{\n\n fn update_asset(\n\n &mut self,\n\n refop_sender: &Sender<RefOp>,\n\n loader_info: &dyn LoaderInfoProvider,\n\n data: &[u8],\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n version: u32,\n\n ) -> Result<UpdateAssetResult<AssetT>, Box<dyn Error + Send + 'static>>;\n\n\n\n fn commit_asset_version(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n\n\n fn free(\n\n &mut self,\n\n handle: LoadHandle,\n\n );\n\n}\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 85, "score": 117227.82680805147 }, { "content": "fn resolve_load_handle<T: AssetHandle>(\n\n handle: &T,\n\n indirection_table: &IndirectionTable,\n\n) -> Option<LoadHandle> {\n\n if handle.load_handle().is_indirect() {\n\n indirection_table.resolve(handle.load_handle())\n\n } else {\n\n Some(handle.load_handle())\n\n }\n\n}\n\n\n\nimpl<AssetT: TypeUuid + Send> Storage<AssetT> {\n\n fn new(\n\n sender: Sender<RefOp>,\n\n loader: Box<dyn DynAssetLoader<AssetT>>,\n\n indirection_table: IndirectionTable,\n\n ) -> Self {\n\n Self {\n\n refop_sender: sender,\n\n assets: HashMap::new(),\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 86, "score": 116682.53660525497 }, { "content": "//\n\n// A default asset type handler implementation for asset types that can implement a simple \"load\"\n\n// function to convert from asset data to the asset\n\n//\n\npub trait DefaultAssetTypeLoadHandler<AssetDataT, AssetT> {\n\n fn load(\n\n asset_manager: &mut AssetManager,\n\n font_asset: AssetDataT,\n\n ) -> RafxResult<AssetT>;\n\n}\n\n\n\npub struct DefaultAssetTypeHandler<AssetDataT, AssetT, LoadHandlerT>\n\nwhere\n\n LoadHandlerT: DefaultAssetTypeLoadHandler<AssetDataT, AssetT>,\n\n{\n\n asset_lookup: AssetLookup<AssetT>,\n\n load_queues: LoadQueues<AssetDataT, AssetT>,\n\n 
phantom_data: PhantomData<LoadHandlerT>,\n\n}\n\n\n\nimpl<AssetDataT, AssetT, LoadHandlerT> AssetTypeHandlerFactory\n\n for DefaultAssetTypeHandler<AssetDataT, AssetT, LoadHandlerT>\n\nwhere\n\n AssetDataT: TypeUuid + for<'a> serde::Deserialize<'a> + 'static + Send + Clone,\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 87, "score": 111360.72129863864 }, { "content": "// based on https://github.com/google/shaderc/blob/caa519ca532a6a3a0279509fce2ceb791c4f4651/glslc/src/shader_stage.cc#L69\n\nfn deduce_default_shader_kind_from_path(path: &Path) -> Option<shaderc::ShaderKind> {\n\n let extensions = [\n\n (\"vert\", shaderc::ShaderKind::DefaultVertex),\n\n (\"frag\", shaderc::ShaderKind::DefaultFragment),\n\n (\"tesc\", shaderc::ShaderKind::DefaultTessControl),\n\n (\"tese\", shaderc::ShaderKind::DefaultTessEvaluation),\n\n (\"geom\", shaderc::ShaderKind::DefaultGeometry),\n\n (\"comp\", shaderc::ShaderKind::DefaultCompute),\n\n //(\"spvasm\", shaderc::ShaderKind::Vertex), // we don't parse spvasm\n\n (\"rgen\", shaderc::ShaderKind::DefaultRayGeneration),\n\n (\"rahit\", shaderc::ShaderKind::DefaultAnyHit),\n\n (\"rchit\", shaderc::ShaderKind::DefaultClosestHit),\n\n (\"rmiss\", shaderc::ShaderKind::DefaultMiss),\n\n (\"rint\", shaderc::ShaderKind::DefaultIntersection),\n\n (\"rcall\", shaderc::ShaderKind::DefaultCallable),\n\n (\"task\", shaderc::ShaderKind::DefaultTask),\n\n (\"mesh\", shaderc::ShaderKind::DefaultMesh),\n\n ];\n\n\n\n if let Some(extension) = path.extension() {\n", "file_path": "rafx-shader-processor/src/lib.rs", "rank": 88, "score": 107292.34598421067 }, { "content": "// Determine the binding type of a struct based on parsed code\n\nfn determine_binding_type(b: &ParsedBindingWithAnnotations) -> Result<StructBindingType, String> {\n\n if b.parsed.layout_parts.push_constant {\n\n Ok(StructBindingType::PushConstant)\n\n } else if b.parsed.binding_type == BindingType::Uniform {\n\n Ok(StructBindingType::Uniform)\n\n } else if 
b.parsed.binding_type == BindingType::Buffer {\n\n Ok(StructBindingType::Buffer)\n\n } else {\n\n Err(\"Unknown binding type\".to_string())\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 89, "score": 107186.82544568661 }, { "content": "#[profiling::function]\n\nfn determine_node_order(graph: &RenderGraphBuilder) -> Vec<RenderGraphNodeId> {\n\n // As we depth-first traverse nodes, mark them as visiting and push them onto this stack.\n\n // We will use this to detect and print out cycles\n\n let mut visiting = vec![false; graph.nodes.len()];\n\n let mut visiting_stack = Vec::default();\n\n\n\n // The order of nodes, upstream to downstream. As we depth-first traverse nodes, push nodes\n\n // with no unvisited dependencies onto this list and mark them as visited\n\n let mut visited = vec![false; graph.nodes.len()];\n\n let mut ordered_list = Vec::default();\n\n\n\n // Iterate all the images we need to output. This will visit all the nodes we need to execute,\n\n // potentially leaving out nodes we can cull.\n\n for output_image_id in &graph.output_images {\n\n // Find the node that creates the output image\n\n let output_node = graph.image_version_info(output_image_id.usage).creator_node;\n\n log::trace!(\n\n \"Traversing dependencies of output image created by node {:?} {:?}\",\n\n output_node,\n\n graph.node(output_node).name()\n", "file_path": "rafx-framework/src/graph/graph_plan.rs", "rank": 90, "score": 105718.94779744859 }, { "content": "#[derive(Debug)]\n\nstruct StructMember {\n\n name: String,\n\n ty: String,\n\n size: usize,\n\n offset: usize,\n\n align: usize,\n\n default_value: String,\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 91, "score": 97246.09357616663 }, { "content": "#[derive(Debug)]\n\nstruct GenerateStructResult {\n\n name: String,\n\n size: usize,\n\n align: usize,\n\n members: Vec<StructMember>,\n\n}\n\n\n\nimpl GenerateStructResult {\n\n fn generate_struct_code(&self) -> String 
{\n\n let mut result_string = String::default();\n\n result_string += &format!(\n\n \"#[derive(Copy, Clone, Debug)]\\n#[repr(C)]\\npub struct {} {{\\n\",\n\n self.name\n\n );\n\n for m in &self.members {\n\n result_string += &format_member(&m.name, &m.ty, m.offset, m.size);\n\n }\n\n result_string += &format!(\"}} // {} bytes\\n\\n\", self.size);\n\n result_string\n\n }\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 92, "score": 96482.57622877037 }, { "content": "#[derive(Debug)]\n\nstruct UserType {\n\n struct_or_binding: StructOrBinding,\n\n type_name: String,\n\n fields: Arc<Vec<ParseFieldResult>>,\n\n //export_name: Option<String>,\n\n export_uniform_layout: bool,\n\n export_push_constant_layout: bool,\n\n export_buffer_layout: bool,\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 93, "score": 85576.03401897519 }, { "content": "#[derive(Clone)]\n\nstruct TextComponent {\n\n text: String,\n\n font: Handle<FontAsset>,\n\n}\n\n\n", "file_path": "demo/src/scenes/rafxmark_scene.rs", "rank": 94, "score": 85575.98844425086 }, { "content": "#[derive(Copy, Clone)]\n\nstruct BodyComponent {\n\n velocity: Vec3,\n\n}\n\n\n", "file_path": "demo/src/scenes/rafxmark_scene.rs", "rank": 95, "score": 85575.94807164953 }, { "content": "#[derive(Clone, Copy)]\n\nstruct InputComponent {\n\n is_left_mouse_button_down: bool,\n\n}\n\n\n", "file_path": "demo/src/scenes/rafxmark_scene.rs", "rank": 96, "score": 85575.94807164953 }, { "content": "struct MeshToImport {\n\n id: GltfObjectId,\n\n asset: MeshAssetData,\n\n}\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 97, "score": 85569.98917696634 }, { "content": "struct ImageToImport {\n\n id: GltfObjectId,\n\n asset: ImageAssetData,\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct GltfMaterialImportData {\n\n //pub name: Option<String>,\n\n pub material_data: GltfMaterialData,\n\n\n\n pub base_color_texture: Option<Handle<ImageAsset>>,\n\n // metalness in B, roughness in 
G\n\n pub metallic_roughness_texture: Option<Handle<ImageAsset>>,\n\n pub normal_texture: Option<Handle<ImageAsset>>,\n\n pub occlusion_texture: Option<Handle<ImageAsset>>,\n\n pub emissive_texture: Option<Handle<ImageAsset>>,\n\n // We would need to change the pipeline for these\n\n // double_sided: bool, // defult false\n\n // alpha_mode: String, // OPAQUE, MASK, BLEND\n\n // support for points/lines?\n\n}\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 98, "score": 85569.98917696634 }, { "content": "struct MaterialToImport {\n\n id: GltfObjectId,\n\n asset: GltfMaterialImportData,\n\n}\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 99, "score": 85569.98917696634 } ]
Rust
src/transforms/rename_fields.rs
zcapper/vector
560fd106fc9a60c12ddf2c32e31ad4f2031ff1f5
use super::Transform; use crate::{ event::Event, topology::config::{DataType, TransformConfig, TransformContext, TransformDescription}, }; use indexmap::map::IndexMap; use serde::{Deserialize, Serialize}; use snafu::Snafu; use string_cache::DefaultAtom as Atom; use toml::value::Value as TomlValue; #[derive(Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub struct RenameFieldsConfig { pub fields: IndexMap<String, TomlValue>, } pub struct RenameFields { fields: IndexMap<Atom, Atom>, } inventory::submit! { TransformDescription::new_without_default::<RenameFieldsConfig>("rename_fields") } #[typetag::serde(name = "rename_fields")] impl TransformConfig for RenameFieldsConfig { fn build(&self, _exec: TransformContext) -> crate::Result<Box<dyn Transform>> { Ok(Box::new(RenameFields::new(self.fields.clone())?)) } fn input_type(&self) -> DataType { DataType::Log } fn output_type(&self) -> DataType { DataType::Log } fn transform_type(&self) -> &'static str { "rename_fields" } } impl RenameFields { pub fn new(fields: IndexMap<String, TomlValue>) -> crate::Result<Self> { Ok(RenameFields { fields: fields .into_iter() .map(|kv| flatten(kv, None)) .collect::<crate::Result<_>>()?, }) } } #[derive(Debug, Eq, PartialEq, Snafu)] enum FlattenError { #[snafu(display( "The key {:?} cannot be flattened. 
Is it a plain string or a `a.b.c` style map?", key ))] CannotFlatten { key: String }, } fn flatten(kv: (String, TomlValue), prequel: Option<String>) -> crate::Result<(Atom, Atom)> { let (k, v) = kv; match v { TomlValue::String(s) => match prequel { Some(prequel) => Ok((format!("{}.{}", prequel, k).into(), s.into())), None => Ok((k.into(), s.into())), }, TomlValue::Table(map) => { if map.len() > 1 { Err(Box::new(FlattenError::CannotFlatten { key: k })) } else { let sub_kv = map.into_iter().next().expect("Map of len 1 has no values"); let key = match prequel { Some(prequel) => format!("{}.{}", prequel, k), None => k, }; flatten(sub_kv, Some(key)) } } TomlValue::Integer(_) | TomlValue::Float(_) | TomlValue::Boolean(_) | TomlValue::Datetime(_) | TomlValue::Array(_) => Err(Box::new(FlattenError::CannotFlatten { key: k })), } } impl Transform for RenameFields { fn transform(&mut self, mut event: Event) -> Option<Event> { for (old_key, new_key) in &self.fields { let log = event.as_mut_log(); if let Some(v) = log.remove(&old_key) { log.insert(new_key.clone(), v) } } Some(event) } } #[cfg(test)] mod tests { use super::RenameFields; use crate::{event::Event, transforms::Transform}; use indexmap::map::IndexMap; #[test] fn rename_fields() { let mut event = Event::from("message"); event.as_mut_log().insert("to_move", "some value"); event.as_mut_log().insert("do_not_move", "not moved"); let mut fields = IndexMap::new(); fields.insert("to_move".into(), "moved".into()); fields.insert("not_present".into(), "should_not_exist".into()); let mut transform = RenameFields::new(fields).unwrap(); let new_event = transform.transform(event).unwrap(); assert!(new_event.as_log().get(&"to_move".into()).is_none()); assert_eq!(new_event.as_log()[&"moved".into()], "some value".into()); assert!(new_event.as_log().get(&"not_present".into()).is_none()); assert!(new_event.as_log().get(&"should_not_exist".into()).is_none()); assert_eq!( new_event.as_log()[&"do_not_move".into()], "not moved".into() ); } 
}
use super::Transform; use crate::{ event::Event, topology::config::{DataType, TransformConfig, TransformContext, TransformDescription}, }; use indexmap::map::IndexMap; use serde::{Deserialize, Serialize}; use snafu::Snafu; use string_cache::DefaultAtom as Atom; use toml::value::Value as TomlValue; #[derive(Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub struct RenameFieldsConfig { pub fields: IndexMap<String, TomlValue>, } pub struct RenameFields { fields: IndexMap<Atom, Atom>, } inventory::submit! { TransformDescription::new_without_default::<RenameFieldsConfig>("rename_fields") } #[typetag::serde(name = "rename_fields")] impl TransformConfig for RenameFieldsConfig { fn build(&self, _exec: TransformContext) -> crate::Result<Box<dyn Transform>> { Ok(Box::new(RenameFields::new(self.fields.clone())?)) } fn input_type(&self) -> DataType { DataType::Log } fn output_type(&self) -> DataType { DataType::Log } fn transform_type(&self) -> &'static str { "rename_fields" } } impl RenameFields { pub fn new(fields: IndexMap<String, TomlValue>) -> crate::Result<Self> {
} } #[derive(Debug, Eq, PartialEq, Snafu)] enum FlattenError { #[snafu(display( "The key {:?} cannot be flattened. Is it a plain string or a `a.b.c` style map?", key ))] CannotFlatten { key: String }, } fn flatten(kv: (String, TomlValue), prequel: Option<String>) -> crate::Result<(Atom, Atom)> { let (k, v) = kv; match v { TomlValue::String(s) => match prequel { Some(prequel) => Ok((format!("{}.{}", prequel, k).into(), s.into())), None => Ok((k.into(), s.into())), }, TomlValue::Table(map) => { if map.len() > 1 { Err(Box::new(FlattenError::CannotFlatten { key: k })) } else { let sub_kv = map.into_iter().next().expect("Map of len 1 has no values"); let key = match prequel { Some(prequel) => format!("{}.{}", prequel, k), None => k, }; flatten(sub_kv, Some(key)) } } TomlValue::Integer(_) | TomlValue::Float(_) | TomlValue::Boolean(_) | TomlValue::Datetime(_) | TomlValue::Array(_) => Err(Box::new(FlattenError::CannotFlatten { key: k })), } } impl Transform for RenameFields { fn transform(&mut self, mut event: Event) -> Option<Event> { for (old_key, new_key) in &self.fields { let log = event.as_mut_log(); if let Some(v) = log.remove(&old_key) { log.insert(new_key.clone(), v) } } Some(event) } } #[cfg(test)] mod tests { use super::RenameFields; use crate::{event::Event, transforms::Transform}; use indexmap::map::IndexMap; #[test] fn rename_fields() { let mut event = Event::from("message"); event.as_mut_log().insert("to_move", "some value"); event.as_mut_log().insert("do_not_move", "not moved"); let mut fields = IndexMap::new(); fields.insert("to_move".into(), "moved".into()); fields.insert("not_present".into(), "should_not_exist".into()); let mut transform = RenameFields::new(fields).unwrap(); let new_event = transform.transform(event).unwrap(); assert!(new_event.as_log().get(&"to_move".into()).is_none()); assert_eq!(new_event.as_log()[&"moved".into()], "some value".into()); assert!(new_event.as_log().get(&"not_present".into()).is_none()); 
assert!(new_event.as_log().get(&"should_not_exist".into()).is_none()); assert_eq!( new_event.as_log()[&"do_not_move".into()], "not moved".into() ); } }
Ok(RenameFields { fields: fields .into_iter() .map(|kv| flatten(kv, None)) .collect::<crate::Result<_>>()?, })
call_expression
[ { "content": "/// Iterates over all paths in form \"a.b[0].c[1]\" in alphabetical order.\n\n/// It is implemented as a wrapper around `all_fields` to reduce code\n\n/// duplication.\n\npub fn keys<'a>(fields: &'a BTreeMap<Atom, Value>) -> impl Iterator<Item = Atom> + 'a {\n\n all_fields(fields).map(|(k, _)| k)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::super::test::fields_from_json;\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn keys_simple() {\n\n let fields = fields_from_json(json!({\n\n \"field2\": 3,\n\n \"field1\": 4,\n\n \"field3\": 5\n\n }));\n\n let expected: Vec<_> = vec![\"field1\", \"field2\", \"field3\"]\n\n .into_iter()\n\n .map(|s| Atom::from(s))\n", "file_path": "src/event/util/log/keys.rs", "rank": 0, "score": 302618.780449646 }, { "content": "/// Checks whether a field specified by a given path is present.\n\npub fn contains(fields: &BTreeMap<Atom, Value>, path: &str) -> bool {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get(&key) {\n\n None => false,\n\n Some(value) => value_contains(value, path_iter),\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/contains.rs", "rank": 1, "score": 298365.00787750114 }, { "content": "pub fn parse(input: &str) -> Vec<&str> {\n\n let simple = is_not::<_, _, (&str, nom::error::ErrorKind)>(\" \\t[\\\"\");\n\n let string = delimited(\n\n tag(\"\\\"\"),\n\n map(opt(escaped(is_not(\"\\\"\\\\\"), '\\\\', one_of(\"\\\"\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"\\\"\"),\n\n );\n\n let bracket = delimited(\n\n tag(\"[\"),\n\n map(opt(escaped(is_not(\"]\\\\\"), '\\\\', one_of(\"]\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"]\"),\n\n );\n\n\n\n // fall back to returning the rest of the input, if any\n\n let remainder = verify(rest, |s: &str| !s.is_empty());\n\n let field = alt((bracket, string, simple, remainder));\n", "file_path": 
"src/transforms/tokenizer.rs", "rank": 2, "score": 297030.8451642633 }, { "content": "pub fn transform(suffix: &str, increase: f64) -> MockTransformConfig {\n\n MockTransformConfig::new(suffix.to_owned(), increase)\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct MockSourceConfig {\n\n #[serde(skip)]\n\n receiver: Arc<Mutex<Option<Receiver<Event>>>>,\n\n}\n\n\n\nimpl MockSourceConfig {\n\n pub fn new(receiver: Receiver<Event>) -> Self {\n\n Self {\n\n receiver: Arc::new(Mutex::new(Some(receiver))),\n\n }\n\n }\n\n}\n\n\n\n#[typetag::serde(name = \"mock\")]\n\nimpl SourceConfig for MockSourceConfig {\n", "file_path": "tests/support/mod.rs", "rank": 3, "score": 296305.9564975859 }, { "content": "pub fn validate_host(host: &str) -> crate::Result<()> {\n\n let uri = Uri::try_from(host).context(super::UriParseError)?;\n\n\n\n match uri.scheme_part() {\n\n Some(_) => Ok(()),\n\n None => Err(Box::new(BuildError::UriMissingScheme)),\n\n }\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 4, "score": 294371.6223046749 }, { "content": "/// Inserts field value using a path specified using `a.b[1].c` notation.\n\npub fn insert(fields: &mut BTreeMap<Atom, Value>, path: &str, value: Value) {\n\n map_insert(fields, PathIter::new(path).peekable(), value);\n\n}\n\n\n", "file_path": "src/event/util/log/insert.rs", "rank": 6, "score": 281309.601083334 }, { "content": "/// Removes field value specified by the given path and return its value.\n\n///\n\n/// A special case worth mentioning: if there is a nested array and an item is removed\n\n/// from the middle of this array, then it is just replaced by `Value::Null`.\n\npub fn remove(fields: &mut BTreeMap<Atom, Value>, path: &str) -> Option<Value> {\n\n let mut path_iter = PathIter::new(path).peekable();\n\n\n\n let key = match path_iter.next() {\n\n Some(PathComponent::Key(key)) => key,\n\n _ => return None,\n\n };\n\n\n\n match path_iter.peek() {\n\n None => fields.remove(&key),\n\n Some(_) => match 
fields.get_mut(&key) {\n\n None => None,\n\n Some(value) => value_remove(value, path_iter),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/remove.rs", "rank": 7, "score": 281302.53303055343 }, { "content": "/// Returns a reference to a field value specified by the given path.\n\npub fn get<'a>(fields: &'a BTreeMap<Atom, Value>, path: &str) -> Option<&'a Value> {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get(&key) {\n\n None => None,\n\n Some(value) => get_value(value, path_iter),\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/get.rs", "rank": 8, "score": 277465.0579811778 }, { "content": "// Splits the given input by a separator.\n\n// If the separator is `None`, then it will split on whitespace.\n\npub fn split(input: &str, separator: Option<String>) -> Vec<&str> {\n\n match separator {\n\n Some(separator) => input.split(&separator).collect(),\n\n None => input.split_whitespace().collect(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::split;\n\n use super::SplitConfig;\n\n use crate::event::{LogEvent, Value};\n\n use crate::{\n\n topology::config::{TransformConfig, TransformContext},\n\n Event,\n\n };\n\n use string_cache::DefaultAtom as Atom;\n\n\n\n #[test]\n\n fn split_whitespace() {\n", "file_path": "src/transforms/split.rs", "rank": 9, "score": 271595.07417821506 }, { "content": "fn open_read(filename: &Path, note: &'static str) -> crate::Result<Vec<u8>> {\n\n let mut text = Vec::<u8>::new();\n\n\n\n File::open(filename)\n\n .with_context(|| FileOpenFailed { note, filename })?\n\n .read_to_end(&mut text)\n\n .with_context(|| FileReadFailed { note, filename })?;\n\n\n\n Ok(text)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::assert_downcast_matches;\n\n\n\n const TEST_PKCS12: &str = \"tests/data/localhost.p12\";\n\n const TEST_PEM_CRT: &str = \"tests/data/localhost.crt\";\n\n const TEST_PEM_KEY: 
&str = \"tests/data/localhost.key\";\n\n\n", "file_path": "src/tls.rs", "rank": 10, "score": 264132.932964214 }, { "content": "/// Determines format of message.\n\n/// This exists because Docker is still a special entity in Kubernetes as it can write in Json\n\n/// despite CRI defining it's own format.\n\npub fn build_message_parser() -> crate::Result<ApplicableTransform> {\n\n let transforms = vec![\n\n Box::new(DockerMessageTransformer::new()) as Box<dyn Transform>,\n\n transform_cri_message()?,\n\n ];\n\n Ok(ApplicableTransform::Candidates(transforms))\n\n}\n\n\n", "file_path": "src/sources/kubernetes/message_parser.rs", "rank": 11, "score": 262485.1125817797 }, { "content": "/// Returns a mutable reference to field value specified by the given path.\n\npub fn get_mut<'a>(fields: &'a mut BTreeMap<Atom, Value>, path: &str) -> Option<&'a mut Value> {\n\n let mut path_iter = PathIter::new(path);\n\n\n\n match path_iter.next() {\n\n Some(PathComponent::Key(key)) => match fields.get_mut(&key) {\n\n None => None,\n\n Some(value) => get_mut_value(value, path_iter),\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/event/util/log/get_mut.rs", "rank": 12, "score": 256596.512715549 }, { "content": "pub fn load_sink<T>(config: &str) -> crate::Result<(T, SinkContext, Runtime)>\n\nwhere\n\n for<'a> T: Deserialize<'a> + SinkConfig,\n\n{\n\n let sink_config: T = toml::from_str(config)?;\n\n let rt = crate::test_util::runtime();\n\n let cx = SinkContext::new_test(rt.executor());\n\n\n\n Ok((sink_config, cx, rt))\n\n}\n\n\n", "file_path": "src/sinks/util/test.rs", "rank": 13, "score": 253230.7583967763 }, { "content": "fn render_fields(src: &str, event: &Event) -> Result<String, Vec<Atom>> {\n\n let mut missing_fields = Vec::new();\n\n let out = RE\n\n .replace_all(src, |caps: &Captures<'_>| {\n\n let key = caps\n\n .get(1)\n\n .map(|s| Atom::from(s.as_str().trim()))\n\n .expect(\"src should match regex\");\n\n if let Some(val) = event.as_log().get(&key) {\n\n 
val.to_string_lossy()\n\n } else {\n\n missing_fields.push(key.clone());\n\n String::new()\n\n }\n\n })\n\n .into_owned();\n\n if missing_fields.is_empty() {\n\n Ok(out)\n\n } else {\n\n Err(missing_fields)\n\n }\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 14, "score": 247645.38991588302 }, { "content": "fn flatten_field(key: Atom, value: TomlValue, new_fields: &mut IndexMap<Atom, TemplateOrValue>) {\n\n match value {\n\n TomlValue::String(s) => {\n\n let t = Template::from(s);\n\n new_fields.insert(key, t.into())\n\n }\n\n TomlValue::Integer(i) => {\n\n let i = Value::from(i);\n\n new_fields.insert(key, i.into())\n\n }\n\n TomlValue::Float(f) => {\n\n let f = Value::from(f);\n\n new_fields.insert(key, f.into())\n\n }\n\n TomlValue::Boolean(b) => {\n\n let b = Value::from(b);\n\n new_fields.insert(key, b.into())\n\n }\n\n TomlValue::Datetime(dt) => {\n\n let dt = dt.to_string();\n", "file_path": "src/transforms/add_fields.rs", "rank": 15, "score": 243447.37370981195 }, { "content": "pub fn log_schema() -> &'static LogSchema {\n\n LOG_SCHEMA.get().unwrap_or(&LOG_SCHEMA_DEFAULT)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Getters, Setters)]\n\n#[serde(default)]\n\npub struct LogSchema {\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n message_key: Atom,\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n timestamp_key: Atom,\n\n #[getset(get = \"pub\", set = \"pub(crate)\")]\n\n host_key: Atom,\n\n}\n\n\n\nimpl Default for LogSchema {\n\n fn default() -> Self {\n\n LogSchema {\n\n message_key: Atom::from(\"message\"),\n\n timestamp_key: Atom::from(\"timestamp\"),\n", "file_path": "src/event/mod.rs", "rank": 16, "score": 238834.60753912193 }, { "content": "fn encode_uri(host: &str, database: &str, table: &str) -> crate::Result<Uri> {\n\n let query = url::form_urlencoded::Serializer::new(String::new())\n\n .append_pair(\n\n \"query\",\n\n format!(\n\n \"INSERT INTO \\\"{}\\\".\\\"{}\\\" FORMAT JSONEachRow\",\n\n database,\n\n 
table.replace(\"\\\"\", \"\\\\\\\"\")\n\n )\n\n .as_str(),\n\n )\n\n .finish();\n\n\n\n let url = if host.ends_with('/') {\n\n format!(\"{}?{}\", host, query)\n\n } else {\n\n format!(\"{}/?{}\", host, query)\n\n };\n\n\n\n Ok(url.parse::<Uri>().context(super::UriParseError)?)\n\n}\n\n\n", "file_path": "src/sinks/clickhouse.rs", "rank": 17, "score": 233241.5201394748 }, { "content": "fn transform_file() -> crate::Result<Box<dyn Transform>> {\n\n let mut config = RegexParserConfig::default();\n\n\n\n config.field = Some(\"file\".into());\n\n\n\n config.regex = r\"^\".to_owned()\n\n + LOG_DIRECTORY\n\n + r\"(?P<pod_uid>[^/]*)/(?P<container_name>[^/]*)/[0-9]*[.]log$\";\n\n\n\n // this field is implementation depended so remove it\n\n config.drop_field = true;\n\n\n\n // pod_uid is a string\n\n // container_name is a string\n\n RegexParser::build(&config).map_err(|e| {\n\n format!(\n\n \"Failed in creating file regex transform with error: {:?}\",\n\n e\n\n )\n\n .into()\n\n })\n\n}\n\n\n", "file_path": "src/sources/kubernetes/mod.rs", "rank": 18, "score": 233144.89977367222 }, { "content": "#[typetag::serde(tag = \"type\")]\n\npub trait TransformConfig: core::fmt::Debug {\n\n fn build(&self, cx: TransformContext) -> crate::Result<Box<dyn transforms::Transform>>;\n\n\n\n fn input_type(&self) -> DataType;\n\n\n\n fn output_type(&self) -> DataType;\n\n\n\n fn transform_type(&self) -> &'static str;\n\n\n\n /// Allows a transform configuration to expand itself into multiple \"child\"\n\n /// transformations to replace it. 
This allows a transform to act as a macro\n\n /// for various patterns.\n\n fn expand(&mut self) -> crate::Result<Option<IndexMap<String, Box<dyn TransformConfig>>>> {\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TransformContext {\n\n pub(super) exec: TaskExecutor,\n", "file_path": "src/topology/config/mod.rs", "rank": 19, "score": 231475.0242387532 }, { "content": "pub fn wait_for(mut f: impl FnMut() -> bool) {\n\n let wait = std::time::Duration::from_millis(5);\n\n let limit = std::time::Duration::from_secs(5);\n\n let mut attempts = 0;\n\n while !f() {\n\n std::thread::sleep(wait);\n\n attempts += 1;\n\n if attempts * wait > limit {\n\n panic!(\"timed out while waiting\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 20, "score": 231040.32579721976 }, { "content": "/// Iterates over all paths in form \"a.b[0].c[1]\" in alphabetical order\n\n/// and their corresponding values.\n\npub fn all_fields<'a>(\n\n fields: &'a BTreeMap<Atom, Value>,\n\n) -> impl Iterator<Item = (Atom, &'a Value)> + Serialize {\n\n FieldsIter::new(fields)\n\n}\n\n\n", "file_path": "src/event/util/log/all_fields.rs", "rank": 21, "score": 228950.45106328223 }, { "content": "/// As defined by CRI\n\nfn transform_cri_message() -> crate::Result<Box<dyn Transform>> {\n\n let mut rp_config = RegexParserConfig::default();\n\n // message field\n\n rp_config.regex =\n\n r\"^(?P<timestamp>.*) (?P<stream>(stdout|stderr)) (?P<multiline_tag>(P|F)) (?P<message>.*)$\"\n\n .to_owned();\n\n // drop field\n\n rp_config.types.insert(\n\n event::log_schema().timestamp_key().clone(),\n\n \"timestamp|%+\".to_owned(),\n\n );\n\n // stream is a string\n\n // message is a string\n\n RegexParser::build(&rp_config).map_err(|e| {\n\n format!(\n\n \"Failed in creating message regex transform with error: {:?}\",\n\n e\n\n )\n\n .into()\n\n })\n", "file_path": "src/sources/kubernetes/message_parser.rs", "rank": 22, "score": 227699.47793751868 }, { "content": "fn 
pathbuf_to_string(path: &PathBuf) -> crate::Result<&str> {\n\n path.to_str()\n\n .ok_or_else(|| KafkaError::InvalidPath { path: path.into() }.into())\n\n}\n", "file_path": "src/kafka.rs", "rank": 23, "score": 223509.35853429112 }, { "content": "/// Merges all fields specified at `merge_fields` from `incoming` to `current`.\n\npub fn merge_log_event(current: &mut LogEvent, mut incoming: LogEvent, merge_fields: &[Atom]) {\n\n for merge_field in merge_fields {\n\n let incoming_val = match incoming.remove(merge_field) {\n\n None => continue,\n\n Some(val) => val,\n\n };\n\n match current.get_mut(merge_field) {\n\n None => current.insert(merge_field, incoming_val),\n\n Some(current_val) => merge_value(current_val, incoming_val),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/event/merge.rs", "rank": 24, "score": 223411.75807598804 }, { "content": "#[must_use]\n\npub fn echo(kube: &Kube, name: &str, message: &str) -> KubePod {\n\n // Start echo\n\n let echo = echo_create(ECHO_YAML, kube, name, message);\n\n\n\n // Wait for success state\n\n kube.wait_for_success(echo.clone());\n\n\n\n echo\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 25, "score": 223035.20016633705 }, { "content": "fn default_host_field() -> Atom {\n\n event::LogSchema::default().host_key().clone()\n\n}\n\n\n\ninventory::submit! 
{\n\n SinkDescription::new::<HecSinkConfig>(\"splunk_hec\")\n\n}\n\n\n\n#[typetag::serde(name = \"splunk_hec\")]\n\nimpl SinkConfig for HecSinkConfig {\n\n fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {\n\n validate_host(&self.host)?;\n\n let healthcheck = healthcheck(&self, cx.resolver())?;\n\n let sink = hec(self.clone(), cx)?;\n\n\n\n Ok((sink, healthcheck))\n\n }\n\n\n\n fn input_type(&self) -> DataType {\n\n DataType::Log\n\n }\n\n\n\n fn sink_type(&self) -> &'static str {\n\n \"splunk_hec\"\n\n }\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 26, "score": 219610.512632505 }, { "content": "fn healthcheck() -> impl Future<Item = (), Error = crate::Error> {\n\n future::ok(())\n\n}\n\n\n\nimpl BlackholeSink {\n\n pub fn new(config: BlackholeConfig, acker: Acker) -> Self {\n\n BlackholeSink {\n\n config,\n\n total_events: 0,\n\n total_raw_bytes: 0,\n\n acker,\n\n }\n\n }\n\n}\n\n\n\nimpl Sink for BlackholeSink {\n\n type SinkItem = Event;\n\n type SinkError = ();\n\n\n\n fn start_send(&mut self, item: Self::SinkItem) -> StartSend<Self::SinkItem, Self::SinkError> {\n", "file_path": "src/sinks/blackhole.rs", "rank": 27, "score": 218680.7800641153 }, { "content": "pub fn client(resolver: Resolver) -> crate::Result<Client> {\n\n let mut http = HttpConnector::new_with_resolver(resolver);\n\n http.enforce_http(false);\n\n\n\n let ssl = SslConnector::builder(SslMethod::tls())?;\n\n let https = HttpsConnector::with_connector(http, ssl)?;\n\n\n\n Ok(HttpClient::from_connector(https))\n\n}\n", "file_path": "src/sinks/util/rusoto.rs", "rank": 28, "score": 215039.74790371783 }, { "content": "pub fn parse(packet: &str) -> Result<Metric, ParseError> {\n\n // https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/#datagram-format\n\n let key_and_body = packet.splitn(2, ':').collect::<Vec<_>>();\n\n if key_and_body.len() != 2 {\n\n return Err(ParseError::Malformed(\n\n \"should be key and body with ':' 
separator\",\n\n ));\n\n }\n\n let (key, body) = (key_and_body[0], key_and_body[1]);\n\n\n\n let parts = body.split('|').collect::<Vec<_>>();\n\n if parts.len() < 2 {\n\n return Err(ParseError::Malformed(\n\n \"body should have at least two pipe separated components\",\n\n ));\n\n }\n\n\n\n let name = sanitize_key(key);\n\n let metric_type = parts[1];\n\n\n", "file_path": "src/sources/statsd/parser.rs", "rank": 29, "score": 211896.5036570148 }, { "content": "fn truncate_string_at(s: &str, maxlen: usize) -> Cow<str> {\n\n if s.len() >= maxlen {\n\n let mut len = maxlen - ELLIPSIS.len();\n\n while !s.is_char_boundary(len) {\n\n len -= 1;\n\n }\n\n format!(\"{}{}\", &s[..len], ELLIPSIS).into()\n\n } else {\n\n s.into()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::RegexParserConfig;\n\n use crate::event::{LogEvent, Value};\n\n use crate::{\n\n topology::config::{TransformConfig, TransformContext},\n\n Event,\n\n };\n", "file_path": "src/transforms/regex_parser.rs", "rank": 30, "score": 211769.73914967765 }, { "content": "/// Contains several regexes that can parse common forms of pod_uid.\n\n/// On the first message, regexes are tried out one after the other until\n\n/// first succesfull one has been found. 
After that that regex will be\n\n/// always used.\n\n///\n\n/// If nothing succeeds the message is still passed.\n\nfn transform_pod_uid() -> crate::Result<ApplicableTransform> {\n\n let mut regexes = Vec::new();\n\n\n\n let namespace_regex = r\"(?P<pod_namespace>[0-9a-z.\\-]*)\";\n\n let name_regex = r\"(?P<pod_name>[0-9a-z.\\-]*)\";\n\n // TODO: rename to pod_uid?\n\n let uid_regex = r\"(?P<object_uid>([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12}|[0-9A-Fa-f]{32}))\";\n\n\n\n // Definition of pod_uid has been well defined since Kubernetes 1.14 with https://github.com/kubernetes/kubernetes/pull/74441\n\n\n\n // Minikube 1.15, MicroK8s 1.15,1.14,1.16 , DigitalOcean 1.16 , Google Kubernetes Engine 1.13, 1.14, EKS 1.14\n\n // format: namespace_name_UID\n\n regexes.push(format!(\n\n \"^{}_{}_{}$\",\n\n namespace_regex, name_regex, uid_regex\n\n ));\n\n\n\n // EKS 1.13 , AKS 1.13.12, MicroK8s 1.13\n\n // If everything else fails, try to at least parse out uid from somewhere.\n\n // This is somewhat robust as UUID format is hard to create by accident\n", "file_path": "src/sources/kubernetes/mod.rs", "rank": 31, "score": 211639.30013261683 }, { "content": "/// Parse a string into a timestamp using one of a set of formats\n\npub fn parse_timestamp(s: &str) -> Result<DateTime<Utc>, Error> {\n\n for format in TIMESTAMP_FORMATS {\n\n if let Ok(result) = Local.datetime_from_str(s, format) {\n\n return Ok(datetime_to_utc(result));\n\n }\n\n }\n\n for format in TIMESTAMP_UTC_FORMATS {\n\n if let Ok(result) = Utc.datetime_from_str(s, format) {\n\n return Ok(result);\n\n }\n\n }\n\n if let Ok(result) = DateTime::parse_from_rfc3339(s) {\n\n return Ok(datetime_to_utc(result));\n\n }\n\n if let Ok(result) = DateTime::parse_from_rfc2822(s) {\n\n return Ok(datetime_to_utc(result));\n\n }\n\n for format in TIMESTAMP_TZ_FORMATS {\n\n if let Ok(result) = DateTime::parse_from_str(s, format) {\n\n return Ok(datetime_to_utc(result));\n", "file_path": 
"src/types.rs", "rank": 32, "score": 210343.53094405658 }, { "content": "pub fn random_lines(len: usize) -> impl Iterator<Item = String> {\n\n std::iter::repeat(()).map(move |_| random_string(len))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 33, "score": 207641.41867680458 }, { "content": "pub fn parse(packet: &str) -> Result<Vec<Metric>, ParserError> {\n\n let mut result = Vec::new();\n\n\n\n for (header, group) in group_metrics(packet)? {\n\n // just a header without measurements\n\n if group.is_empty() {\n\n continue;\n\n }\n\n\n\n match header.kind {\n\n ParserType::Counter => {\n\n for line in group {\n\n let metric = parse_metric(&line)?;\n\n let tags = if !metric.tags.is_empty() {\n\n Some(metric.tags)\n\n } else {\n\n None\n\n };\n\n\n\n let counter = Metric {\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 34, "score": 204589.8493877559 }, { "content": "fn to_string(value: impl Serialize) -> String {\n\n let value = serde_json::to_value(&value).unwrap();\n\n value.as_str().unwrap().into()\n\n}\n\n\n", "file_path": "src/sinks/aws_s3.rs", "rank": 35, "score": 201246.9852192796 }, { "content": "fn to_string(value: impl Serialize) -> String {\n\n let value = serde_json::to_value(value).unwrap();\n\n value.as_str().unwrap().into()\n\n}\n\n\n", "file_path": "src/sinks/gcp/cloud_storage.rs", "rank": 36, "score": 198025.85844383138 }, { "content": "pub fn interpolate(input: &str, vars: &HashMap<String, String>) -> String {\n\n let re = Regex::new(r\"\\$\\$|\\$(\\w+)|\\$\\{(\\w+)(?::-([^}]+)?)?\\}\").unwrap();\n\n re.replace_all(input, |caps: &Captures<'_>| {\n\n caps.get(1)\n\n .or_else(|| caps.get(2))\n\n .map(|m| m.as_str())\n\n .map(|name| {\n\n vars.get(name).map(|val| val.as_str()).unwrap_or_else(|| {\n\n caps.get(3).map(|m| m.as_str()).unwrap_or_else(|| {\n\n warn!(\"unknown env var in config: {:?}\", name);\n\n \"\"\n\n })\n\n })\n\n })\n\n .unwrap_or(\"$\")\n\n .to_string()\n\n })\n\n .into_owned()\n\n}\n\n\n", "file_path": 
"src/topology/config/vars.rs", "rank": 37, "score": 197874.73268505346 }, { "content": "pub trait CheckFieldsPredicate: std::fmt::Debug + Send + Sync {\n\n fn check(&self, e: &Event) -> bool;\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/conditions/check_fields.rs", "rank": 38, "score": 197505.51936554 }, { "content": "/// Serve the metrics server via the address from the metrics controller\n\npub fn serve(addr: &SocketAddr, controller: Controller) -> impl Future<Item = (), Error = ()> {\n\n let make_svc = make_service_fn(move |addr_stream: &AddrStream| {\n\n let connection_addr = addr_stream.remote_addr();\n\n let connection_span = info_span!(\"connection\", addr = field::display(&connection_addr));\n\n let controller = controller.clone();\n\n\n\n service_fn_ok(move |_: Request<Body>| {\n\n connection_span.in_scope(|| {\n\n debug!(message = \"snapshotting metrics.\");\n\n let snapshot = controller.get_snapshot().unwrap();\n\n let output = process_snapshot(snapshot).unwrap();\n\n\n\n trace!(\n\n message = \"sending metrics snapshot output.\",\n\n bytes = output.len()\n\n );\n\n Response::new(Body::from(output))\n\n })\n\n })\n\n });\n\n\n\n Server::bind(&addr)\n\n .serve(make_svc)\n\n .map_err(|e| error!(\"metrics server error: {}\", e))\n\n}\n\n\n", "file_path": "src/metrics.rs", "rank": 39, "score": 197169.1003490748 }, { "content": "pub fn https_client(resolver: Resolver, tls: TlsSettings) -> crate::Result<HttpsClient> {\n\n let https = connector(resolver, tls)?;\n\n Ok(hyper::Client::builder().build(https))\n\n}\n\n\n\nimpl<B> Service<B> for HttpService<B> {\n\n type Response = Response;\n\n type Error = Error;\n\n type Future = Box<dyn Future<Item = Self::Response, Error = Self::Error> + Send + 'static>;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n Ok(().into())\n\n }\n\n\n\n fn call(&mut self, body: B) -> Self::Future {\n\n let request = 
(self.request_builder)(body);\n\n\n\n debug!(message = \"sending request.\");\n\n\n\n let fut = self\n", "file_path": "src/sinks/util/http.rs", "rank": 40, "score": 192999.40595613144 }, { "content": "fn render_template(s: &str, event: &Event) -> Result<String, TransformError> {\n\n let template = Template::from(s);\n\n let name = template.render(&event).map_err(|e| {\n\n TransformError::RenderError(format!(\n\n \"Keys ({:?}) do not exist on the event. Dropping event.\",\n\n e\n\n ))\n\n })?;\n\n Ok(String::from_utf8_lossy(&name.to_vec()).to_string())\n\n}\n\n\n", "file_path": "src/transforms/log_to_metric.rs", "rank": 41, "score": 191523.6984312474 }, { "content": "#[test]\n\nfn topology_swap_transform_is_atomic() {\n\n let mut rt = runtime();\n\n let (in1, source1) = source();\n\n let transform1v1 = transform(\" transformed\", 0.0);\n\n let (out1, sink1) = sink();\n\n\n\n let running = Arc::new(AtomicBool::new(true));\n\n let run_control = running.clone();\n\n\n\n let send_counter = Arc::new(AtomicUsize::new(0));\n\n let recv_counter = Arc::new(AtomicUsize::new(0));\n\n let send_total = send_counter.clone();\n\n let recv_total = recv_counter.clone();\n\n\n\n let events = move || match running.load(Ordering::Acquire) {\n\n true => {\n\n send_counter.fetch_add(1, Ordering::Release);\n\n Some(Event::from(\"this\"))\n\n }\n\n false => None,\n", "file_path": "tests/topology.rs", "rank": 42, "score": 190181.23696530343 }, { "content": "pub fn init(color: bool, json: bool, levels: &str, metrics: Option<metrics::Sink>) {\n\n let dispatch = if json {\n\n let subscriber = FmtSubscriber::builder()\n\n .with_env_filter(levels)\n\n .json()\n\n .flatten_event(true)\n\n .finish()\n\n .with(Limit::default());\n\n\n\n if let Some(sink) = metrics {\n\n Dispatch::new(MetricsSubscriber::new(subscriber, sink))\n\n } else {\n\n Dispatch::new(subscriber)\n\n }\n\n } else {\n\n let subscriber = FmtSubscriber::builder()\n\n .with_ansi(color)\n\n .with_env_filter(levels)\n\n .finish()\n\n 
.with(Limit::default());\n", "file_path": "src/trace.rs", "rank": 43, "score": 188002.86853152892 }, { "content": "/// Removes / at the start of str\n\nfn remove_slash(s: &str) -> &str {\n\n s.trim_start_matches(\"/\")\n\n}\n\n\n\n#[cfg(all(test, feature = \"docker-integration-tests\"))]\n\nmod tests {\n\n use super::*;\n\n use crate::runtime;\n\n use crate::test_util::{self, collect_n, trace_init};\n\n use futures01::future;\n\n\n\n static BUXYBOX_IMAGE_TAG: &'static str = \"latest\";\n\n\n\n fn pull(image: &str, docker: &Docker, rt: &mut runtime::Runtime) {\n\n let list_option = shiplift::ImageListOptions::builder()\n\n .filter_name(image)\n\n .build();\n\n\n\n if let Ok(images) = rt\n\n .block_on(docker.images().list(&list_option))\n", "file_path": "src/sources/docker.rs", "rank": 44, "score": 186602.73526123576 }, { "content": "pub fn udp(address: SocketAddr, host_key: Atom, out: mpsc::Sender<Event>) -> Source {\n\n let out = out.sink_map_err(|e| error!(\"error sending event: {:?}\", e));\n\n\n\n Box::new(\n\n future::lazy(move || {\n\n let socket = UdpSocket::bind(&address).expect(\"failed to bind to udp listener socket\");\n\n\n\n info!(message = \"listening.\", %address);\n\n\n\n Ok(socket)\n\n })\n\n .and_then(move |socket| {\n\n let host_key = host_key.clone();\n\n // UDP processes messages per packet, where messages are separated by newline.\n\n // And stretch to end of packet.\n\n UdpFramed::with_decode(socket, BytesDelimitedCodec::new(b'\\n'), true)\n\n .map(move |(line, addr): (Bytes, _)| {\n\n let mut event = Event::from(line);\n\n\n\n event\n", "file_path": "src/sources/socket/udp.rs", "rank": 45, "score": 186114.18012693225 }, { "content": "pub fn hec(config: HecSinkConfig, cx: SinkContext) -> crate::Result<super::RouterSink> {\n\n let host = config.host.clone();\n\n let token = config.token.clone();\n\n let host_field = config.host_field;\n\n\n\n let gzip = match config.compression.unwrap_or(Compression::None) {\n\n Compression::None => false,\n\n 
Compression::Gzip => true,\n\n };\n\n let batch = config.batch.unwrap_or(bytesize::mib(1u64), 1);\n\n let request = config.request.unwrap_with(&REQUEST_DEFAULTS);\n\n let encoding = config.encoding.clone();\n\n\n\n let uri = format!(\"{}/services/collector/event\", host)\n\n .parse::<Uri>()\n\n .context(super::UriParseError)?;\n\n let token = Bytes::from(format!(\"Splunk {}\", token));\n\n\n\n let tls_settings = TlsSettings::from_options(&config.tls)?;\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 46, "score": 184702.96773878648 }, { "content": "pub trait Transform: Send {\n\n fn transform(&mut self, event: Event) -> Option<Event>;\n\n\n\n fn transform_into(&mut self, output: &mut Vec<Event>, event: Event) {\n\n if let Some(transformed) = self.transform(event) {\n\n output.push(transformed);\n\n }\n\n }\n\n\n\n fn transform_stream(\n\n self: Box<Self>,\n\n input_rx: Receiver<Event>,\n\n ) -> Box<dyn Stream<Item = Event, Error = ()> + Send>\n\n where\n\n Self: 'static,\n\n {\n\n let mut me = self;\n\n Box::new(\n\n input_rx\n\n .map(move |event| {\n\n let mut output = Vec::with_capacity(1);\n\n me.transform_into(&mut output, event);\n\n futures01::stream::iter_ok(output.into_iter())\n\n })\n\n .flatten(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/transforms/mod.rs", "rank": 47, "score": 184628.77826876956 }, { "content": "fn insert_fields_from_syslog(event: &mut Event, parsed: Message<&str>) {\n\n let log = event.as_mut_log();\n\n\n\n if let Some(severity) = parsed.severity {\n\n log.insert(\"severity\", severity.as_str());\n\n }\n\n if let Some(facility) = parsed.facility {\n\n log.insert(\"facility\", facility.as_str());\n\n }\n\n if let Protocol::RFC5424(version) = parsed.protocol {\n\n log.insert(\"version\", version as i64);\n\n }\n\n if let Some(app_name) = parsed.appname {\n\n log.insert(\"appname\", app_name);\n\n }\n\n if let Some(msg_id) = parsed.msgid {\n\n log.insert(\"msgid\", msg_id);\n\n }\n\n if let Some(procid) = parsed.procid {\n\n let 
value: Value = match procid {\n", "file_path": "src/sources/syslog.rs", "rank": 48, "score": 184424.87168007414 }, { "content": "fn default_geoip_target_field() -> String {\n\n \"geoip\".to_string()\n\n}\n\n\n\n#[typetag::serde(name = \"geoip\")]\n\nimpl TransformConfig for GeoipConfig {\n\n fn build(&self, _cx: TransformContext) -> Result<Box<dyn Transform>, crate::Error> {\n\n let reader = maxminddb::Reader::open_readfile(self.database.clone())?;\n\n Ok(Box::new(Geoip::new(\n\n reader,\n\n self.source.clone(),\n\n self.target.clone(),\n\n )))\n\n }\n\n\n\n fn input_type(&self) -> DataType {\n\n DataType::Log\n\n }\n\n\n\n fn output_type(&self) -> DataType {\n", "file_path": "src/transforms/geoip.rs", "rank": 49, "score": 179465.72518294016 }, { "content": "fn encode_map(fields: BTreeMap<Atom, Value>) -> proto::ValueMap {\n\n proto::ValueMap {\n\n fields: fields\n\n .into_iter()\n\n .map(|(key, value)| (key.to_string(), encode_value(value)))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/event/mod.rs", "rank": 50, "score": 179152.24524282935 }, { "content": "fn maybe_set_id(key: Option<impl AsRef<str>>, doc: &mut serde_json::Value, event: &Event) {\n\n if let Some(val) = key.and_then(|k| event.as_log().get(&k.as_ref().into())) {\n\n let val = val.to_string_lossy();\n\n\n\n doc.as_object_mut()\n\n .unwrap()\n\n .insert(\"_id\".into(), json!(val));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::Event;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn sets_id_from_custom_field() {\n\n let id_key = Some(\"foo\");\n\n let mut event = Event::from(\"butts\");\n", "file_path": "src/sinks/elasticsearch.rs", "rank": 51, "score": 178724.03385371368 }, { "content": "pub fn start(\n\n config: Config,\n\n rt: &mut runtime::Runtime,\n\n require_healthy: bool,\n\n) -> Option<(RunningTopology, mpsc::UnboundedReceiver<()>)> {\n\n validate(&config, rt.executor())\n\n .and_then(|pieces| start_validated(config, pieces, rt, 
require_healthy))\n\n}\n\n\n", "file_path": "src/topology/mod.rs", "rank": 52, "score": 178329.91294327064 }, { "content": "pub fn open(\n\n data_dir: &Path,\n\n buffer_dir: &Path,\n\n max_size: usize,\n\n) -> Result<(Writer, Reader, super::Acker), Error> {\n\n let path = data_dir.join(buffer_dir);\n\n\n\n // Check data dir\n\n std::fs::metadata(&data_dir)\n\n .map_err(|e| match e.kind() {\n\n io::ErrorKind::PermissionDenied => Error::DataDirNotWritable {\n\n data_dir: data_dir.into(),\n\n },\n\n io::ErrorKind::NotFound => Error::DataDirNotFound {\n\n data_dir: data_dir.into(),\n\n },\n\n _ => Error::DataDirMetadataError {\n\n data_dir: data_dir.into(),\n\n source: e,\n\n },\n", "file_path": "src/buffers/disk.rs", "rank": 53, "score": 178329.91294327064 }, { "content": "pub fn udp(\n\n addr: SocketAddr,\n\n _max_length: usize,\n\n host_key: String,\n\n out: mpsc::Sender<Event>,\n\n) -> super::Source {\n\n let out = out.sink_map_err(|e| error!(\"error sending line: {:?}\", e));\n\n\n\n Box::new(\n\n future::lazy(move || {\n\n let socket = UdpSocket::bind(&addr).expect(\"failed to bind to udp listener socket\");\n\n\n\n info!(\n\n message = \"listening.\",\n\n addr = &field::display(addr),\n\n r#type = \"udp\"\n\n );\n\n\n\n future::ok(socket)\n\n })\n", "file_path": "src/sources/syslog.rs", "rank": 54, "score": 178329.91294327064 }, { "content": "/// Response with body\n\nfn response_json(code: StatusCode, body: impl Serialize) -> Response<Body> {\n\n warp::reply::with_status(warp::reply::json(&body), code).into_response()\n\n}\n\n\n", "file_path": "src/sources/splunk_hec.rs", "rank": 55, "score": 176359.5551688469 }, { "content": "pub fn connector(\n\n resolver: Resolver,\n\n tls_settings: TlsSettings,\n\n) -> crate::Result<HttpsConnector<HttpConnector<Resolver>>> {\n\n let mut http = HttpConnector::new_with_resolver(resolver);\n\n http.enforce_http(false);\n\n\n\n let mut tls = native_tls::TlsConnector::builder();\n\n\n\n 
tls.use_tls_settings(tls_settings);\n\n\n\n let https = HttpsConnector::from((http, tls.build()?));\n\n\n\n Ok(https)\n\n}\n\n\n", "file_path": "src/sinks/util/http.rs", "rank": 56, "score": 175730.68768278847 }, { "content": "pub fn start_validated(\n\n config: Config,\n\n mut pieces: Pieces,\n\n rt: &mut runtime::Runtime,\n\n require_healthy: bool,\n\n) -> Option<(RunningTopology, mpsc::UnboundedReceiver<()>)> {\n\n let (abort_tx, abort_rx) = mpsc::unbounded();\n\n\n\n let mut running_topology = RunningTopology {\n\n inputs: HashMap::new(),\n\n outputs: HashMap::new(),\n\n config: Config::empty(),\n\n shutdown_triggers: HashMap::new(),\n\n source_tasks: HashMap::new(),\n\n tasks: HashMap::new(),\n\n abort_tx,\n\n };\n\n\n\n if !running_topology.run_healthchecks(&config, &mut pieces, rt, require_healthy) {\n\n return None;\n\n }\n\n\n\n running_topology.spawn_all(config, pieces, rt);\n\n Some((running_topology, abort_rx))\n\n}\n\n\n", "file_path": "src/topology/mod.rs", "rank": 57, "score": 175730.68768278847 }, { "content": "pub fn random_maps(\n\n max_size: usize,\n\n field_len: usize,\n\n) -> impl Iterator<Item = HashMap<String, String>> {\n\n iter::repeat(()).map(move |_| random_map(max_size, field_len))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 58, "score": 175730.68768278847 }, { "content": "pub fn send_lines(\n\n addr: SocketAddr,\n\n lines: impl Iterator<Item = String>,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let lines = futures01::stream::iter_ok::<_, ()>(lines);\n\n\n\n TcpStream::connect(&addr)\n\n .map_err(|e| panic!(\"{:}\", e))\n\n .and_then(|socket| {\n\n let out =\n\n FramedWrite::new(socket, LinesCodec::new()).sink_map_err(|e| panic!(\"{:?}\", e));\n\n\n\n lines\n\n .forward(out)\n\n .and_then(|(_source, sink)| {\n\n let socket = sink.into_inner().into_inner();\n\n tokio::io::shutdown(socket).map_err(|e| panic!(\"{:}\", e))\n\n })\n\n .map(|_| ())\n\n })\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 59, "score": 
175730.68768278847 }, { "content": "pub fn unix(\n\n path: PathBuf,\n\n max_length: usize,\n\n host_key: String,\n\n out: mpsc::Sender<Event>,\n\n) -> Source {\n\n build_unix_source(path, max_length, host_key, out, build_event)\n\n}\n", "file_path": "src/sources/socket/unix.rs", "rank": 60, "score": 175730.68768278847 }, { "content": "pub fn build_pieces(\n\n config: &super::Config,\n\n exec: runtime::TaskExecutor,\n\n) -> Result<(Pieces, Vec<String>), Vec<String>> {\n\n let mut inputs = HashMap::new();\n\n let mut outputs = HashMap::new();\n\n let mut tasks = HashMap::new();\n\n let mut source_tasks = HashMap::new();\n\n let mut healthchecks = HashMap::new();\n\n let mut shutdown_triggers = HashMap::new();\n\n\n\n let mut errors = vec![];\n\n let mut warnings = vec![];\n\n\n\n if config.sources.is_empty() {\n\n return Err(vec![\"No sources defined in the config.\".to_owned()]);\n\n }\n\n if config.sinks.is_empty() {\n\n return Err(vec![\"No sinks defined in the config.\".to_owned()]);\n\n }\n", "file_path": "src/topology/builder.rs", "rank": 61, "score": 175730.68768278847 }, { "content": "pub fn healthcheck(\n\n config: &HecSinkConfig,\n\n resolver: Resolver,\n\n) -> crate::Result<super::Healthcheck> {\n\n let uri = format!(\"{}/services/collector/health/1.0\", config.host)\n\n .parse::<Uri>()\n\n .context(super::UriParseError)?;\n\n\n\n let request = Request::get(uri)\n\n .header(\"Authorization\", format!(\"Splunk {}\", config.token))\n\n .body(Body::empty())\n\n .unwrap();\n\n\n\n let tls = TlsSettings::from_options(&config.tls)?;\n\n let client = https_client(resolver, tls)?;\n\n\n\n let healthcheck = client\n\n .request(request)\n\n .map_err(|err| err.into())\n\n .and_then(|response| match response.status() {\n\n StatusCode::OK => Ok(()),\n\n StatusCode::BAD_REQUEST => Err(HealthcheckError::InvalidToken.into()),\n\n StatusCode::SERVICE_UNAVAILABLE => Err(HealthcheckError::QueuesFull.into()),\n\n other => Err(super::HealthcheckError::UnexpectedStatus { 
status: other }.into()),\n\n });\n\n\n\n Ok(Box::new(healthcheck))\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 62, "score": 175730.68768278847 }, { "content": "pub fn trace_init() {\n\n let env = std::env::var(\"TEST_LOG\").unwrap_or_else(|_| \"off\".to_string());\n\n\n\n let subscriber = tracing_subscriber::FmtSubscriber::builder()\n\n .with_env_filter(env)\n\n .finish();\n\n\n\n let _ = tracing_log::LogTracer::init();\n\n let _ = tracing::dispatcher::set_global_default(tracing::Dispatch::new(subscriber));\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 63, "score": 175730.68768278847 }, { "content": "/// Helper function to parse a mapping of conversion descriptions into actual Conversion values.\n\npub fn parse_conversion_map(\n\n types: &HashMap<Atom, String>,\n\n) -> Result<HashMap<Atom, Conversion>, ConversionError> {\n\n types\n\n .iter()\n\n .map(|(field, typename)| {\n\n typename\n\n .parse::<Conversion>()\n\n .map(|conv| (field.clone(), conv))\n\n })\n\n .collect()\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Invalid boolean value {:?}\", s))]\n\n BoolParseError { s: String },\n\n #[snafu(display(\"Invalid integer {:?}: {}\", s, source))]\n\n IntParseError { s: String, source: ParseIntError },\n\n #[snafu(display(\"Invalid floating point number {:?}: {}\", s, source))]\n", "file_path": "src/types.rs", "rank": 64, "score": 175730.68768278847 }, { "content": "// Use this to map a healthcheck response, as it handles setting up the renewal task.\n\npub fn healthcheck_response(\n\n creds: Option<GcpCredentials>,\n\n not_found_error: crate::Error,\n\n) -> impl FnOnce(http::Response<hyper::Body>) -> crate::Result<()> {\n\n move |response| match response.status() {\n\n StatusCode::OK => {\n\n // If there are credentials configured, the\n\n // generated OAuth token needs to be periodically\n\n // regenerated. 
Since the health check runs at\n\n // startup, after a successful health check is a\n\n // good place to create the regeneration task.\n\n creds.map(|creds| creds.spawn_regenerate_token());\n\n Ok(())\n\n }\n\n StatusCode::FORBIDDEN => Err(GcpError::InvalidCredentials0.into()),\n\n StatusCode::NOT_FOUND => Err(not_found_error),\n\n status => Err(HealthcheckError::UnexpectedStatus { status }.into()),\n\n }\n\n}\n\n\n", "file_path": "src/sinks/gcp/mod.rs", "rank": 65, "score": 173265.13736671337 }, { "content": "/// Helper function to parse a conversion map and check against a list of names\n\npub fn parse_check_conversion_map(\n\n types: &HashMap<Atom, String>,\n\n names: &[Atom],\n\n) -> Result<HashMap<Atom, Conversion>, ConversionError> {\n\n // Check if any named type references a nonexistent field\n\n let names: HashSet<Atom> = names.iter().map(|s| s.into()).collect();\n\n for name in types.keys() {\n\n if !names.contains(name) {\n\n warn!(\n\n message = \"Field was specified in the types but is not a valid field name.\",\n\n field = &name[..]\n\n );\n\n }\n\n }\n\n\n\n parse_conversion_map(types)\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 66, "score": 173259.47785229067 }, { "content": "pub fn random_events_with_stream(\n\n len: usize,\n\n count: usize,\n\n) -> (Vec<Event>, impl Stream<Item = Event, Error = ()>) {\n\n random_events_with_stream_generic(count, move || Event::from(random_string(len)))\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 67, "score": 173259.47785229067 }, { "content": "pub fn random_lines_with_stream(\n\n len: usize,\n\n count: usize,\n\n) -> (Vec<String>, impl Stream<Item = Event, Error = ()>) {\n\n let lines = (0..count).map(|_| random_string(len)).collect::<Vec<_>>();\n\n let stream = stream::iter_ok(lines.clone().into_iter().map(Event::from));\n\n (lines, stream)\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 68, "score": 173259.47785229067 }, { "content": "pub fn raw_tcp(\n\n host: String,\n\n port: u16,\n\n cx: 
SinkContext,\n\n encoding: Encoding,\n\n tls: Option<TlsSettings>,\n\n) -> RouterSink {\n\n Box::new(\n\n TcpSink::new(host, port, cx.resolver(), tls)\n\n .stream_ack(cx.acker())\n\n .with_flat_map(move |event| iter_ok(encode_event(event, &encoding))),\n\n )\n\n}\n\n\n", "file_path": "src/sinks/util/tcp.rs", "rank": 69, "score": 173259.47785229067 }, { "content": "pub fn file_source(\n\n config: &FileConfig,\n\n data_dir: PathBuf,\n\n out: mpsc::Sender<Event>,\n\n) -> super::Source {\n\n let (shutdown_tx, shutdown_rx) = std::sync::mpsc::channel();\n\n\n\n let ignore_before = config\n\n .ignore_older\n\n .map(|secs| SystemTime::now() - Duration::from_secs(secs));\n\n let glob_minimum_cooldown = Duration::from_millis(config.glob_minimum_cooldown);\n\n\n\n let file_server = FileServer {\n\n include: config.include.clone(),\n\n exclude: config.exclude.clone(),\n\n max_read_bytes: config.max_read_bytes,\n\n start_at_beginning: config.start_at_beginning,\n\n ignore_before,\n\n max_line_bytes: config.max_line_bytes,\n\n data_dir,\n", "file_path": "src/sources/file/mod.rs", "rank": 70, "score": 173259.47785229067 }, { "content": "pub fn send_lines_tls(\n\n addr: SocketAddr,\n\n host: String,\n\n lines: impl Iterator<Item = String>,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let lines = futures01::stream::iter_ok::<_, ()>(lines);\n\n\n\n let connector: TlsConnector = native_tls::TlsConnector::builder()\n\n .danger_accept_invalid_certs(true)\n\n .danger_accept_invalid_hostnames(true)\n\n .build()\n\n .expect(\"Failed to build TLS connector\")\n\n .into();\n\n\n\n TcpStream::connect(&addr)\n\n .map_err(|e| panic!(\"{:}\", e))\n\n .and_then(move |socket| {\n\n connector\n\n .connect(&host, socket)\n\n .map_err(|e| panic!(\"{:}\", e))\n", "file_path": "src/test_util.rs", "rank": 71, "score": 173259.47785229067 }, { "content": "fn encode_namespace(namespace: &str, name: &str) -> String {\n\n if !namespace.is_empty() {\n\n format!(\"{}_{}\", namespace, name)\n\n } else 
{\n\n name.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/sinks/prometheus.rs", "rank": 72, "score": 172684.76971889206 }, { "content": "fn event_to_json(event: LogEvent, indexed_fields: &[Atom], timestamp: i64) -> JsonValue {\n\n let fields = indexed_fields\n\n .iter()\n\n .filter_map(|field| event.get(field).map(|value| (field, value.clone())))\n\n .collect::<LogEvent>();\n\n\n\n json!({\n\n \"fields\": fields,\n\n \"event\": event,\n\n \"time\": timestamp\n\n })\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec.rs", "rank": 73, "score": 171111.03781281284 }, { "content": "pub fn random_nested_events_with_stream(\n\n len: usize,\n\n breadth: usize,\n\n depth: usize,\n\n count: usize,\n\n) -> (Vec<Event>, impl Stream<Item = Event, Error = ()>) {\n\n random_events_with_stream_generic(count, move || {\n\n let mut log = Event::new_empty_log().into_log();\n\n\n\n let tree = random_pseudonested_map(len, breadth, depth);\n\n for (k, v) in tree.into_iter() {\n\n log.insert(k, v)\n\n }\n\n\n\n Event::Log(log)\n\n })\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 74, "score": 170907.05334472418 }, { "content": "pub fn build_unix_source(\n\n path: PathBuf,\n\n max_length: usize,\n\n host_key: String,\n\n out: mpsc::Sender<Event>,\n\n build_event: impl Fn(&str, Option<Bytes>, &str) -> Option<Event>\n\n + std::marker::Send\n\n + std::marker::Sync\n\n + std::clone::Clone\n\n + 'static,\n\n) -> Source {\n\n let out = out.sink_map_err(|e| error!(\"error sending line: {:?}\", e));\n\n\n\n Box::new(future::lazy(move || {\n\n let listener = UnixListener::bind(&path).expect(\"failed to bind to listener socket\");\n\n\n\n info!(message = \"listening.\", ?path, r#type = \"unix\");\n\n\n\n listener\n\n .incoming()\n", "file_path": "src/sources/util/unix.rs", "rank": 75, "score": 170907.05334472418 }, { "content": "pub fn build_test_server(\n\n addr: &std::net::SocketAddr,\n\n) -> (\n\n mpsc::Receiver<(http::request::Parts, hyper::Chunk)>,\n\n stream_cancel::Trigger,\n\n impl 
Future<Item = (), Error = ()>,\n\n) {\n\n let (tx, rx) = mpsc::channel(100);\n\n let service = move || {\n\n let tx = tx.clone();\n\n service_fn_ok(move |req: Request<Body>| {\n\n let (parts, body) = req.into_parts();\n\n\n\n let tx = tx.clone();\n\n tokio::spawn(\n\n body.concat2()\n\n .map_err(|e| panic!(e))\n\n .and_then(|body| tx.send((parts, body)))\n\n .map(|_| ())\n\n .map_err(|e| panic!(e)),\n", "file_path": "src/sinks/util/test.rs", "rank": 76, "score": 170907.05334472418 }, { "content": "fn encode_namespace(namespace: &str, name: &str) -> String {\n\n if !namespace.is_empty() {\n\n format!(\"{}.{}\", namespace, name)\n\n } else {\n\n name.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/sinks/datadog_metrics.rs", "rank": 77, "score": 170813.5128018424 }, { "content": "fn encode_namespace(namespace: &str, name: &str) -> String {\n\n if !namespace.is_empty() {\n\n format!(\"{}.{}\", namespace, name)\n\n } else {\n\n name.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 78, "score": 170813.5128018424 }, { "content": "pub fn runtime() -> Runtime {\n\n Runtime::single_threaded().unwrap()\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 79, "score": 169928.303103285 }, { "content": "pub fn current_span() -> Span {\n\n Span::current()\n\n}\n", "file_path": "src/trace.rs", "rank": 80, "score": 169928.303103285 }, { "content": "pub fn default_true() -> bool {\n\n true\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 81, "score": 169928.303103285 }, { "content": "pub fn default_false() -> bool {\n\n false\n\n}\n", "file_path": "src/serde.rs", "rank": 82, "score": 169928.303103285 }, { "content": "pub fn collect_n<T>(mut rx: mpsc::Receiver<T>, n: usize) -> impl Future<Item = Vec<T>, Error = ()> {\n\n let mut events = Vec::new();\n\n\n\n future::poll_fn(move || {\n\n while events.len() < n {\n\n let e = try_ready!(rx.poll()).unwrap();\n\n events.push(e);\n\n }\n\n Ok(Async::Ready(mem::replace(&mut events, Vec::new())))\n\n 
})\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 83, "score": 168835.57804232524 }, { "content": "pub fn random_map(max_size: usize, field_len: usize) -> HashMap<String, String> {\n\n let size = thread_rng().gen_range(0, max_size);\n\n\n\n (0..size)\n\n .map(move |_| (random_string(field_len), random_string(field_len)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 84, "score": 167955.59727447852 }, { "content": "fn capitalize(s: &str) -> String {\n\n let mut s = s.to_owned();\n\n if let Some(r) = s.get_mut(0..1) {\n\n r.make_ascii_uppercase();\n\n }\n\n s\n\n}\n", "file_path": "src/topology/builder.rs", "rank": 85, "score": 165328.252477873 }, { "content": "pub fn default_host() -> String {\n\n String::from(\"https://api.datadoghq.com\")\n\n}\n\n\n\n// https://docs.datadoghq.com/api/?lang=bash#post-timeseries-points\n", "file_path": "src/sinks/datadog_metrics.rs", "rank": 86, "score": 165104.66876522073 }, { "content": "pub fn next_addr() -> SocketAddr {\n\n use std::net::{IpAddr, Ipv4Addr};\n\n\n\n let port = NEXT_PORT.fetch_add(1, Ordering::AcqRel) as u16;\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), port)\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 87, "score": 165104.66876522073 }, { "content": "pub fn default_address() -> SocketAddr {\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8125)\n\n}\n\n\n\ninventory::submit! 
{\n\n SinkDescription::new_without_default::<StatsdSinkConfig>(\"statsd\")\n\n}\n\n\n\n#[typetag::serde(name = \"statsd\")]\n\nimpl SinkConfig for StatsdSinkConfig {\n\n fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {\n\n let sink = StatsdSvc::new(self.clone(), cx.acker())?;\n\n let healthcheck = StatsdSvc::healthcheck(self.clone())?;\n\n Ok((sink, healthcheck))\n\n }\n\n\n\n fn input_type(&self) -> DataType {\n\n DataType::Metric\n\n }\n\n\n", "file_path": "src/sinks/statsd.rs", "rank": 88, "score": 165104.66876522073 }, { "content": "pub fn default_address() -> SocketAddr {\n\n use std::net::{IpAddr, Ipv4Addr};\n\n\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 9598)\n\n}\n\n\n", "file_path": "src/sinks/prometheus.rs", "rank": 89, "score": 165104.66876522073 }, { "content": "/// Build the metrics receiver, controller and sink\n\npub fn build() -> (Controller, Sink) {\n\n let mut receiver = Receiver::builder().build();\n\n let controller = receiver.get_controller();\n\n let sink = receiver.get_sink();\n\n\n\n std::thread::spawn(move || {\n\n receiver.run();\n\n });\n\n\n\n (controller, sink)\n\n}\n\n\n", "file_path": "src/metrics.rs", "rank": 90, "score": 164955.35337985947 }, { "content": "fn echo_create(template: &str, kube: &Kube, name: &str, message: &str) -> KubePod {\n\n kube.create(\n\n Api::v1Pod,\n\n template\n\n .replace(ECHO_NAME, name)\n\n .replace(ARGS_MARKER, format!(\"{}\", message).as_str())\n\n .as_str(),\n\n )\n\n}\n\n\n", "file_path": "src/sources/kubernetes/test.rs", "rank": 91, "score": 164565.2354626089 }, { "content": "pub fn default_flush_period_secs() -> u64 {\n\n 60\n\n}\n\n\n\ninventory::submit! 
{\n\n SinkDescription::new_without_default::<PrometheusSinkConfig>(\"prometheus\")\n\n}\n\n\n\n#[typetag::serde(name = \"prometheus\")]\n\nimpl SinkConfig for PrometheusSinkConfig {\n\n fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {\n\n if self.flush_period_secs < MIN_FLUSH_PERIOD_SECS {\n\n return Err(Box::new(BuildError::FlushPeriodTooShort {\n\n min: MIN_FLUSH_PERIOD_SECS,\n\n }));\n\n }\n\n\n\n let sink = Box::new(PrometheusSink::new(self.clone(), cx.acker()));\n\n let healthcheck = Box::new(future::ok(()));\n\n\n", "file_path": "src/sinks/prometheus.rs", "rank": 92, "score": 162862.66598753477 }, { "content": "/// Does the format specifier have a time zone option?\n\nfn format_has_zone(fmt: &str) -> bool {\n\n fmt.find(\"%Z\").is_some()\n\n || fmt.find(\"%z\").is_some()\n\n || fmt.find(\"%:z\").is_some()\n\n || fmt.find(\"%#z\").is_some()\n\n || fmt.find(\"%+\").is_some()\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 93, "score": 162854.9697211588 }, { "content": "pub fn default_scrape_interval_secs() -> u64 {\n\n 15\n\n}\n\n\n\n#[typetag::serde(name = \"prometheus\")]\n\nimpl crate::topology::config::SourceConfig for PrometheusConfig {\n\n fn build(\n\n &self,\n\n _name: &str,\n\n _globals: &GlobalOptions,\n\n out: mpsc::Sender<Event>,\n\n ) -> crate::Result<super::Source> {\n\n let mut urls = Vec::new();\n\n for host in self.hosts.iter() {\n\n let base_uri = host.parse::<Uri>().context(super::UriParseError)?;\n\n urls.push(format!(\"{}metrics\", base_uri));\n\n }\n\n Ok(prometheus(urls, self.scrape_interval_secs, out))\n\n }\n\n\n\n fn output_type(&self) -> crate::topology::config::DataType {\n\n crate::topology::config::DataType::Metric\n\n }\n\n\n\n fn source_type(&self) -> &'static str {\n\n \"prometheus\"\n\n }\n\n}\n\n\n", "file_path": "src/sources/prometheus/mod.rs", "rank": 94, "score": 160723.48850803816 }, { "content": "fn is_comment(input: &str) -> bool {\n\n input.starts_with(\"#\")\n\n}\n\n\n", 
"file_path": "src/sources/prometheus/parser.rs", "rank": 95, "score": 160500.57192812953 }, { "content": "fn is_header(input: &str) -> bool {\n\n input.starts_with(\"# TYPE\")\n\n}\n\n\n", "file_path": "src/sources/prometheus/parser.rs", "rank": 96, "score": 160500.57192812953 }, { "content": "pub fn shutdown_on_idle(runtime: Runtime) {\n\n block_on(\n\n runtime\n\n .shutdown_on_idle()\n\n .timeout(std::time::Duration::from_secs(10)),\n\n )\n\n .unwrap()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CollectCurrent<S>\n\nwhere\n\n S: Stream,\n\n{\n\n stream: Option<S>,\n\n}\n\n\n\nimpl<S: Stream> CollectCurrent<S> {\n\n pub fn new(s: S) -> Self {\n\n Self { stream: Some(s) }\n", "file_path": "src/test_util.rs", "rank": 97, "score": 160131.7190417952 }, { "content": "fn null_reader() -> impl BufRead {\n\n io::Cursor::new(Vec::new())\n\n}\n\n\n", "file_path": "lib/file-source/src/file_watcher.rs", "rank": 98, "score": 159377.03040252748 }, { "content": "fn to_fields(value: f64) -> HashMap<String, Field> {\n\n let fields: HashMap<String, Field> = vec![(\"value\".to_owned(), Field::Float(value))]\n\n .into_iter()\n\n .collect();\n\n fields\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event::metric::{Metric, MetricKind, MetricValue};\n\n use chrono::offset::TimeZone;\n\n use pretty_assertions::assert_eq;\n\n\n\n fn ts() -> DateTime<Utc> {\n\n Utc.ymd(2018, 11, 14).and_hms_nano(8, 9, 10, 11)\n\n }\n\n\n\n fn tags() -> BTreeMap<String, String> {\n\n vec![\n", "file_path": "src/sinks/influxdb_metrics.rs", "rank": 99, "score": 159288.22036926195 } ]
Rust
src/netlink-generic/message.rs
qinqon/nispor
86e8a54cba6ad8161bfa78fa683bd114a3ed7799
use anyhow::Context; use netlink_packet_core::{ DecodeError, NetlinkDeserializable, NetlinkHeader, NetlinkPayload, NetlinkSerializable, }; use netlink_packet_utils::{ nla::{DefaultNla, NlasIterator}, Emitable, Parseable, ParseableParametrized, }; use crate::{ buffer::GENL_ID_CTRL, CtrlAttr, GenericNetlinkHeader, GenericNetlinkMessageBuffer, }; #[derive(Debug, PartialEq, Eq, Clone)] pub enum GenericNetlinkAttr { Ctrl(Vec<CtrlAttr>), Other(Vec<DefaultNla>), } #[derive(Debug, PartialEq, Eq, Clone)] pub struct GenericNetlinkMessage { pub message_type: u16, pub header: GenericNetlinkHeader, pub nlas: GenericNetlinkAttr, } impl Emitable for GenericNetlinkMessage { fn buffer_len(&self) -> usize { self.header.buffer_len() + match &self.nlas { GenericNetlinkAttr::Ctrl(nlas) => nlas.as_slice().buffer_len(), GenericNetlinkAttr::Other(nlas) => nlas.as_slice().buffer_len(), } } fn emit(&self, buffer: &mut [u8]) { self.header.emit(buffer); match &self.nlas { GenericNetlinkAttr::Ctrl(nlas) => nlas .as_slice() .emit(&mut buffer[self.header.buffer_len()..]), GenericNetlinkAttr::Other(nlas) => nlas .as_slice() .emit(&mut buffer[self.header.buffer_len()..]), } } } impl NetlinkSerializable<GenericNetlinkMessage> for GenericNetlinkMessage { fn message_type(&self) -> u16 { self.message_type } fn buffer_len(&self) -> usize { <Self as Emitable>::buffer_len(self) } fn serialize(&self, buffer: &mut [u8]) { self.emit(buffer) } } impl NetlinkDeserializable<GenericNetlinkMessage> for GenericNetlinkMessage { type Error = DecodeError; fn deserialize( header: &NetlinkHeader, payload: &[u8], ) -> Result<Self, Self::Error> { let buf = GenericNetlinkMessageBuffer::new(payload); GenericNetlinkMessage::parse_with_param(&buf, header.message_type) } } impl<'a, T: AsRef<[u8]> + ?Sized> ParseableParametrized<GenericNetlinkMessageBuffer<&'a T>, u16> for GenericNetlinkMessage { fn parse_with_param( buf: &GenericNetlinkMessageBuffer<&'a T>, message_type: u16, ) -> Result<Self, DecodeError> { let header = 
GenericNetlinkHeader::parse(buf) .context("failed to parse generic netlink message header")?; match message_type { GENL_ID_CTRL => { match GenericNetlinkMessageBuffer::new_checked(&buf.inner()) { Ok(buf) => Ok(GenericNetlinkMessage { message_type, header, nlas: { let mut nlas = Vec::new(); let error_msg = "failed to parse control message attributes"; for nla in NlasIterator::new(buf.payload()) { let nla = &nla.context(error_msg)?; let parsed = CtrlAttr::parse(nla).context(error_msg)?; nlas.push(parsed); } GenericNetlinkAttr::Ctrl(nlas) }, }), Err(e) => Err(e), } } _ => Err(format!("Unknown message type: {}", message_type).into()), } } } impl From<GenericNetlinkMessage> for NetlinkPayload<GenericNetlinkMessage> { fn from(message: GenericNetlinkMessage) -> Self { NetlinkPayload::InnerMessage(message) } }
use anyhow::Context; use netlink_packet_core::{ DecodeError, NetlinkDeserializable, NetlinkHeader, NetlinkPayload, NetlinkSerializable, }; use netlink_packet_utils::{ nla::{DefaultNla, NlasIterator}, Emitable, Parseable, ParseableParametrized, }; use crate::{ buffer::GENL_ID_CTRL, CtrlAttr, GenericNetlinkHeader, GenericNetlinkMessageBuffer, }; #[derive(Debug, PartialEq, Eq, Clone)] pub enum GenericNetlinkAttr { Ctrl(Vec<CtrlAttr>), Other(Vec<DefaultNla>), } #[derive(Debug, PartialEq, Eq, Clone)] pub struct GenericNetlinkMessage { pub message_type: u16, pub header: GenericNetlinkHeader, pub nlas: GenericNetlinkAttr, } impl Emitable for GenericNetlinkMessage { fn buffer_len(&self) -> usize { self.header.buffer_len() + match &self.nlas { GenericNetlinkAttr::Ctrl(nlas) => nlas.as_slice().buffer_len(), GenericNetlinkAttr::Other(nlas) => nlas.as_slice().buffer_len(), } } fn emit(&self, buffer: &mut [u8]) { self.header.emit(b
} } } impl NetlinkSerializable<GenericNetlinkMessage> for GenericNetlinkMessage { fn message_type(&self) -> u16 { self.message_type } fn buffer_len(&self) -> usize { <Self as Emitable>::buffer_len(self) } fn serialize(&self, buffer: &mut [u8]) { self.emit(buffer) } } impl NetlinkDeserializable<GenericNetlinkMessage> for GenericNetlinkMessage { type Error = DecodeError; fn deserialize( header: &NetlinkHeader, payload: &[u8], ) -> Result<Self, Self::Error> { let buf = GenericNetlinkMessageBuffer::new(payload); GenericNetlinkMessage::parse_with_param(&buf, header.message_type) } } impl<'a, T: AsRef<[u8]> + ?Sized> ParseableParametrized<GenericNetlinkMessageBuffer<&'a T>, u16> for GenericNetlinkMessage { fn parse_with_param( buf: &GenericNetlinkMessageBuffer<&'a T>, message_type: u16, ) -> Result<Self, DecodeError> { let header = GenericNetlinkHeader::parse(buf) .context("failed to parse generic netlink message header")?; match message_type { GENL_ID_CTRL => { match GenericNetlinkMessageBuffer::new_checked(&buf.inner()) { Ok(buf) => Ok(GenericNetlinkMessage { message_type, header, nlas: { let mut nlas = Vec::new(); let error_msg = "failed to parse control message attributes"; for nla in NlasIterator::new(buf.payload()) { let nla = &nla.context(error_msg)?; let parsed = CtrlAttr::parse(nla).context(error_msg)?; nlas.push(parsed); } GenericNetlinkAttr::Ctrl(nlas) }, }), Err(e) => Err(e), } } _ => Err(format!("Unknown message type: {}", message_type).into()), } } } impl From<GenericNetlinkMessage> for NetlinkPayload<GenericNetlinkMessage> { fn from(message: GenericNetlinkMessage) -> Self { NetlinkPayload::InnerMessage(message) } }
uffer); match &self.nlas { GenericNetlinkAttr::Ctrl(nlas) => nlas .as_slice() .emit(&mut buffer[self.header.buffer_len()..]), GenericNetlinkAttr::Other(nlas) => nlas .as_slice() .emit(&mut buffer[self.header.buffer_len()..]),
function_block-random_span
[ { "content": "fn feature_bits_emit(_feature_bits: &[FeatureBit], _buffer: &mut [u8]) {\n\n todo!(\"Does not support changing ethtool feature yet\")\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum FeatureAttr {\n\n Header(Vec<EthtoolHeader>),\n\n Hw(Vec<FeatureBit>),\n\n Wanted(Vec<FeatureBit>),\n\n Active(Vec<FeatureBit>),\n\n NoChange(Vec<FeatureBit>),\n\n Other(DefaultNla),\n\n}\n\n\n\nimpl nla::Nla for FeatureAttr {\n\n fn value_len(&self) -> usize {\n\n match self {\n\n Self::Header(hdrs) => hdrs.as_slice().buffer_len(),\n\n Self::Hw(feature_bits)\n\n | Self::Wanted(feature_bits)\n", "file_path": "src/netlink-ethtool/feature/attr.rs", "rank": 0, "score": 248478.39091525553 }, { "content": "type BondNlaParseFunc = fn(&[u8], &mut BondInfo) -> Result<(), NisporError>;\n\n\n\nconst NLA_PARSE_FUNS: &[BondNlaParseFunc] = &[\n\n parse_void, // IFLA_BOND_UNSPEC\n\n parse_void, // IFLA_BOND_MODE parsed by get_bond_mode()\n\n parse_void, // IFLA_BOND_ACTIVE_SLAVE is deprecated\n\n parse_miimon,\n\n parse_updelay,\n\n parse_downdelay,\n\n parse_use_carrier,\n\n parse_arp_interval,\n\n parse_arp_ip_target,\n\n parse_arp_validate,\n\n parse_arp_all_targets,\n\n parse_primary,\n\n parse_primary_reselect,\n\n parse_fail_over_mac,\n\n parse_xmit_hash_policy,\n\n parse_resend_igmp,\n\n parse_num_peer_notif,\n", "file_path": "src/lib/netlink/bond.rs", "rank": 1, "score": 204455.3627664959 }, { "content": "fn parse_bitset_bits_nla(raw: &[u8]) -> Result<Vec<String>, DecodeError> {\n\n let mut modes = Vec::new();\n\n let error_msg = \"Failed to parse ETHTOOL_A_BITSET_BITS attributes\";\n\n for bit_nla in NlasIterator::new(raw) {\n\n let bit_nla = &bit_nla.context(error_msg)?;\n\n match bit_nla.kind() {\n\n ETHTOOL_A_BITSET_BITS_BIT => {\n\n let error_msg =\n\n \"Failed to parse ETHTOOL_A_BITSET_BITS_BIT attributes\";\n\n let nlas = NlasIterator::new(bit_nla.value());\n\n for nla in nlas {\n\n let nla = &nla.context(error_msg)?;\n\n let payload = nla.value();\n\n 
match nla.kind() {\n\n ETHTOOL_A_BITSET_BIT_INDEX\n\n | ETHTOOL_A_BITSET_BIT_VALUE => {\n\n // ignored\n\n }\n\n ETHTOOL_A_BITSET_BIT_NAME => {\n\n modes.push(parse_string(payload).context(\n", "file_path": "src/netlink-ethtool/link_mode/attr.rs", "rank": 2, "score": 166715.82662336988 }, { "content": "fn str_to_zero_ended_u8_array(\n\n src_str: &str,\n\n buffer: &mut [u8],\n\n max_size: usize,\n\n) {\n\n if let Ok(src_cstring) = CString::new(src_str.as_bytes()) {\n\n let src_null_ended_str = src_cstring.into_bytes_with_nul();\n\n if src_null_ended_str.len() > max_size {\n\n buffer[..max_size].clone_from_slice(&src_null_ended_str[..max_size])\n\n } else {\n\n buffer[..src_null_ended_str.len()]\n\n .clone_from_slice(&src_null_ended_str)\n\n }\n\n }\n\n}\n", "file_path": "src/netlink-ethtool/header.rs", "rank": 3, "score": 157217.26924565542 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn new_connection(\n\n family_id: u16,\n\n) -> io::Result<(\n\n Connection<EthtoolMessage>,\n\n EthtoolHandle,\n\n UnboundedReceiver<(NetlinkMessage<EthtoolMessage>, SocketAddr)>,\n\n)> {\n\n let (conn, handle, messages) =\n\n netlink_proto::new_connection(NETLINK_GENERIC)?;\n\n Ok((conn, EthtoolHandle::new(handle, family_id), messages))\n\n}\n", "file_path": "src/netlink-ethtool/connection.rs", "rank": 4, "score": 148458.3259299668 }, { "content": "pub fn clear_network_environment() {\n\n cmd_exec(\"../../tools/test_env\", vec![\"rm\"]);\n\n}\n\n\n", "file_path": "src/lib/tests/utils.rs", "rank": 5, "score": 146642.7732135345 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn new_connection() -> io::Result<(\n\n Connection<GenericNetlinkMessage>,\n\n GenericNetlinkHandle,\n\n UnboundedReceiver<(NetlinkMessage<GenericNetlinkMessage>, SocketAddr)>,\n\n)> {\n\n let (conn, handle, messages) =\n\n netlink_proto::new_connection(NETLINK_GENERIC)?;\n\n Ok((conn, GenericNetlinkHandle::new(handle), messages))\n\n}\n", "file_path": "src/netlink-generic/connection.rs", 
"rank": 6, "score": 133365.58987998767 }, { "content": "fn parse_bitset_bits_nlas(raw: &[u8]) -> Result<Vec<String>, DecodeError> {\n\n let error_msg = \"failed to parse mode bit sets\";\n\n for nla in NlasIterator::new(raw) {\n\n let nla = &nla.context(error_msg)?;\n\n if nla.kind() == ETHTOOL_A_BITSET_BITS {\n\n return parse_bitset_bits_nla(nla.value());\n\n }\n\n }\n\n Err(\"No ETHTOOL_A_BITSET_BITS NLA found\".into())\n\n}\n\n\n", "file_path": "src/netlink-ethtool/link_mode/attr.rs", "rank": 7, "score": 131482.76019687613 }, { "content": "pub fn set_network_environment(env_type: &str) {\n\n assert!(cmd_exec(\"../../tools/test_env\", vec![env_type]));\n\n}\n\n\n", "file_path": "src/lib/tests/utils.rs", "rank": 8, "score": 130096.32271961996 }, { "content": "fn parse_as_nested_ipv4_addr(raw: &[u8]) -> Vec<Ipv4Addr> {\n\n let mut addresses = Vec::new();\n\n let nlas = NlasIterator::new(raw);\n\n for nla in nlas {\n\n match nla {\n\n Ok(nla) => addresses.push(parse_as_ipv4(nla.value())),\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n }\n\n }\n\n }\n\n addresses\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 9, "score": 122655.65787075489 }, { "content": "fn _addr_to_string(data: &[u8], family: &AddressFamily) -> String {\n\n match family {\n\n AddressFamily::IPv4 => parse_as_ipv4(data).to_string(),\n\n AddressFamily::IPv6 => parse_as_ipv6(data).to_string(),\n\n _ => format!(\"{:?}\", data),\n\n }\n\n}\n", "file_path": "src/lib/route.rs", "rank": 10, "score": 122009.98613965558 }, { "content": "fn tidy_up(iface_states: &mut HashMap<String, Iface>) {\n\n controller_iface_index_to_name(iface_states);\n\n bond_iface_tidy_up(iface_states);\n\n bridge_iface_tidy_up(iface_states);\n\n vlan_iface_tidy_up(iface_states);\n\n vxlan_iface_tidy_up(iface_states);\n\n veth_iface_tidy_up(iface_states);\n\n vrf_iface_tidy_up(iface_states);\n\n mac_vlan_iface_tidy_up(iface_states);\n\n}\n\n\n", "file_path": "src/lib/ifaces/mod.rs", "rank": 11, "score": 
120507.02891587981 }, { "content": "// TODO: Rust offical has std::net::Ipv6Addr::is_unicast_link_local() in\n\n// experimental.\n\nfn is_ipv6_unicast_link_local(ip: &str, prefix: u8) -> bool {\n\n // The unicast link local address range is fe80::/10.\n\n is_ipv6_addr(ip)\n\n && ip.len() >= 3\n\n && [\"fe8\", \"fe9\", \"fea\", \"feb\"].contains(&&ip[..3])\n\n && prefix >= 10\n\n}\n\n\n\nasync fn remove_ip_addr_conf(\n\n handle: &rtnetlink::Handle,\n\n ip_addr_conf: &IpAddrConf,\n\n nl_addr_msg: AddressMessage,\n\n) -> Result<(), NisporError> {\n\n if let Err(rtnetlink::Error::NetlinkError(e)) =\n\n handle.address().del(nl_addr_msg).execute().await\n\n {\n\n eprintln!(\n\n \"Failed to remove IP address {}/{}: {}\",\n\n ip_addr_conf.address, ip_addr_conf.prefix_len, &e\n\n );\n\n Err(parse_apply_netlink_error(&e))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/lib/ip.rs", "rank": 12, "score": 120472.94502856894 }, { "content": "fn parse_use_carrier(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.use_carrier = Some(parse_as_u8(data)? 
> 0);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 13, "score": 119688.28339522178 }, { "content": "fn parse_as_48_bits_mac(data: &[u8]) -> Result<String, NisporError> {\n\n parse_as_mac(6, data)\n\n}\n\n\n\nconst IFLA_BOND_AD_INFO_AGGREGATOR: u16 = 1;\n\nconst IFLA_BOND_AD_INFO_NUM_PORTS: u16 = 2;\n\nconst IFLA_BOND_AD_INFO_ACTOR_KEY: u16 = 3;\n\nconst IFLA_BOND_AD_INFO_PARTNER_KEY: u16 = 4;\n\nconst IFLA_BOND_AD_INFO_PARTNER_MAC: u16 = 5;\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 14, "score": 118995.30012041041 }, { "content": "fn parse_as_bridge_id(data: &[u8]) -> Result<String, NisporError> {\n\n let err_msg = \"wrong index at bridge_id parsing\";\n\n Ok(format!(\n\n \"{:02x}{:02x}.{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}\",\n\n data.get(0)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(1)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(2)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(3)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(4)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(5)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(6)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n data.get(7)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n ))\n\n}\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 15, "score": 117573.67378644571 }, { "content": "fn parse_vf_stats(raw: &[u8]) -> Result<VfState, NisporError> {\n\n let mut state = VfState::default();\n\n let nlas = NlasIterator::new(raw);\n\n for nla in nlas {\n\n let nla = nla?;\n\n match nla.kind() {\n\n IFLA_VF_STATS_RX_PACKETS => {\n\n state.rx_packets = parse_as_u64(nla.value())?;\n\n }\n\n IFLA_VF_STATS_TX_PACKETS => {\n\n state.tx_packets = parse_as_u64(nla.value())?;\n\n }\n\n IFLA_VF_STATS_RX_BYTES => {\n\n state.rx_bytes = parse_as_u64(nla.value())?;\n\n }\n\n IFLA_VF_STATS_TX_BYTES => {\n\n state.tx_bytes = 
parse_as_u64(nla.value())?;\n\n }\n\n IFLA_VF_STATS_BROADCAST => {\n\n state.broadcast = parse_as_u64(nla.value())?;\n", "file_path": "src/lib/ifaces/sriov.rs", "rank": 16, "score": 117573.67378644571 }, { "content": "fn get_bond_mode(raw: &[u8]) -> Result<BondMode, NisporError> {\n\n let nlas = NlasIterator::new(raw);\n\n for nla in nlas {\n\n match nla {\n\n Ok(nla) => {\n\n if nla.kind() == IFLA_BOND_MODE {\n\n return Ok(parse_as_u8(nla.value())?.into());\n\n }\n\n }\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n }\n\n }\n\n }\n\n eprintln!(\"Failed to parse bond mode from NLAS: {:?}\", nlas);\n\n Ok(BondMode::Unknown)\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 17, "score": 117573.67378644571 }, { "content": "fn controller_iface_index_to_name(iface_states: &mut HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if let Some(controller) = &iface.controller {\n\n if let Some(name) = index_to_name.get(controller) {\n\n iface.controller = Some(name.to_string());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib/ifaces/mod.rs", "rank": 18, "score": 116237.82172985215 }, { "content": "// TODO: This is duplicate of bond gen_port_list_of_controller()\n\nfn gen_port_list_of_controller(iface_states: &mut HashMap<String, Iface>) {\n\n let mut controller_ports: HashMap<String, Vec<String>> = HashMap::new();\n\n for iface in iface_states.values() {\n\n if iface.controller_type == Some(ControllerType::Bridge) {\n\n if let Some(controller) = &iface.controller {\n\n match controller_ports.get_mut(controller) {\n\n Some(ports) => ports.push(iface.name.clone()),\n\n None => {\n\n let new_ports: Vec<String> = vec![iface.name.clone()];\n\n controller_ports.insert(controller.clone(), new_ports);\n\n }\n\n };\n\n }\n\n }\n\n }\n\n for (controller, ports) in 
controller_ports.iter_mut() {\n\n if let Some(controller_iface) = iface_states.get_mut(controller) {\n\n if let Some(ref mut bridge_info) = controller_iface.bridge {\n\n ports.sort();\n\n bridge_info.ports = ports.clone();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib/ifaces/bridge.rs", "rank": 19, "score": 116237.82172985215 }, { "content": "fn primary_index_to_iface_name(iface_states: &mut HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if iface.iface_type != IfaceType::Bond {\n\n continue;\n\n }\n\n if let Some(ref mut bond_info) = iface.bond {\n\n if let Some(index) = &bond_info.primary {\n\n if let Some(iface_name) = index_to_name.get(index) {\n\n bond_info.primary = Some(iface_name.clone());\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/bond.rs", "rank": 20, "score": 116237.82172985215 }, { "content": "fn parse_ad_info(raw: &[u8]) -> Result<BondAdInfo, NisporError> {\n\n let nlas = NlasIterator::new(raw);\n\n let mut ad_info = BondAdInfo::default();\n\n for nla in nlas {\n\n match nla {\n\n Ok(nla) => match nla.kind() {\n\n IFLA_BOND_AD_INFO_AGGREGATOR => {\n\n ad_info.aggregator = parse_as_u16(nla.value())?;\n\n }\n\n IFLA_BOND_AD_INFO_NUM_PORTS => {\n\n ad_info.num_ports = parse_as_u16(nla.value())?;\n\n }\n\n IFLA_BOND_AD_INFO_ACTOR_KEY => {\n\n ad_info.actor_key = parse_as_u16(nla.value())?;\n\n }\n\n IFLA_BOND_AD_INFO_PARTNER_KEY => {\n\n ad_info.partner_key = parse_as_u16(nla.value())?;\n\n }\n\n IFLA_BOND_AD_INFO_PARTNER_MAC => {\n\n ad_info.partner_mac = parse_as_48_bits_mac(nla.value())?;\n", "file_path": "src/lib/netlink/bond.rs", "rank": 21, "score": 116204.93973047954 }, { "content": "fn parse_bitset_bits_nla(\n\n raw: &[u8],\n\n has_mask: bool,\n\n) -> Result<Vec<FeatureBit>, DecodeError> {\n\n let mut feature_bits = Vec::new();\n\n 
let error_msg = \"Failed to parse ETHTOOL_A_BITSET_BITS attributes\";\n\n for bit_nla in NlasIterator::new(raw) {\n\n let bit_nla = &bit_nla.context(error_msg)?;\n\n match bit_nla.kind() {\n\n ETHTOOL_A_BITSET_BITS_BIT => {\n\n let error_msg =\n\n \"Failed to parse ETHTOOL_A_BITSET_BITS_BIT attributes\";\n\n let nlas = NlasIterator::new(bit_nla.value());\n\n let mut cur_bit_info = FeatureBit::new(has_mask);\n\n for nla in nlas {\n\n let nla = &nla.context(error_msg)?;\n\n let payload = nla.value();\n\n match nla.kind() {\n\n ETHTOOL_A_BITSET_BIT_INDEX => {\n\n if cur_bit_info.index != 0\n", "file_path": "src/netlink-ethtool/feature/attr.rs", "rank": 22, "score": 116116.29024617036 }, { "content": "fn str_to_zero_ended_u8_array(\n\n src_str: &str,\n\n buffer: &mut [u8],\n\n max_size: usize,\n\n) {\n\n if let Ok(src_cstring) = CString::new(src_str.as_bytes()) {\n\n let src_null_ended_str = src_cstring.into_bytes_with_nul();\n\n if src_null_ended_str.len() > max_size {\n\n buffer[..max_size].clone_from_slice(&src_null_ended_str[..max_size])\n\n } else {\n\n buffer[..src_null_ended_str.len()]\n\n .clone_from_slice(&src_null_ended_str)\n\n }\n\n }\n\n}\n", "file_path": "src/netlink-generic/ctrl.rs", "rank": 23, "score": 116087.68304660241 }, { "content": "fn convert_base_iface_index_to_name(iface_states: &mut HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if iface.iface_type != IfaceType::Vlan {\n\n continue;\n\n }\n\n if let Some(ref mut vlan_info) = iface.vlan {\n\n if let Some(base_iface_name) =\n\n index_to_name.get(&vlan_info.base_iface)\n\n {\n\n vlan_info.base_iface = base_iface_name.clone();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/vlan.rs", "rank": 24, "score": 114918.71067525513 }, { "content": "fn convert_back_port_index_to_name(iface_states: &mut 
HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if iface.controller_type != Some(ControllerType::Bridge) {\n\n continue;\n\n }\n\n if let Some(ref mut port_info) = iface.bridge_port {\n\n let index = &port_info.backup_port;\n\n if !index.is_empty() {\n\n if let Some(iface_name) = index_to_name.get(index) {\n\n port_info.backup_port = iface_name.into();\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib/ifaces/bridge.rs", "rank": 25, "score": 114918.71067525513 }, { "content": "fn convert_base_iface_index_to_name(iface_states: &mut HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if iface.iface_type != IfaceType::Vxlan {\n\n continue;\n\n }\n\n if let Some(ref mut vxlan_info) = iface.vxlan {\n\n if let Some(base_iface_name) =\n\n index_to_name.get(&vxlan_info.base_iface)\n\n {\n\n vxlan_info.base_iface = base_iface_name.clone();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/vxlan.rs", "rank": 26, "score": 114918.71067525513 }, { "content": "fn convert_base_iface_index_to_name(iface_states: &mut HashMap<String, Iface>) {\n\n let mut index_to_name = HashMap::new();\n\n for iface in iface_states.values() {\n\n index_to_name.insert(format!(\"{}\", iface.index), iface.name.clone());\n\n }\n\n for iface in iface_states.values_mut() {\n\n if iface.iface_type != IfaceType::MacVlan\n\n && iface.iface_type != IfaceType::MacVtap\n\n {\n\n continue;\n\n }\n\n if let Some(ref mut info) = iface.mac_vlan {\n\n if let Some(base_iface_name) = index_to_name.get(&info.base_iface) {\n\n info.base_iface = base_iface_name.clone();\n\n }\n\n } else if let Some(ref mut info) = iface.mac_vtap 
{\n\n if let Some(base_iface_name) = index_to_name.get(&info.base_iface) {\n\n info.base_iface = base_iface_name.clone();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/mac_vlan.rs", "rank": 27, "score": 113646.91729304596 }, { "content": "pub fn cmd_exec(command: &str, args: Vec<&str>) -> bool {\n\n let mut proc = Command::new(command);\n\n for argument in args.iter() {\n\n proc.arg(argument);\n\n }\n\n let status = proc.status().expect(\"failed to execute the command\");\n\n\n\n return status.success();\n\n}\n", "file_path": "src/lib/tests/utils.rs", "rank": 28, "score": 113222.37966600734 }, { "content": "fn get_iface_name_from_header(hdrs: &[EthtoolHeader]) -> Option<String> {\n\n for hdr in hdrs {\n\n if let EthtoolHeader::DevName(iface_name) = hdr {\n\n return Some(iface_name.to_string());\n\n }\n\n }\n\n None\n\n}\n", "file_path": "src/lib/ifaces/ethtool.rs", "rank": 29, "score": 109379.11815361159 }, { "content": "fn parse_arg_output_format(matches: &clap::ArgMatches) -> CliOutputType {\n\n match matches.is_present(\"json\") {\n\n true => CliOutputType::Json,\n\n false => CliOutputType::Yaml,\n\n }\n\n}\n\n\n", "file_path": "src/cli/npc.rs", "rank": 30, "score": 108125.16432594071 }, { "content": "fn get_routes(state: &NetState, matches: &clap::ArgMatches) -> CliResult {\n\n let mut routes = state.routes.clone();\n\n\n\n if let Some(iface_name) = matches.value_of(\"dev\") {\n\n routes = routes\n\n .into_iter()\n\n .filter(|route| _is_route_to_specified_dev(route, iface_name))\n\n .collect();\n\n }\n\n\n\n CliResult::Routes(routes)\n\n}\n\n\n", "file_path": "src/cli/npc.rs", "rank": 31, "score": 104812.15004074707 }, { "content": "fn feature_bits_len(_feature_bits: &[FeatureBit]) -> usize {\n\n todo!(\"Does not support changing ethtool feature yet\")\n\n}\n\n\n", "file_path": "src/netlink-ethtool/feature/attr.rs", "rank": 32, "score": 100399.64779869141 }, { "content": "enum CliResult {\n\n Pass,\n\n Brief(Vec<CliIfaceBrief>),\n\n 
Full(NetState),\n\n Ifaces(Vec<Iface>),\n\n Routes(Vec<Route>),\n\n RouteRules(Vec<RouteRule>),\n\n CliError(CliError),\n\n NisporError(NisporError),\n\n}\n\n\n", "file_path": "src/cli/npc.rs", "rank": 33, "score": 81661.97148873194 }, { "content": "#[derive(PartialEq)]\n\nenum CliOutputType {\n\n Json,\n\n Yaml,\n\n}\n\n\n\nmacro_rules! npc_print {\n\n ($display_func:expr, $data: expr) => {\n\n match $data {\n\n CliResult::Pass => {\n\n process::exit(0);\n\n }\n\n CliResult::Brief(_) => unreachable!(),\n\n CliResult::Full(netstate) => {\n\n writeln!(stdout(), \"{}\", $display_func(&netstate).unwrap())\n\n .ok();\n\n process::exit(0);\n\n }\n\n CliResult::Ifaces(ifaces) => {\n\n writeln!(stdout(), \"{}\", $display_func(&ifaces).unwrap()).ok();\n\n process::exit(0);\n", "file_path": "src/cli/npc.rs", "rank": 34, "score": 80662.4474191789 }, { "content": "#[derive(Serialize, Default)]\n\nstruct CliIfaceBrief {\n\n index: u32,\n\n name: String,\n\n state: IfaceState,\n\n flags: Vec<String>,\n\n mac: String,\n\n permanent_mac: String,\n\n mtu: i64,\n\n ipv4: Vec<String>,\n\n ipv6: Vec<String>,\n\n gw4: Vec<String>,\n\n gw6: Vec<String>,\n\n}\n\n\n\nimpl CliIfaceBrief {\n\n fn list_show(briefs: &[CliIfaceBrief]) -> String {\n\n let mut ret = Vec::new();\n\n for brief in briefs {\n\n ret.push(format!(\n\n \"{}: {}: <{}> state {} mtu {}\",\n", "file_path": "src/cli/npc.rs", "rank": 35, "score": 80640.90942062184 }, { "content": "struct IfaCacheInfo {\n\n ifa_prefered: u32,\n\n ifa_valid: u32,\n\n /*cstamp: u32,\n\n tstamp: u32, */\n\n}\n\n\n", "file_path": "src/lib/netlink/ip.rs", "rank": 36, "score": 79680.81820688139 }, { "content": "fn main() {\n\n let matches = clap_app!(npc =>\n\n (version: crate_version!())\n\n (author: crate_authors!())\n\n (about: \"Nispor CLI\")\n\n (@arg ifname: [INTERFACE_NAME] \"interface name\")\n\n (@arg json: -j --json \"Show in json format\")\n\n (@subcommand iface =>\n\n (@arg json: -j --json \"Show in json format\")\n\n (@arg ifname: 
[INTERFACE_NAME] \"Show only specified interface\")\n\n (about: \"Show interface\")\n\n )\n\n (@subcommand route =>\n\n (@arg json: -j --json \"Show in json format\")\n\n (@arg dev: -d --dev [OIF] \"Show only route entries with output to the specified interface\")\n\n (about: \"Show routes\")\n\n )\n\n (@subcommand rule =>\n\n (@arg json: -j --json \"Show in json format\")\n\n (about: \"Show routes rules\")\n", "file_path": "src/cli/npc.rs", "rank": 37, "score": 79679.07249503375 }, { "content": "fn get_route(\n\n route_msg: RouteMessage,\n\n ifindex_to_name: &HashMap<String, String>,\n\n) -> Result<Route, NisporError> {\n\n let mut rt = Route::default();\n\n let header = &route_msg.header;\n\n rt.address_family = header.address_family.into();\n\n let src_prefix_len = header.source_prefix_length;\n\n let dst_prefix_len = header.destination_prefix_length;\n\n rt.table = header.table.into();\n\n rt.tos = header.tos;\n\n rt.protocol = header.protocol.into();\n\n rt.scope = header.scope.into();\n\n rt.flags = header.flags.bits();\n\n rt.route_type = header.kind.into();\n\n let family = &rt.address_family;\n\n for nla in &route_msg.nlas {\n\n match nla {\n\n Nla::Destination(ref d) => {\n\n rt.dst = Some(format!(\n", "file_path": "src/lib/route.rs", "rank": 38, "score": 78653.55814736069 }, { "content": "#[derive(Debug, PartialEq, Clone, Default)]\n\nstruct KernelBridgeVlanEntry {\n\n vid: u16,\n\n is_pvid: bool, // is PVID and ingress untagged\n\n is_egress_untagged: bool,\n\n is_range_start: bool,\n\n is_range_end: bool,\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_vlan.rs", "rank": 39, "score": 77890.23616814878 }, { "content": "// TODO: Use macro to generate function below\n\nfn parse_miimon(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.miimon = Some(parse_as_u32(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 40, "score": 77678.31020588266 }, { "content": "fn 
filter_iface_state(\n\n cur_state: NetState,\n\n des_iface_names: Vec<String>,\n\n) -> Vec<Iface> {\n\n let mut new_ifaces = Vec::new();\n\n for (iface_name, iface_state) in cur_state.ifaces.iter() {\n\n if des_iface_names.contains(iface_name) {\n\n new_ifaces.push(iface_state.clone());\n\n }\n\n }\n\n new_ifaces\n\n}\n", "file_path": "src/cli/npc.rs", "rank": 41, "score": 77673.87117554527 }, { "content": "fn parse_downdelay(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.downdelay = Some(parse_as_u32(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 42, "score": 77673.87117554527 }, { "content": "fn _addr_to_string(\n\n data: &[u8],\n\n family: &AddressFamily,\n\n) -> Result<String, NisporError> {\n\n match family {\n\n AddressFamily::IPv4 => Ok(parse_as_ipv4(data).to_string()),\n\n AddressFamily::IPv6 => Ok(parse_as_ipv6(data).to_string()),\n\n _ => Err(NisporError {\n\n kind: ErrorKind::NisporBug,\n\n msg: \"The rule is not a valid IPv4 and IPv6 rule\".to_string(),\n\n }),\n\n }\n\n}\n", "file_path": "src/lib/route_rule.rs", "rank": 43, "score": 77673.87117554527 }, { "content": "fn parse_void(\n\n _data: &[u8],\n\n _bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 44, "score": 77673.87117554527 }, { "content": "fn parse_updelay(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.updelay = Some(parse_as_u32(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 45, "score": 77673.87117554527 }, { "content": "fn parse_primary(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if [\n\n BondMode::ActiveBackup,\n\n BondMode::BalanceAlb,\n\n BondMode::BalanceTlb,\n\n ]\n\n .contains(&bond_info.mode)\n\n {\n\n bond_info.primary = Some(format!(\"{}\", parse_as_u32(data)?));\n\n }\n\n Ok(())\n\n}\n\n\n", 
"file_path": "src/lib/netlink/bond.rs", "rank": 46, "score": 77673.87117554527 }, { "content": "fn parse_all_subordinates_active(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.all_subordinates_active = Some(parse_as_u8(data)?.into());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 47, "score": 76737.0068692402 }, { "content": "fn parse_arp_all_targets(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.arp_all_targets = Some(parse_as_u32(data)?.into());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 48, "score": 76737.0068692402 }, { "content": "fn parse_lp_interval(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if [BondMode::BalanceTlb, BondMode::BalanceAlb].contains(&bond_info.mode) {\n\n bond_info.lp_interval = Some(parse_as_u32(data)?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 49, "score": 76737.0068692402 }, { "content": "// Once we find a way to load netsimdev kernel module in CI, we can convert this\n\n// to a test\n\nfn main() {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .enable_io()\n\n .build()\n\n .unwrap();\n\n let family_id = rt.block_on(genl_ctrl_resolve_ethtool());\n\n rt.block_on(get_coalesce(family_id, None));\n\n}\n\n\n\nasync fn genl_ctrl_resolve_ethtool() -> u16 {\n\n let (connection, mut handle, _) =\n\n netlink_generic::new_connection().unwrap();\n\n tokio::spawn(connection);\n\n\n\n let family_id = handle.resolve_family_name(\"ethtool\").await.unwrap();\n\n println!(\"Family ID of ethtool is {}\", family_id);\n\n family_id\n\n}\n\n\n\nasync fn get_coalesce(family_id: u16, iface_name: Option<&str>) {\n", "file_path": "src/netlink-ethtool/examples/dump_coalesce.rs", "rank": 50, "score": 76737.0068692402 }, { "content": "fn parse_ipv6_nlas(\n\n nl_msg: &AddressMessage,\n\n) -> Result<(u32, Ipv6AddrInfo), 
NisporError> {\n\n let iface_index = nl_msg.header.index;\n\n let mut addr = Ipv6AddrInfo {\n\n prefix_len: nl_msg.header.prefix_len,\n\n ..Default::default()\n\n };\n\n\n\n for nla in &nl_msg.nlas {\n\n if let Address(addr_vec) = nla {\n\n addr.address = parse_as_ipv6(addr_vec.as_slice()).to_string();\n\n } else if let CacheInfo(cache_info_vec) = nla {\n\n let cache_info = parse_cache_info(cache_info_vec)?;\n\n addr.preferred_lft = left_time_to_string(cache_info.ifa_prefered);\n\n addr.valid_lft = left_time_to_string(cache_info.ifa_valid);\n\n }\n\n }\n\n\n\n Ok((iface_index, addr))\n\n}\n\n\n", "file_path": "src/lib/netlink/ip.rs", "rank": 51, "score": 76737.0068692402 }, { "content": "fn parse_vf_mac(\n\n raw: &[u8],\n\n mac_len: Option<usize>,\n\n) -> Result<String, NisporError> {\n\n match mac_len {\n\n Some(mac_len) => parse_as_mac(mac_len, raw),\n\n None => parse_as_mac(32, raw),\n\n }\n\n}\n\n\n", "file_path": "src/lib/ifaces/sriov.rs", "rank": 52, "score": 76737.0068692402 }, { "content": "fn parse_cache_info(\n\n cache_info_raw: &[u8],\n\n) -> Result<IfaCacheInfo, NisporError> {\n\n if cache_info_raw.len() != 16 {\n\n panic!(\n\n \"Got invalid ifa_cacheinfo, expect [u8; 32], got {} u8\",\n\n cache_info_raw.len()\n\n );\n\n } else {\n\n // The struct ifa_cacheinfo is storing valid time as second u32\n\n let err_msg = \"wrong index at cache_info_raw parsing\";\n\n Ok(IfaCacheInfo {\n\n ifa_prefered: u32::from_ne_bytes([\n\n *cache_info_raw\n\n .get(0)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n *cache_info_raw\n\n .get(1)\n\n .ok_or_else(|| NisporError::bug(err_msg.into()))?,\n\n *cache_info_raw\n", "file_path": "src/lib/netlink/ip.rs", "rank": 53, "score": 76737.0068692402 }, { "content": "fn parse_fail_over_mac(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::ActiveBackup {\n\n bond_info.fail_over_mac = Some(parse_as_u8(data)?.into());\n\n }\n\n Ok(())\n\n}\n\n\n", 
"file_path": "src/lib/netlink/bond.rs", "rank": 54, "score": 76737.0068692402 }, { "content": "fn parse_ad_select(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::Ieee8021AD {\n\n bond_info.ad_select = Some(parse_as_u8(data)?.into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 55, "score": 76737.0068692402 }, { "content": "fn parse_min_links(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::Ieee8021AD {\n\n bond_info.min_links = Some(parse_as_u32(data)?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 56, "score": 76737.0068692402 }, { "content": "// TODO: remove the dupcode between parse_ipv4_nlas() and parse_ipv6_nlas()\n\nfn parse_ipv4_nlas(\n\n nl_msg: &AddressMessage,\n\n) -> Result<(u32, Ipv4AddrInfo), NisporError> {\n\n let iface_index = nl_msg.header.index;\n\n let mut addr = Ipv4AddrInfo {\n\n prefix_len: nl_msg.header.prefix_len,\n\n ..Default::default()\n\n };\n\n let mut peer = String::new();\n\n for nla in &nl_msg.nlas {\n\n if let Local(addr_vec) = nla {\n\n addr.address = parse_as_ipv4(addr_vec.as_slice()).to_string();\n\n } else if let Address(addr_vec) = nla {\n\n peer = parse_as_ipv4(addr_vec.as_slice()).to_string();\n\n } else if let CacheInfo(cache_info_vec) = nla {\n\n let cache_info = parse_cache_info(cache_info_vec)?;\n\n addr.preferred_lft = left_time_to_string(cache_info.ifa_prefered);\n\n addr.valid_lft = left_time_to_string(cache_info.ifa_valid);\n\n }\n\n }\n\n\n\n if peer != addr.address {\n\n addr.peer = Some(peer)\n\n }\n\n\n\n Ok((iface_index, addr))\n\n}\n\n\n", "file_path": "src/lib/netlink/ip.rs", "rank": 57, "score": 76737.0068692402 }, { "content": "fn parse_primary_reselect(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if [\n\n BondMode::ActiveBackup,\n\n BondMode::BalanceAlb,\n\n 
BondMode::BalanceTlb,\n\n ]\n\n .contains(&bond_info.mode)\n\n {\n\n bond_info.primary_reselect = Some(parse_as_u8(data)?.into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 58, "score": 76737.0068692402 }, { "content": "fn parse_resend_igmp(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if [\n\n BondMode::BalanceRoundRobin,\n\n BondMode::ActiveBackup,\n\n BondMode::BalanceTlb,\n\n BondMode::BalanceAlb,\n\n ]\n\n .contains(&bond_info.mode)\n\n {\n\n bond_info.resend_igmp = Some(parse_as_u32(data)?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 59, "score": 76737.0068692402 }, { "content": "fn parse_brport_no(\n\n data: &[u8],\n\n cost_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n cost_info.port_no = format!(\"0x{:x}\", parse_as_u16(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 60, "score": 76737.0068692402 }, { "content": "fn parse_arp_validate(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.arp_validate = Some(parse_as_u32(data)?.into());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 61, "score": 76737.0068692402 }, { "content": "fn parse_bridge_id(\n\n priority: u16,\n\n mac: &[u8; 6],\n\n) -> Result<String, NisporError> {\n\n //Following the format of sysfs\n\n let priority_bytes = priority.to_ne_bytes();\n\n Ok(format!(\n\n \"{:02x}{:02x}.{}\",\n\n priority_bytes.get(0).ok_or_else(|| NisporError::bug(\n\n \"wrong index at bridge_id parsing\".into()\n\n ))?,\n\n priority_bytes.get(1).ok_or_else(|| NisporError::bug(\n\n \"wrong index at bridge_id parsing\".into()\n\n ))?,\n\n parse_as_mac(ETH_ALEN, mac)\n\n .expect(\"error when parsing mac address in bridge_id\")\n\n .to_lowercase()\n\n .replace(\":\", \"\")\n\n ))\n\n}\n", "file_path": "src/lib/netlink/bridge.rs", "rank": 62, "score": 76737.0068692402 }, { "content": "// 
Once we find a way to load netsimdev kernel module in CI, we can convert this\n\n// to a test\n\nfn main() {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .enable_io()\n\n .build()\n\n .unwrap();\n\n let family_id = rt.block_on(genl_ctrl_resolve_ethtool());\n\n rt.block_on(get_feature(family_id, None));\n\n}\n\n\n\nasync fn genl_ctrl_resolve_ethtool() -> u16 {\n\n let (connection, mut handle, _) =\n\n netlink_generic::new_connection().unwrap();\n\n tokio::spawn(connection);\n\n\n\n let family_id = handle.resolve_family_name(\"ethtool\").await.unwrap();\n\n println!(\"Family ID of ethtool is {}\", family_id);\n\n family_id\n\n}\n\n\n\nasync fn get_feature(family_id: u16, iface_name: Option<&str>) {\n", "file_path": "src/netlink-ethtool/examples/dump_features.rs", "rank": 63, "score": 76737.0068692402 }, { "content": "// Once we find a way to load netsimdev kernel module in CI, we can convert this\n\n// to a test\n\nfn main() {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .enable_io()\n\n .build()\n\n .unwrap();\n\n let family_id = rt.block_on(genl_ctrl_resolve_ethtool());\n\n rt.block_on(get_ring(family_id, None));\n\n}\n\n\n\nasync fn genl_ctrl_resolve_ethtool() -> u16 {\n\n let (connection, mut handle, _) =\n\n netlink_generic::new_connection().unwrap();\n\n tokio::spawn(connection);\n\n\n\n let family_id = handle.resolve_family_name(\"ethtool\").await.unwrap();\n\n println!(\"Family ID of ethtool is {}\", family_id);\n\n family_id\n\n}\n\n\n\nasync fn get_ring(family_id: u16, iface_name: Option<&str>) {\n", "file_path": "src/netlink-ethtool/examples/dump_rings.rs", "rank": 64, "score": 76737.0068692402 }, { "content": "fn parse_arp_interval(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.arp_interval = Some(parse_as_u32(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 65, "score": 76737.0068692402 }, { "content": "// Once we find a way to load 
netsimdev kernel module in CI, we can convert this\n\n// to a test\n\nfn main() {\n\n env_logger::init();\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .enable_io()\n\n .build()\n\n .unwrap();\n\n let family_id = rt.block_on(genl_ctrl_resolve_ethtool());\n\n rt.block_on(get_pause(family_id, None));\n\n}\n\n\n\nasync fn genl_ctrl_resolve_ethtool() -> u16 {\n\n let (connection, mut handle, _) =\n\n netlink_generic::new_connection().unwrap();\n\n tokio::spawn(connection);\n\n\n\n let family_id = handle.resolve_family_name(\"ethtool\").await.unwrap();\n\n println!(\"Family ID of ethtool is {}\", family_id);\n\n family_id\n\n}\n\n\n", "file_path": "src/netlink-ethtool/examples/dump_pause.rs", "rank": 66, "score": 76737.0068692402 }, { "content": "fn parse_vlan_info(\n\n data: &[u8],\n\n) -> Result<Option<KernelBridgeVlanEntry>, NisporError> {\n\n if data.len() == 4 {\n\n let flags = u16::from_ne_bytes([\n\n *data.get(0).ok_or_else(|| {\n\n NisporError::bug(\"wrong index at vlan flags\".into())\n\n })?,\n\n *data.get(1).ok_or_else(|| {\n\n NisporError::bug(\"wrong index at vlan flags\".into())\n\n })?,\n\n ]);\n\n let vid = u16::from_ne_bytes([\n\n *data.get(2).ok_or_else(|| {\n\n NisporError::bug(\"wrong index at vlan id\".into())\n\n })?,\n\n *data.get(3).ok_or_else(|| {\n\n NisporError::bug(\"wrong index at vlan id\".into())\n\n })?,\n\n ]);\n", "file_path": "src/lib/netlink/bridge_vlan.rs", "rank": 67, "score": 75840.21757518346 }, { "content": "fn parse_ad_lacp_rate(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::Ieee8021AD {\n\n bond_info.lacp_rate = Some(parse_as_u8(data)?.into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 68, "score": 75840.21757518346 }, { "content": "fn parse_peer_notif_delay(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.peer_notif_delay = Some(parse_as_u32(data)?);\n\n 
Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 69, "score": 75840.21757518346 }, { "content": "fn parse_ad_actor_system(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::Ieee8021AD {\n\n bond_info.ad_actor_system = Some(parse_as_48_bits_mac(data)?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 70, "score": 75840.21757518346 }, { "content": "fn parse_brport_mode(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.hairpin_mode = parse_as_u8(data)? > 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 71, "score": 75840.21757518346 }, { "content": "// Once we find a way to load netsimdev kernel module in CI, we can convert this\n\n// to a test\n\nfn main() {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .enable_io()\n\n .build()\n\n .unwrap();\n\n let family_id = rt.block_on(genl_ctrl_resolve_ethtool());\n\n rt.block_on(get_link_mode(family_id, None));\n\n}\n\n\n\nasync fn genl_ctrl_resolve_ethtool() -> u16 {\n\n let (connection, mut handle, _) =\n\n netlink_generic::new_connection().unwrap();\n\n tokio::spawn(connection);\n\n\n\n let family_id = handle.resolve_family_name(\"ethtool\").await.unwrap();\n\n println!(\"Family ID of ethtool is {}\", family_id);\n\n family_id\n\n}\n\n\n\nasync fn get_link_mode(family_id: u16, iface_name: Option<&str>) {\n", "file_path": "src/netlink-ethtool/examples/dump_link_mode.rs", "rank": 72, "score": 75840.21757518346 }, { "content": "fn parse_brport_isolated(\n\n data: &[u8],\n\n cost_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n cost_info.isolated = parse_as_u8(data)? 
> 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 73, "score": 75840.21757518346 }, { "content": "fn parse_brport_id(\n\n data: &[u8],\n\n cost_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n cost_info.port_id = format!(\"0x{:04x}\", parse_as_u16(data)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 74, "score": 75840.21757518346 }, { "content": "fn parse_brport_state(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.stp_state = parse_as_u8(data)?.into();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 75, "score": 75840.21757518346 }, { "content": "fn gen_subordinate_list_of_controller(\n\n iface_states: &mut HashMap<String, Iface>,\n\n) {\n\n let mut controller_subordinates: HashMap<String, Vec<String>> =\n\n HashMap::new();\n\n for iface in iface_states.values() {\n\n if iface.controller_type == Some(ControllerType::Bond) {\n\n if let Some(controller) = &iface.controller {\n\n match controller_subordinates.get_mut(controller) {\n\n Some(subordinates) => subordinates.push(iface.name.clone()),\n\n None => {\n\n let new_subordinates: Vec<String> =\n\n vec![iface.name.clone()];\n\n controller_subordinates\n\n .insert(controller.clone(), new_subordinates);\n\n }\n\n };\n\n }\n\n }\n\n }\n", "file_path": "src/lib/ifaces/bond.rs", "rank": 76, "score": 75840.21757518346 }, { "content": "fn parse_brport_priority(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.stp_priority = parse_as_u16(data)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 77, "score": 75840.21757518346 }, { "content": "#[test]\n\nfn test_get_route_yaml() {\n\n with_route_test_iface(|| {\n\n let state = NetState::retrieve().unwrap();\n\n let mut expected_routes = Vec::new();\n\n for route in state.routes {\n\n if Some(TEST_ROUTE_DST_V4.into()) == 
route.dst {\n\n expected_routes.push(route)\n\n } else if Some(TEST_ROUTE_DST_V6.into()) == route.dst {\n\n expected_routes.push(route)\n\n }\n\n }\n\n assert_eq!(\n\n serde_yaml::to_string(&expected_routes).unwrap().trim(),\n\n EXPECTED_YAML_OUTPUT\n\n );\n\n });\n\n}\n\n\n", "file_path": "src/lib/tests/route.rs", "rank": 78, "score": 75840.21757518346 }, { "content": "fn ifaces_merge_ethool_infos(\n\n iface_states: &mut HashMap<String, Iface>,\n\n ethtool_infos: &mut HashMap<String, EthtoolInfo>,\n\n) {\n\n for iface in iface_states.values_mut() {\n\n if let Some(ethtool_info) = ethtool_infos.remove(&iface.name) {\n\n iface.ethtool = Some(ethtool_info)\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/mod.rs", "rank": 79, "score": 75840.21757518346 }, { "content": "fn parse_tlb_dynamic_lb(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::BalanceTlb {\n\n bond_info.tlb_dynamic_lb = Some(parse_as_u8(data)? > 0);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 80, "score": 75840.21757518346 }, { "content": "fn ipv4_addr_array_to_string(\n\n addrs: &[Ipv4Addr],\n\n) -> Result<String, NisporError> {\n\n let mut rt = String::new();\n\n for i in 0..(addrs.len()) {\n\n let addr = &addrs.get(i).ok_or_else(|| {\n\n NisporError::bug(\"wrong index at parsing ipv4 as string\".into())\n\n })?;\n\n rt.push_str(&addr.to_string());\n\n if i != addrs.len() - 1 {\n\n rt.push(',');\n\n }\n\n }\n\n Ok(rt)\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 81, "score": 75840.21757518346 }, { "content": "fn merge_vlan_range(\n\n kernel_vlans: &[KernelBridgeVlanEntry],\n\n) -> Vec<BridgeVlanEntry> {\n\n let mut vlans = Vec::new();\n\n let mut vlan_start = None;\n\n for k_vlan in kernel_vlans {\n\n match (k_vlan.is_range_start, k_vlan.is_range_end) {\n\n (true, false) => {\n\n vlan_start = Some(k_vlan.vid);\n\n continue;\n\n }\n\n (false, true) => {\n\n if let Some(start) = 
vlan_start {\n\n vlans.push(BridgeVlanEntry {\n\n vid: None,\n\n vid_range: Some((start, k_vlan.vid)),\n\n is_pvid: k_vlan.is_pvid,\n\n is_egress_untagged: k_vlan.is_egress_untagged,\n\n })\n\n } else {\n", "file_path": "src/lib/netlink/bridge_vlan.rs", "rank": 82, "score": 75840.21757518346 }, { "content": "fn parse_brport_guard(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.bpdu_guard = parse_as_u8(data)? > 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 83, "score": 75840.21757518346 }, { "content": "fn parse_brport_cost(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.stp_path_cost = parse_as_u32(data)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 84, "score": 75840.21757518346 }, { "content": "fn parse_brport_proxyarp(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.proxyarp = parse_as_u8(data)? > 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 85, "score": 75840.21757518346 }, { "content": "fn parse_brport_learning(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.learning = parse_as_u8(data)? 
> 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 86, "score": 75840.21757518346 }, { "content": "fn parse_num_peer_notif(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::ActiveBackup {\n\n bond_info.num_unsol_na = Some(parse_as_u8(data)?);\n\n bond_info.num_grat_arp = Some(parse_as_u8(data)?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 87, "score": 75840.21757518346 }, { "content": "fn gen_subordinate_list_of_controller(\n\n iface_states: &mut HashMap<String, Iface>,\n\n) {\n\n let mut controller_subordinates: HashMap<String, Vec<String>> =\n\n HashMap::new();\n\n for iface in iface_states.values() {\n\n if iface.controller_type == Some(ControllerType::Vrf) {\n\n if let Some(controller) = &iface.controller {\n\n match controller_subordinates.get_mut(controller) {\n\n Some(subordinates) => subordinates.push(iface.name.clone()),\n\n None => {\n\n let new_subordinates: Vec<String> =\n\n vec![iface.name.clone()];\n\n controller_subordinates\n\n .insert(controller.clone(), new_subordinates);\n\n }\n\n };\n\n }\n\n }\n\n }\n\n for (controller, subordinates) in controller_subordinates.iter_mut() {\n\n if let Some(controller_iface) = iface_states.get_mut(controller) {\n\n if let Some(ref mut vrf_info) = controller_iface.vrf {\n\n subordinates.sort();\n\n vrf_info.subordinates = subordinates.clone();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/ifaces/vrf.rs", "rank": 88, "score": 75840.21757518346 }, { "content": "#[test]\n\nfn test_add_and_remove_ip() {\n\n with_veth_iface(|| {\n\n let conf: NetConf = serde_yaml::from_str(ADD_IP_CONF).unwrap();\n\n conf.apply().unwrap();\n\n let state = NetState::retrieve().unwrap();\n\n let iface = &state.ifaces[IFACE_NAME];\n\n let iface_type = &iface.iface_type;\n\n assert_eq!(iface_type, &nispor::IfaceType::Veth);\n\n assert_eq!(\n\n serde_yaml::to_string(&iface.ipv4).unwrap().trim(),\n\n 
EXPECTED_IPV4_INFO\n\n );\n\n assert_eq!(\n\n serde_yaml::to_string(&iface.ipv6).unwrap().trim(),\n\n EXPECTED_IPV6_INFO\n\n );\n\n let conf: NetConf = serde_yaml::from_str(EMPTY_IP_CONF).unwrap();\n\n conf.apply().unwrap();\n\n let state = NetState::retrieve().unwrap();\n\n let iface = &state.ifaces[IFACE_NAME];\n\n let iface_type = &iface.iface_type;\n\n assert_eq!(iface_type, &nispor::IfaceType::Veth);\n\n assert_eq!(iface.ipv4, None);\n\n assert_eq!(\n\n serde_yaml::to_string(&iface.ipv6).unwrap().trim(),\n\n EXPECTED_EMPTY_IPV6_INFO\n\n );\n\n });\n\n}\n", "file_path": "src/lib/tests/ip.rs", "rank": 89, "score": 75840.21757518346 }, { "content": "fn parse_xmit_hash_policy(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if [\n\n BondMode::BalanceXor,\n\n BondMode::Ieee8021AD,\n\n BondMode::BalanceTlb,\n\n ]\n\n .contains(&bond_info.mode)\n\n {\n\n bond_info.xmit_hash_policy = Some(parse_as_u8(data)?.into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 90, "score": 75840.21757518346 }, { "content": "fn parse_brport_protect(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.root_block = parse_as_u8(data)? 
> 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 91, "score": 75840.21757518346 }, { "content": "fn parse_packets_per_subordinate(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n if bond_info.mode == BondMode::BalanceRoundRobin {\n\n bond_info.packets_per_subordinate = Some(parse_as_u32(data)?);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/lib/netlink/bond.rs", "rank": 92, "score": 75840.21757518346 }, { "content": "fn get_iface_name_by_index(\n\n iface_states: &HashMap<String, Iface>,\n\n iface_index: u32,\n\n) -> String {\n\n for (iface_name, iface) in iface_states.iter() {\n\n if iface.index == iface_index {\n\n return iface_name.clone();\n\n }\n\n }\n\n \"\".into()\n\n}\n", "file_path": "src/lib/netlink/ip.rs", "rank": 93, "score": 75840.21757518346 }, { "content": "fn parse_arp_ip_target(\n\n data: &[u8],\n\n bond_info: &mut BondInfo,\n\n) -> Result<(), NisporError> {\n\n bond_info.arp_ip_target =\n\n Some(ipv4_addr_array_to_string(&parse_as_nested_ipv4_addr(data))?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bond.rs", "rank": 94, "score": 75840.21757518346 }, { "content": "fn parse_brport_proxyarp_wifi(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.proxyarp_wifi = parse_as_u8(data)? > 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 95, "score": 74980.98578334553 }, { "content": "fn parse_brport_fast_leave(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.multicast_fast_leave = parse_as_u8(data)? 
> 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 96, "score": 74980.98578334553 }, { "content": "fn parse_void_port_info(\n\n _data: &[u8],\n\n _port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 97, "score": 74980.98578334553 }, { "content": "fn parse_brport_learning_sync(\n\n _data: &[u8],\n\n _port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n Ok(()) // Ther kernel 5.7-rc6 never update fill value in br_port_fill_attrs\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 98, "score": 74980.98578334553 }, { "content": "fn parse_brport_unicast_flood(\n\n data: &[u8],\n\n port_info: &mut BridgePortInfo,\n\n) -> Result<(), NisporError> {\n\n port_info.unicast_flood = parse_as_u8(data)? > 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/netlink/bridge_port.rs", "rank": 99, "score": 74980.98578334553 } ]
Rust
src/response/mod.rs
ferrum-rs/ferrum
2bda0743d84632e95925d687d8cc03ef15e0ecc4
use std::fmt::{self, Debug}; use std::mem::replace; use mime::Mime; use typemap::{TypeMap, TypeMapInner}; use plugin::Extensible; use hyper::{Body, HttpVersion}; use hyper::header::{ContentLength, ContentType, Location, Raw}; use {Plugin, Header, Headers, StatusCode}; pub use hyper::Response as HyperResponse; pub mod content; pub use self::content::*; pub struct Response { pub status: StatusCode, pub headers: Headers, pub body: Option<Body>, pub extensions: TypeMap<TypeMapInner>, } impl Response { #[inline] pub fn new() -> Response { Response { status: Default::default(), headers: Headers::new(), body: None, extensions: TypeMap::custom() } } #[inline] pub fn new_redirect<R: Into<Raw>>(location: R) -> Response { let mut headers = Headers::new(); headers.set(Location::parse_header(&location.into()).unwrap()); Response { status: StatusCode::Found, headers, body: None, extensions: TypeMap::custom() } } #[inline] pub fn with_status(mut self, status: StatusCode) -> Self { self.status = status; self } #[inline] pub fn with_header<H: Header>(mut self, header: H) -> Self { self.headers.set(header); self } #[inline] pub fn with_headers(mut self, headers: Headers) -> Self { self.headers = headers; self } #[inline] pub fn with_body<T: Into<Body>>(mut self, body: T) -> Self { self.body = Some(body.into()); self } #[inline] pub fn with_content<C: Into<Content>>(mut self, content: C, mime: Mime) -> Self { self.set_content(content, mime); self } #[inline] pub fn set_content<C: Into<Content>>(&mut self, content: C, mime: Mime) { let content = content.into(); self.headers.set(ContentType(mime)); self.headers.set(ContentLength(content.len() as u64)); self.body = Some(content.into()); self.status = StatusCode::Ok; } #[inline] pub fn with_mime(mut self, mime: Mime) -> Self { self.set_mime(mime); self } #[inline] pub fn set_mime(&mut self, mime: Mime) { self.headers.set(ContentType(mime)); } } impl From<HyperResponse> for Response { fn from(mut from_response: HyperResponse) -> Response 
{ Response { status: from_response.status(), headers: replace(from_response.headers_mut(), Headers::new()), body: if from_response.body_ref().is_some() { Some(from_response.body()) } else { None }, extensions: TypeMap::custom() } } } impl From<Response> for HyperResponse { fn from(from_response: Response) -> HyperResponse { HyperResponse::new() .with_status(from_response.status) .with_headers(from_response.headers) .with_body(from_response.body.unwrap_or_default()) } } impl Debug for Response { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { writeln!(formatter, "{} {}\n{}", HttpVersion::default(), self.status, self.headers ) } } impl fmt::Display for Response { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(self, formatter) } } impl Extensible<TypeMapInner> for Response { fn extensions(&self) -> &TypeMap<TypeMapInner> { &self.extensions } fn extensions_mut(&mut self) -> &mut TypeMap<TypeMapInner> { &mut self.extensions } } impl Plugin for Response {} #[cfg(test)] mod test { use super::*; use hyper::header::{ContentType}; use futures::stream::Stream; use futures::{future, Future}; use mime; use std::str::from_utf8; #[test] fn test_create_response() { let response = Response::new(); assert_eq!(response.status, StatusCode::Ok); assert_eq!(response.headers, Headers::new()); assert!(response.body.is_none()); } #[test] fn test_response_from_hyper_response() { let mut headers = Headers::new(); headers.set(ContentType(mime::TEXT_HTML)); let response = Response::from( HyperResponse::new() .with_status(StatusCode::NotFound) .with_headers(headers.clone()) .with_body("Error") ); assert_eq!(response.status, StatusCode::NotFound); assert_eq!(response.headers, headers); assert!(response.body.is_some()); let body = response.body.unwrap() .concat2() .and_then(|chunk| { future::ok(String::from(from_utf8(&chunk).unwrap())) }) .wait().unwrap(); assert_eq!(body, "Error"); } #[test] fn test_hyper_response_from_response() { let mut headers = 
Headers::new(); headers.set(ContentType(mime::TEXT_HTML)); let response = HyperResponse::from( Response { status: StatusCode::NotFound, headers: headers.clone(), body: Some("Error".into()), extensions: TypeMap::custom() } ); assert_eq!(response.status(), StatusCode::NotFound); assert_eq!(response.headers(), &headers); assert!(response.body_ref().is_some()); let body = response.body() .concat2() .and_then(|chunk| { future::ok(String::from(from_utf8(&chunk).unwrap())) }) .wait().unwrap(); assert_eq!(body, "Error"); } }
use std::fmt::{self, Debug}; use std::mem::replace; use mime::Mime; use typemap::{TypeMap, TypeMapInner}; use plugin::Extensible; use hyper::{Body, HttpVersion}; use hyper::header::{ContentLength, ContentType, Location, Raw}; use {Plugin, Header, Headers, StatusCode}; pub use hyper::Response as HyperResponse; pub mod content; pub use self::content::*; pub struct Response { pub status: StatusCode, pub headers: Headers, pub body: Option<Body>, pub extensions: TypeMap<TypeMapInner>, } impl Response { #[inline] pub fn new() -> Response { Response { status: Default::default(), headers: Headers::new(), body: None, extensions: TypeMap::custom() } } #[inline] pub fn new_redirect<R: Into<Raw>>(location: R) -> Response { let mut headers = Headers::new(); headers.set(Location::parse_header(&location.into()).unwrap()); Response { status: StatusCode::Found, headers, body: None, extensions: TypeMap::custom() } } #[inline] pub fn with_status(mut self, status: StatusCode) -> Self { self.status = status; self } #[inline] pub fn with_header<H: Header>(mut self, header: H) -> Self { self.headers.set(header); self } #[inline] pub fn with_headers(mut self, headers: Headers) -> Self { self.headers = headers; self } #[inline] pub fn with_body<T: Into<Body>>(mut self, body: T) -> Self { self.body = Some(body.into()); self } #[inline] pub fn with_content<C: Into<Content>>(mut self, content: C, mime: Mime) -> Self { self.set_content(content, mime); self } #[inline]
#[inline] pub fn with_mime(mut self, mime: Mime) -> Self { self.set_mime(mime); self } #[inline] pub fn set_mime(&mut self, mime: Mime) { self.headers.set(ContentType(mime)); } } impl From<HyperResponse> for Response { fn from(mut from_response: HyperResponse) -> Response { Response { status: from_response.status(), headers: replace(from_response.headers_mut(), Headers::new()), body: if from_response.body_ref().is_some() { Some(from_response.body()) } else { None }, extensions: TypeMap::custom() } } } impl From<Response> for HyperResponse { fn from(from_response: Response) -> HyperResponse { HyperResponse::new() .with_status(from_response.status) .with_headers(from_response.headers) .with_body(from_response.body.unwrap_or_default()) } } impl Debug for Response { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { writeln!(formatter, "{} {}\n{}", HttpVersion::default(), self.status, self.headers ) } } impl fmt::Display for Response { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(self, formatter) } } impl Extensible<TypeMapInner> for Response { fn extensions(&self) -> &TypeMap<TypeMapInner> { &self.extensions } fn extensions_mut(&mut self) -> &mut TypeMap<TypeMapInner> { &mut self.extensions } } impl Plugin for Response {} #[cfg(test)] mod test { use super::*; use hyper::header::{ContentType}; use futures::stream::Stream; use futures::{future, Future}; use mime; use std::str::from_utf8; #[test] fn test_create_response() { let response = Response::new(); assert_eq!(response.status, StatusCode::Ok); assert_eq!(response.headers, Headers::new()); assert!(response.body.is_none()); } #[test] fn test_response_from_hyper_response() { let mut headers = Headers::new(); headers.set(ContentType(mime::TEXT_HTML)); let response = Response::from( HyperResponse::new() .with_status(StatusCode::NotFound) .with_headers(headers.clone()) .with_body("Error") ); assert_eq!(response.status, StatusCode::NotFound); assert_eq!(response.headers, headers); 
assert!(response.body.is_some()); let body = response.body.unwrap() .concat2() .and_then(|chunk| { future::ok(String::from(from_utf8(&chunk).unwrap())) }) .wait().unwrap(); assert_eq!(body, "Error"); } #[test] fn test_hyper_response_from_response() { let mut headers = Headers::new(); headers.set(ContentType(mime::TEXT_HTML)); let response = HyperResponse::from( Response { status: StatusCode::NotFound, headers: headers.clone(), body: Some("Error".into()), extensions: TypeMap::custom() } ); assert_eq!(response.status(), StatusCode::NotFound); assert_eq!(response.headers(), &headers); assert!(response.body_ref().is_some()); let body = response.body() .concat2() .and_then(|chunk| { future::ok(String::from(from_utf8(&chunk).unwrap())) }) .wait().unwrap(); assert_eq!(body, "Error"); } }
pub fn set_content<C: Into<Content>>(&mut self, content: C, mime: Mime) { let content = content.into(); self.headers.set(ContentType(mime)); self.headers.set(ContentLength(content.len() as u64)); self.body = Some(content.into()); self.status = StatusCode::Ok; }
function_block-full_function
[ { "content": "struct DefaultContentType;\n\n\n\nimpl AfterMiddleware for DefaultContentType {\n\n // This is run for every requests, AFTER all handlers have been executed\n\n fn after(&self, _: &mut Request, mut response: Response) -> FerrumResult<Response> {\n\n if response.headers.get::<ContentType>() == None {\n\n // Set a standard header\n\n response.headers.set(ContentType(mime::TEXT_PLAIN));\n\n }\n\n Ok(response)\n\n }\n\n}\n\n\n", "file_path": "examples/get_set_headers.rs", "rank": 0, "score": 83106.09361940571 }, { "content": "fn variant1(_: &mut Request) -> FerrumResult<Response> {\n\n Ok(Response::new().with_content(\"{}\", mime::APPLICATION_JSON))\n\n}\n\n\n", "file_path": "examples/content_type.rs", "rank": 1, "score": 78904.33176993506 }, { "content": "fn variant3(_: &mut Request) -> FerrumResult<Response> {\n\n Ok(\n\n Response::new()\n\n .with_status(StatusCode::Ok)\n\n .with_body(\"{}\")\n\n .with_header(ContentLength(\"{}\".len() as u64))\n\n .with_header(ContentType::json())\n\n )\n\n}\n\n\n", "file_path": "examples/content_type.rs", "rank": 2, "score": 78904.33176993506 }, { "content": "fn variant2(_: &mut Request) -> FerrumResult<Response> {\n\n let content_type: mime::Mime = \"application/json\".parse().unwrap();\n\n Ok(Response::new().with_content(\"{}\", content_type))\n\n}\n\n\n", "file_path": "examples/content_type.rs", "rank": 3, "score": 78904.33176993506 }, { "content": "struct LoggerHandler<H: Handler> {\n\n logger: Logger, handler: H\n\n}\n\n\n\nimpl Logger {\n\n fn new(mode: LoggerMode) -> Logger {\n\n Logger { mode }\n\n }\n\n\n\n fn log(&self, request: &Request, response: Result<&Response, &FerrumError>, time: u64) {\n\n match self.mode {\n\n LoggerMode::Silent => {},\n\n LoggerMode::Tiny => println!(\"-> {:?}\\n<- {:?}\\n{}\", request, response, time),\n\n LoggerMode::Large => println!(\"Request: {:?}\\nResponse: {:?}\\nResponse-Time: {}\", request, response, time)\n\n }\n\n }\n\n}\n\n\n\nimpl<H: Handler> Handler for 
LoggerHandler<H> {\n\n fn handle(&self, request: &mut Request) -> FerrumResult<Response> {\n", "file_path": "examples/around.rs", "rank": 4, "score": 76928.79696240513 }, { "content": "fn info(request: &mut Request) -> FerrumResult<Response> {\n\n // Get a header using a standard ferrum headers\n\n let user_agent = match request.headers.get::<UserAgent>() {\n\n Some(user_agent) => format!(\"User Agent: {}\\n\", user_agent),\n\n None => \"No User Agent\\n\".to_string(),\n\n };\n\n // Get a non-standard header using the raw header\n\n let x_forwarded_for = match request.headers.get_raw(\"X-Forwarded-For\") {\n\n Some(proxies) => format!(\"Proxies: {}\\n\", std::str::from_utf8(&proxies[0]).unwrap()),\n\n None => \"No proxy\\n\".to_string(),\n\n };\n\n let body = format!(\"{}{}\\n\", user_agent, x_forwarded_for);\n\n\n\n Ok(Response::new().with_body(body))\n\n}\n\n\n", "file_path": "examples/get_set_headers.rs", "rank": 5, "score": 74510.48426467423 }, { "content": "// Stub response\n\nfn response() -> Response { Response::new() }\n\n\n", "file_path": "src/middleware/test.rs", "rank": 6, "score": 68801.09839553598 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let variant_index = if args.len() > 1 { args[1].parse().unwrap() } else { 1 };\n\n let handler = match variant_index {\n\n 1 => variant1,\n\n 2 => variant2,\n\n 3 => variant3,\n\n _ => panic!(\"No such variant\"),\n\n };\n\n println!(\"Using variant{}\", variant_index);\n\n Ferrum::new(handler).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/content_type.rs", "rank": 7, "score": 65462.33473744609 }, { "content": "fn main() {\n\n let mut chain = Chain::new(info);\n\n chain.link_after(DefaultContentType);\n\n Ferrum::new(chain).http((\"localhost\", 3000)).unwrap();\n\n}\n", "file_path": "examples/get_set_headers.rs", "rank": 8, "score": 63130.04047476698 }, { "content": "/// `AroundMiddleware` are used to wrap and replace the `Handler` in a 
`Chain`.\n\n///\n\n/// `AroundMiddleware` produce `Handler`s through their `around` method, which is\n\n/// called once on insertion into a `Chain` or can be called manually outside of a\n\n/// `Chain`.\n\npub trait AroundMiddleware {\n\n /// Produce a `Handler` from this `AroundMiddleware` given another `Handler`.\n\n ///\n\n /// Usually this means wrapping the handler and editing the `Request` on the\n\n /// way in and the `Response` on the way out.\n\n ///\n\n /// This is called only once, when an `AroundMiddleware` is added to a `Chain`\n\n /// using `Chain::around`, it is passed the `Chain`'s current `Handler`.\n\n fn around(self, handler: Box<Handler>) -> Box<Handler>;\n\n}\n\n\n\n/// The middleware chain used in Ferrum.\n\n///\n\n/// This is a canonical implementation of Ferrum's middleware system,\n\n/// but Ferrum's infrastructure is flexible enough to allow alternate\n\n/// systems.\n\npub struct Chain {\n\n befores: Vec<Box<BeforeMiddleware>>,\n\n afters: Vec<Box<AfterMiddleware>>,\n\n\n", "file_path": "src/middleware/mod.rs", "rank": 9, "score": 60748.81997837403 }, { "content": "fn hello_world(_: &mut Request) -> FerrumResult<Response> {\n\n Ok(Response::new().with_content(\"Hello World\", mime::TEXT_PLAIN))\n\n}\n\n\n", "file_path": "examples/time.rs", "rank": 10, "score": 59973.605913501495 }, { "content": "fn echo(request: &mut Request) -> FerrumResult<Response> {\n\n Ok(match request.method {\n\n\n\n Method::Post => {\n\n let body = request.take_body().concat2().wait().unwrap();\n\n Response::new()\n\n .with_status(StatusCode::Ok)\n\n .with_body(body)\n\n },\n\n\n\n _ => Response::new()\n\n .with_status(StatusCode::Ok)\n\n .with_body(\"Try POSTing data\"),\n\n })\n\n}\n\n\n", "file_path": "examples/echo.rs", "rank": 11, "score": 59973.605913501495 }, { "content": "fn hello_world(_: &mut Request) -> FerrumResult<Response> {\n\n Ok(Response::new().with_content(\"Hello World!\", mime::TEXT_PLAIN))\n\n}\n\n\n", "file_path": "examples/around.rs", 
"rank": 12, "score": 59973.605913501495 }, { "content": "/// `AfterMiddleware` are fired after a `Handler` is called inside of a Chain.\n\n///\n\n/// `AfterMiddleware` receive both a `Request` and a `Response` and are responsible for doing\n\n/// any response post-processing.\n\n///\n\n/// `AfterMiddleware` should *not* overwrite the contents of a Response. In the common case,\n\n/// a complete response is generated by the Chain's `Handler` and `AfterMiddleware` simply\n\n/// do post-processing of that Response, such as adding headers or logging.\n\npub trait AfterMiddleware: Send + Sync + 'static {\n\n /// Do whatever post-processing this middleware should do.\n\n fn after(&self, _request: &mut Request, response: Response) -> FerrumResult<Response> {\n\n Ok(response)\n\n }\n\n\n\n /// Respond to an error thrown by previous `AfterMiddleware`, the `Handler`,\n\n /// or a `BeforeMiddleware`.\n\n ///\n\n /// Returning `Ok` will cause the request to resume the normal flow at the\n\n /// next `AfterMiddleware`.\n\n fn catch(&self, _request: &mut Request, error: FerrumError) -> FerrumResult<Response> {\n\n Err(error)\n\n }\n\n}\n\n\n", "file_path": "src/middleware/mod.rs", "rank": 13, "score": 51860.608814515406 }, { "content": "/// `BeforeMiddleware` are fired before a `Handler` is called inside of a Chain.\n\n///\n\n/// `BeforeMiddleware` are responsible for doing request pre-processing that requires\n\n/// the ability to change control-flow, such as authorization middleware, or for editing\n\n/// the request by modifying the headers.\n\n///\n\n/// `BeforeMiddleware` only have access to the Request, if you need to modify or read a Response,\n\n/// you will need `AfterMiddleware`. 
Middleware which wishes to send an early response that\n\n/// is not an error cannot be `BeforeMiddleware`, but should instead be `AroundMiddleware`.\n\npub trait BeforeMiddleware: Send + Sync + 'static {\n\n /// Do whatever work this middleware should do with a `Request` object.\n\n fn before(&self, _request: &mut Request) -> FerrumResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// Respond to an error thrown by a previous `BeforeMiddleware`.\n\n ///\n\n /// Returning a `Ok` will cause the request to resume the normal flow at the\n\n /// next `BeforeMiddleware`, or if this was the last `BeforeMiddleware`, at the `Handler`.\n\n fn catch(&self, _request: &mut Request, error: FerrumError) -> FerrumResult<()> {\n\n Err(error)\n\n }\n\n}\n\n\n", "file_path": "src/middleware/mod.rs", "rank": 14, "score": 51858.504692415474 }, { "content": "/// `Handler`s are responsible for handling requests by creating Responses from Requests.\n\npub trait Handler: Send + Sync + 'static {\n\n /// Produce a `Response` from a Request, with the possibility of error.\n\n fn handle(&self, request: &mut Request) -> FerrumResult<Response>;\n\n}\n\n\n", "file_path": "src/middleware/mod.rs", "rank": 15, "score": 51856.84918267091 }, { "content": "struct Logger {\n\n mode: LoggerMode\n\n}\n\n\n", "file_path": "examples/around.rs", "rank": 16, "score": 46363.36209882882 }, { "content": "struct ResponseTime;\n\n\n\nimpl typemap::Key for ResponseTime {\n\n type Value = u64;\n\n}\n\n\n\nimpl BeforeMiddleware for ResponseTime {\n\n fn before(&self, request: &mut Request) -> FerrumResult<()> {\n\n request.extensions.insert::<ResponseTime>(precise_time_ns());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AfterMiddleware for ResponseTime {\n\n fn after(&self, request: &mut Request, response: Response) -> FerrumResult<Response> {\n\n let delta = precise_time_ns() - *request.extensions.get::<ResponseTime>().unwrap();\n\n println!(\"Request took: {} ms\", (delta as f64) / 1000000.0);\n\n Ok(response)\n\n }\n\n}\n\n\n", 
"file_path": "examples/time.rs", "rank": 17, "score": 44900.044435547796 }, { "content": "struct Router {\n\n // Routes here are simply matched with the uri path.\n\n routes: HashMap<String, Box<Handler>>\n\n}\n\n\n\nimpl Router {\n\n fn new() -> Self {\n\n Router {\n\n routes: HashMap::new()\n\n }\n\n }\n\n\n\n fn add_route<H>(&mut self, path: &str, handler: H)\n\n where H: Handler\n\n {\n\n self.routes.insert(path.to_string(), Box::new(handler));\n\n }\n\n}\n\n\n\nimpl Handler for Router {\n\n fn handle(&self, request: &mut Request) -> FerrumResult<Response> {\n\n match self.routes.get(request.uri.path()) {\n\n Some(handler) => handler.handle(request),\n\n None => Ok(Response::new().with_status(StatusCode::NotFound).with_body(\"Not found\"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/simple_routing.rs", "rank": 18, "score": 44900.044435547796 }, { "content": "struct ErrorProducer;\n\n\n", "file_path": "examples/error.rs", "rank": 19, "score": 44900.044435547796 }, { "content": "struct Middleware {\n\n normal: Arc<AtomicBool>,\n\n error: Arc<AtomicBool>,\n\n mode: Kind\n\n}\n\n\n\nimpl BeforeMiddleware for Middleware {\n\n fn before(&self, _: &mut Request) -> FerrumResult<()> {\n\n assert!(!self.normal.load(Relaxed));\n\n self.normal.store(true, Relaxed);\n\n\n\n match self.mode {\n\n Fine => { Ok(()) },\n\n Prob => { Err(error()) }\n\n }\n\n }\n\n\n\n fn catch(&self, _: &mut Request, _: FerrumError) -> FerrumResult<()> {\n\n assert!(!self.error.load(Relaxed));\n\n self.error.store(true, Relaxed);\n", "file_path": "src/middleware/test.rs", "rank": 20, "score": 44900.044435547796 }, { "content": "struct ErrorHandler;\n", "file_path": "examples/error.rs", "rank": 21, "score": 44900.044435547796 }, { "content": "fn main() {\n\n Ferrum::new(|_: &mut Request| {\n\n Ok(Response::new().with_status(StatusCode::NotFound))\n\n }).http(\"localhost:3000\").unwrap();\n\n}", "file_path": "examples/404.rs", "rank": 22, "score": 44179.28755864003 }, { "content": "fn main() 
{\n\n Ferrum::new(echo).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/echo.rs", "rank": 23, "score": 42580.96037441093 }, { "content": "fn main() {\n\n println!(\"Servers listening on 2000, 3000, and 4000\");\n\n\n\n thread::spawn(|| {\n\n Ferrum::new(\n\n Logger::new(LoggerMode::Silent).around(Box::new(hello_world))\n\n ).http(\"localhost:2000\").unwrap();\n\n });\n\n\n\n thread::spawn(|| {\n\n Ferrum::new(\n\n Logger::new(LoggerMode::Tiny).around(Box::new(hello_world))\n\n ).http(\"localhost:3000\").unwrap();\n\n });\n\n\n\n Ferrum::new(\n\n Logger::new(LoggerMode::Large).around(Box::new(hello_world))\n\n ).http(\"localhost:4000\").unwrap();\n\n}\n", "file_path": "examples/around.rs", "rank": 24, "score": 42580.96037441093 }, { "content": "fn main() {\n\n let mut chain = Chain::new(hello_world);\n\n chain.link_before(ResponseTime);\n\n chain.link_after(ResponseTime);\n\n Ferrum::new(chain).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/time.rs", "rank": 25, "score": 42580.96037441093 }, { "content": "fn main() {\n\n // Handler is attached here.\n\n let mut chain = Chain::new(ErrorHandler);\n\n\n\n // Link our error maker.\n\n chain.link_before(ErrorProducer);\n\n\n\n Ferrum::new(chain).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/error.rs", "rank": 26, "score": 42580.96037441093 }, { "content": "fn main() {\n\n Ferrum::new(|_: &mut Request| {\n\n Ok(Response::new().with_content(\"Hello world!\", mime::TEXT_PLAIN))\n\n }).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/hello.rs", "rank": 27, "score": 42580.96037441093 }, { "content": "fn main() {\n\n Ferrum::new(move |_: &mut Request | {\n\n Ok(Response::new_redirect(\"http://rust-lang.org\"))\n\n }).http(\"localhost:3000\").unwrap();\n\n}\n\n\n", "file_path": "examples/redirect.rs", "rank": 28, "score": 42580.96037441093 }, { "content": "fn main() {\n\n let mut router = Router::new();\n\n\n\n router.add_route(\"/hello\", |_: &mut 
Request| {\n\n Ok(Response::new().with_status(StatusCode::Ok).with_body(\"Hello world !\"))\n\n });\n\n\n\n router.add_route(\"/hello/again\", |_: &mut Request| {\n\n Ok(Response::new().with_status(StatusCode::Ok).with_body(\"Hello again !\"))\n\n });\n\n\n\n router.add_route(\"/error\", |_: &mut Request| {\n\n Ok(Response::new().with_status(StatusCode::BadRequest).with_body(\"Bad request\"))\n\n });\n\n\n\n Ferrum::new(router).http(\"localhost:3000\").unwrap();\n\n}\n", "file_path": "examples/simple_routing.rs", "rank": 29, "score": 41163.91173860072 }, { "content": "#[derive(Debug)]\n\nstruct StringError(String);\n\n\n\nimpl fmt::Display for StringError {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n Debug::fmt(self, formatter)\n\n }\n\n}\n\n\n\nimpl Error for StringError {\n\n fn description(&self) -> &str {\n\n &*self.0\n\n }\n\n}\n\n\n\nimpl Handler for ErrorHandler {\n\n fn handle(&self, _: &mut Request) -> FerrumResult<Response> {\n\n // This is never called!\n\n //\n\n // If a BeforeMiddleware returns an error through Err(...),\n\n // and it is not handled by a subsequent BeforeMiddleware in\n", "file_path": "examples/error.rs", "rank": 30, "score": 41067.992390588486 }, { "content": "pub trait UriPathSegments {\n\n fn path_segments(&self) -> Split<char>;\n\n\n\n fn decoded_path_segments(&self) -> Vec<String>;\n\n}\n\n\n\nimpl UriPathSegments for Uri {\n\n fn path_segments(&self) -> Split<char> {\n\n let path = self.path();\n\n if path.starts_with('/') {\n\n path[1..].split('/')\n\n } else {\n\n path.split('/')\n\n }\n\n }\n\n\n\n fn decoded_path_segments(&self) -> Vec<String> {\n\n self.path_segments().map(|segment| {\n\n match percent_decode(segment.as_bytes()).decode_utf8() {\n\n Ok(decoded) => decoded.to_string(),\n", "file_path": "src/request/uri.rs", "rank": 31, "score": 39429.766469683556 }, { "content": "// Stub request\n\nfn request() -> Request {\n\n Request::stub()\n\n}\n\n\n", "file_path": "src/middleware/test.rs", 
"rank": 32, "score": 37734.81934344635 }, { "content": "// Stub error\n\nfn error() -> FerrumError {\n\n use std::fmt::{self, Debug, Display};\n\n use std::error::Error as StdError;\n\n\n\n #[derive(Debug)]\n\n struct SomeError;\n\n\n\n impl Display for SomeError {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n Debug::fmt(self, fmt)\n\n }\n\n }\n\n\n\n impl StdError for SomeError {\n\n fn description(&self) -> &str { \"Some Error\" }\n\n }\n\n\n\n FerrumError {\n\n error: Box::new(SomeError),\n\n response: Some(response())\n\n }\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 33, "score": 36598.717999482375 }, { "content": "fn to_kind(val: bool) -> Kind {\n\n if val { Fine } else { Prob }\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 34, "score": 32526.375485059267 }, { "content": "fn sharedbool(val: bool) -> Arc<AtomicBool> {\n\n Arc::new(AtomicBool::new(val))\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 35, "score": 29633.79622560492 }, { "content": "fn to_chain(counters: &ChainLike<Twice<Arc<AtomicBool>>>,\n\n chain: ChainLike<Kind>) -> Chain {\n\n let (befores, handler, afters) = chain;\n\n let (ref beforec, ref handlerc, ref afterc) = *counters;\n\n\n\n let befores = befores.into_iter().zip(beforec.iter())\n\n .map(into_middleware)\n\n .map(|m| Box::new(m) as Box<BeforeMiddleware>)\n\n .collect::<Vec<_>>();\n\n\n\n let handler = into_middleware((handler, handlerc));\n\n\n\n let afters = afters.into_iter().zip(afterc.iter())\n\n .map(into_middleware)\n\n .map(|m| Box::new(m) as Box<AfterMiddleware>)\n\n .collect::<Vec<_>>();\n\n\n\n Chain {\n\n befores: befores,\n\n handler: Some(Box::new(handler) as Box<Handler>),\n\n afters: afters\n\n }\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 36, "score": 27213.70306536115 }, { "content": "fn into_middleware(input: (Kind, &Twice<Arc<AtomicBool>>)) -> Middleware {\n\n let mode = input.0;\n\n let (ref normal, ref error) = 
*input.1;\n\n\n\n Middleware {\n\n normal: normal.clone(),\n\n error: error.clone(),\n\n mode: mode\n\n }\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 37, "score": 26426.103800154782 }, { "content": "//! Ferrum's HTTP response Content representation and associated methods.\n\n\n\nuse std::ops::Deref;\n\nuse std::borrow::Cow;\n\n\n\nuse hyper::Body;\n\n\n\n#[derive(Debug)]\n\npub struct Content(pub Vec<u8>);\n\n\n\nimpl Deref for Content {\n\n type Target = Vec<u8>;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Vec<u8> {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for Content {\n", "file_path": "src/response/content.rs", "rank": 38, "score": 25775.20797428006 }, { "content": " match cow {\n\n Cow::Borrowed(b) => Content::from(b),\n\n Cow::Owned(o) => Content::from(o)\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Body> for Content {\n\n #[inline]\n\n fn into(self) -> Body {\n\n self.0.into()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_deref_content() {\n\n let content = Content(vec![1, 2]);\n\n\n\n assert_eq!(vec![1, 2], *content);\n\n assert_eq!(vec![1, 2].len(), content.len());\n\n }\n\n}", "file_path": "src/response/content.rs", "rank": 39, "score": 25772.59980389041 }, { "content": " }\n\n}\n\n\n\nimpl From<String> for Content {\n\n #[inline]\n\n fn from(string: String) -> Content {\n\n Content(string.as_bytes().to_vec())\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for Content {\n\n #[inline]\n\n fn from(slice: &'a str) -> Content {\n\n Content(slice.as_bytes().to_vec())\n\n }\n\n}\n\n\n\nimpl<'a> From<Cow<'a, str>> for Content {\n\n #[inline]\n\n fn from (cow: Cow<'a, str>) -> Content {\n", "file_path": "src/response/content.rs", "rank": 40, "score": 25769.067071603717 }, { "content": " #[inline]\n\n fn from(vec: Vec<u8>) -> Content {\n\n Content(vec)\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a [u8]> for Content {\n\n #[inline]\n\n fn from(slice: &'a [u8]) -> Content {\n\n Content(slice.to_vec())\n\n }\n\n}\n\n\n\nimpl<'a> 
From<Cow<'a, [u8]>> for Content {\n\n #[inline]\n\n fn from (cow: Cow<'a, [u8]>) -> Content {\n\n match cow {\n\n Cow::Borrowed(b) => Content::from(b),\n\n Cow::Owned(o) => Content::from(o)\n\n }\n", "file_path": "src/response/content.rs", "rank": 41, "score": 25768.585240054235 }, { "content": "extern crate ferrum;\n\n\n\nuse std::env;\n\n\n\nuse ferrum::*;\n\nuse ferrum::header::{ContentLength, ContentType};\n\n\n\n// All these variants do the same thing, with more or less options for customization.\n\n\n", "file_path": "examples/content_type.rs", "rank": 42, "score": 25767.47825220128 }, { "content": "fn test_chain(chain: ChainLike<Kind>, expected: ChainLike<Kind>) {\n\n let actual = counters(&chain);\n\n let chain = to_chain(&actual, chain);\n\n\n\n // Run the chain\n\n let _ = chain.handle(&mut request());\n\n\n\n // Get all the results\n\n let outbefores = actual.0.into_iter()\n\n .map(|(normal, _)| to_kind(normal.load(Relaxed))).collect::<Vec<_>>();\n\n\n\n let outhandler = to_kind((actual.1).0.load(Relaxed));\n\n\n\n let outafters = actual.2.into_iter()\n\n .map(|(normal, _)| to_kind(normal.load(Relaxed))).collect::<Vec<_>>();\n\n\n\n let outchain = (outbefores, outhandler, outafters);\n\n\n\n // Yay! 
Actually do the test!\n\n assert_eq!(outchain, expected);\n\n}\n", "file_path": "src/middleware/test.rs", "rank": 43, "score": 25159.06509379811 }, { "content": "extern crate ferrum;\n\n\n\nuse ferrum::*;\n\nuse ferrum::header::{ContentType, UserAgent};\n\n\n", "file_path": "examples/get_set_headers.rs", "rank": 44, "score": 24556.64299182226 }, { "content": "fn counters(chain: &ChainLike<Kind>) -> ChainLike<Twice<Arc<AtomicBool>>> {\n\n let (ref befores, _, ref afters) = *chain;\n\n\n\n (\n\n befores.iter()\n\n .map(|_| (sharedbool(false), sharedbool(false)))\n\n .collect::<Vec<_>>(),\n\n\n\n (sharedbool(false), sharedbool(false)),\n\n\n\n afters.iter()\n\n .map(|_| (sharedbool(false), sharedbool(false)))\n\n .collect::<Vec<_>>()\n\n )\n\n}\n\n\n", "file_path": "src/middleware/test.rs", "rank": 51, "score": 23918.20763162488 }, { "content": "//! Ferrum's HTTP Request representation and associated methods.\n\n\n\nuse std::mem;\n\nuse std::net::SocketAddr;\n\nuse std::fmt::{self, Debug};\n\n\n\nuse hyper::{Body, HttpVersion, Uri};\n\n\n\nuse typemap::{TypeMap, TypeMapInner};\n\nuse plugin::Extensible;\n\n\n\npub use hyper::server::Request as HyperRequest;\n\n\n\nuse {Plugin, Headers, Method};\n\n\n\npub mod uri;\n\npub use self::uri::*;\n\n\n\n/// The `Request` given to all `Middleware`.\n\n///\n", "file_path": "src/request/mod.rs", "rank": 54, "score": 23916.585762987273 }, { "content": " headers,\n\n body: Some(body),\n\n extensions: TypeMap::custom(),\n\n _p: (),\n\n }\n\n }\n\n\n\n pub fn take_body(&mut self) -> Body {\n\n let body = mem::replace(&mut self.body, None);\n\n body.unwrap_or_default()\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn stub() -> Request {\n\n use std::net::ToSocketAddrs;\n\n use std::str::FromStr;\n\n\n\n let uri = Uri::from_str(\"http://www.rust-lang.org\").unwrap();\n\n let uri_path_segments = uri.decoded_path_segments();\n\n\n", "file_path": "src/request/mod.rs", "rank": 55, "score": 23915.340438092677 }, { "content": " pub headers: 
Headers,\n\n\n\n /// The request body.\n\n pub body: Option<Body>,\n\n\n\n /// Extensible storage for data passed between middleware.\n\n pub extensions: TypeMap<TypeMapInner>,\n\n\n\n _p: (),\n\n}\n\n\n\nimpl Debug for Request {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"Request {{\")?;\n\n writeln!(f, \" uri: {:?}\", self.uri)?;\n\n writeln!(f, \" method: {:?}\", self.method)?;\n\n writeln!(f, \" version: {:?}\", self.version)?;\n\n writeln!(f, \" remote_addr: {:?}\", self.remote_addr)?;\n\n write!(f, \"}}\")?;\n\n Ok(())\n", "file_path": "src/request/mod.rs", "rank": 58, "score": 23914.749549076445 }, { "content": " fn extensions_mut(&mut self) -> &mut TypeMap<TypeMapInner> {\n\n &mut self.extensions\n\n }\n\n}\n\n\n\nimpl Plugin for Request {}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::net::ToSocketAddrs;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn test_create_request() {\n\n let uri = Uri::from_str(\"http://www.rust-lang.org/foo/bar\").unwrap();\n\n let request = Request::new(\n\n HyperRequest::new(Method::Get, uri.clone())\n\n );\n\n\n", "file_path": "src/request/mod.rs", "rank": 60, "score": 23914.26192242341 }, { "content": " // Internal invariant: this is always Some\n\n handler: Option<Box<Handler>>\n\n}\n\n\n\nimpl Chain {\n\n /// Construct a new ChainBuilder from a `Handler`.\n\n pub fn new<H: Handler>(handler: H) -> Chain {\n\n Chain {\n\n befores: vec![],\n\n afters: vec![],\n\n handler: Some(Box::new(handler) as Box<Handler>)\n\n }\n\n }\n\n\n\n /// Link both a before and after middleware to the chain at once.\n\n ///\n\n /// Middleware that have a Before and After piece should have a constructor\n\n /// which returns both as a tuple, so it can be passed directly to link.\n\n pub fn link<B, A>(&mut self, link: (B, A)) -> &mut Chain\n\n where A: AfterMiddleware, B: BeforeMiddleware\n", "file_path": "src/middleware/mod.rs", "rank": 61, "score": 23912.96077138632 }, { "content": " Request 
{\n\n uri,\n\n uri_path_segments,\n\n method: Method::Get,\n\n version: HttpVersion::Http11,\n\n remote_addr: Some(\"localhost:3000\".to_socket_addrs().unwrap().next().unwrap()),\n\n headers: Headers::new(),\n\n body: None,\n\n extensions: TypeMap::custom(),\n\n _p: (),\n\n }\n\n }\n\n}\n\n\n\n// Allow plugins to attach to requests.\n\nimpl Extensible<TypeMapInner> for Request {\n\n fn extensions(&self) -> &TypeMap<TypeMapInner> {\n\n &self.extensions\n\n }\n\n\n", "file_path": "src/request/mod.rs", "rank": 62, "score": 23912.825377724304 }, { "content": "//!\n\n//! See the documentation for each middleware for more details.\n\n//!\n\n//! ## Defining the middleware pipeline\n\n//!\n\n//! A `Chain` is a `Handler` that wraps another `Handler`. It is used to attach\n\n//! middleware to the wrapped `Handler` using a `link` method corresponding to\n\n//! each type of middleware. A sample middleware pipeline is shown below:\n\n//!\n\n//! ```rust\n\n//! use ferrum::*;\n\n//!\n\n//! # fn hello_handler(request: &mut Request) -> FerrumResult<Response> {\n\n//! # Ok(Response::new().with_status(StatusCode::Ok).with_body(\"Hello world!\"))\n\n//! # }\n\n//!\n\n//! struct RequestLoggingMiddleware;\n\n//! impl BeforeMiddleware for RequestLoggingMiddleware {\n\n//! fn before(&self, request: &mut Request) -> FerrumResult<()> {\n\n//! 
println!(\"{:?}\", request);\n", "file_path": "src/middleware/mod.rs", "rank": 64, "score": 23910.73423155679 }, { "content": " }\n\n}\n\n\n\nimpl Request {\n\n /// Create a request from an HyperRequest.\n\n ///\n\n /// This constructor consumes the HyperRequest.\n\n pub fn new(request: HyperRequest) -> Request {\n\n #[allow(deprecated)]\n\n let remote_addr = request.remote_addr();\n\n let (method, uri, version, headers, body) = request.deconstruct();\n\n\n\n let uri_path_segments = uri.decoded_path_segments();\n\n\n\n Request {\n\n uri,\n\n uri_path_segments,\n\n method,\n\n version,\n\n remote_addr,\n", "file_path": "src/request/mod.rs", "rank": 65, "score": 23910.35253471331 }, { "content": " {\n\n let (before, after) = link;\n\n self.befores.push(Box::new(before) as Box<BeforeMiddleware>);\n\n self.afters.push(Box::new(after) as Box<AfterMiddleware>);\n\n self\n\n }\n\n\n\n /// Link a `BeforeMiddleware` to the `Chain`, after all previously linked\n\n /// `BeforeMiddleware`.\n\n pub fn link_before<B>(&mut self, before: B) -> &mut Chain\n\n where B: BeforeMiddleware\n\n {\n\n self.befores.push(Box::new(before) as Box<BeforeMiddleware>);\n\n self\n\n }\n\n\n\n /// Link a `AfterMiddleware` to the `Chain`, after all previously linked\n\n /// `AfterMiddleware`.\n\n pub fn link_after<A>(&mut self, after: A) -> &mut Chain\n\n where A: AfterMiddleware\n", "file_path": "src/middleware/mod.rs", "rank": 66, "score": 23910.317976532093 }, { "content": " where F: Send + Sync + 'static + Fn(&mut Request) -> FerrumResult<Response>\n\n{\n\n fn handle(&self, req: &mut Request) -> FerrumResult<Response> {\n\n (*self)(req)\n\n }\n\n}\n\n\n\nimpl Handler for Box<Handler> {\n\n fn handle(&self, req: &mut Request) -> FerrumResult<Response> {\n\n (**self).handle(req)\n\n }\n\n}\n\n\n\nimpl<F> BeforeMiddleware for F\n\n where F: Send + Sync + 'static + Fn(&mut Request) -> FerrumResult<()>\n\n{\n\n fn before(&self, req: &mut Request) -> FerrumResult<()> {\n\n (*self)(req)\n\n }\n\n}\n", 
"file_path": "src/middleware/mod.rs", "rank": 67, "score": 23910.026265752964 }, { "content": "\n\nimpl<T> AfterMiddleware for Arc<T> where T: AfterMiddleware {\n\n fn after(&self, req: &mut Request, res: Response) -> FerrumResult<Response> {\n\n (**self).after(req, res)\n\n }\n\n\n\n fn catch(&self, req: &mut Request, err: FerrumError) -> FerrumResult<Response> {\n\n (**self).catch(req, err)\n\n }\n\n}\n\n\n\nimpl<F> AroundMiddleware for F\n\n where F: FnOnce(Box<Handler>) -> Box<Handler>\n\n{\n\n fn around(self, handler: Box<Handler>) -> Box<Handler> {\n\n self(handler)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test;\n", "file_path": "src/middleware/mod.rs", "rank": 68, "score": 23910.019464005614 }, { "content": " {\n\n self.afters.push(Box::new(after) as Box<AfterMiddleware>);\n\n self\n\n }\n\n\n\n /// Apply an `AroundMiddleware` to the `Handler` in this `Chain`.\n\n ///\n\n /// Note: This function is being renamed `link_around()`, and will\n\n /// eventually be removed.\n\n pub fn around<A>(&mut self, around: A) -> &mut Chain\n\n where A: AroundMiddleware\n\n {\n\n self.link_around(around)\n\n }\n\n\n\n /// Apply an `AroundMiddleware` to the `Handler` in this `Chain`.\n\n pub fn link_around<A>(&mut self, around: A) -> &mut Chain\n\n where A: AroundMiddleware\n\n {\n\n let mut handler = self.handler.take().unwrap();\n", "file_path": "src/middleware/mod.rs", "rank": 69, "score": 23909.23724326484 }, { "content": " }\n\n}\n\n\n\nimpl<F> AfterMiddleware for F\n\n where F: Send + Sync + 'static + Fn(&mut Request, Response) -> FerrumResult<Response>\n\n{\n\n fn after(&self, req: &mut Request, res: Response) -> FerrumResult<Response> {\n\n (*self)(req, res)\n\n }\n\n}\n\n\n\nimpl AfterMiddleware for Box<AfterMiddleware> {\n\n fn after(&self, req: &mut Request, res: Response) -> FerrumResult<Response> {\n\n (**self).after(req, res)\n\n }\n\n\n\n fn catch(&self, req: &mut Request, err: FerrumError) -> FerrumResult<Response> {\n\n (**self).catch(req, err)\n\n }\n\n}\n", 
"file_path": "src/middleware/mod.rs", "rank": 70, "score": 23909.080252826578 }, { "content": "/// Stores all the properties of the client's request plus\n\n/// an `TypeMap` for data communication between middleware.\n\npub struct Request {\n\n /// The requested URI.\n\n pub uri: Uri,\n\n\n\n /// The URI path segments collection\n\n pub uri_path_segments: Vec<String>,\n\n\n\n /// The request method.\n\n pub method: Method,\n\n\n\n /// The version of the HTTP protocol used.\n\n pub version: HttpVersion,\n\n\n\n /// The originating address of the request. Some underlying transports\n\n /// may not have a socket address, such as Unix Sockets.\n\n pub remote_addr: Option<SocketAddr>,\n\n\n\n /// The request headers.\n", "file_path": "src/request/mod.rs", "rank": 71, "score": 23908.34711208321 }, { "content": "//! This module contains Ferrum's middleware and handler system, the fundamental\n\n//! building blocks for handling HTTP requests and generating responses.\n\n//!\n\n//! # Handlers\n\n//!\n\n//! A `Handler` will produce a `Response` given a `Request`. Most handlers are\n\n//! functions or closures that accept a `&mut Request` as an argument and return\n\n//! an `FerrumResult` containing a `Response`. An `FerrumResult` is returned instead of\n\n//! directly returning a `Response` in order to indicate a possibility of\n\n//! failure (e.g. database timeout).\n\n//!\n\n//! Here's an example of a `Handler`:\n\n//!\n\n//! ```rust\n\n//! use ferrum::*;\n\n//!\n\n//! fn hello_handler(request: &mut Request) -> FerrumResult<Response> {\n\n//! Ok(Response::new().with_status(StatusCode::Ok).with_body(\"Hello world!\"))\n\n//! }\n\n//! 
```\n", "file_path": "src/middleware/mod.rs", "rank": 72, "score": 23908.128463602472 }, { "content": "\n\nimpl BeforeMiddleware for Box<BeforeMiddleware> {\n\n fn before(&self, req: &mut Request) -> FerrumResult<()> {\n\n (**self).before(req)\n\n }\n\n\n\n fn catch(&self, req: &mut Request, err: FerrumError) -> FerrumResult<()> {\n\n (**self).catch(req, err)\n\n }\n\n}\n\n\n\nimpl<T> BeforeMiddleware for Arc<T>\n\n where T: BeforeMiddleware\n\n{\n\n fn before(&self, req: &mut Request) -> FerrumResult<()> {\n\n (**self).before(req)\n\n }\n\n\n\n fn catch(&self, req: &mut Request, err: FerrumError) -> FerrumResult<()> {\n\n (**self).catch(req, err)\n", "file_path": "src/middleware/mod.rs", "rank": 73, "score": 23907.696289449395 }, { "content": " assert_eq!(request.uri, uri);\n\n assert_eq!(request.uri_path_segments, vec![\"foo\".to_string(), \"bar\".to_string()]);\n\n assert_eq!(request.method, Method::Get);\n\n assert_eq!(request.version, HttpVersion::default());\n\n assert_eq!(request.remote_addr, None);\n\n assert_eq!(request.headers, Headers::new());\n\n }\n\n\n\n #[test]\n\n fn test_create_request_stub() {\n\n let uri = Uri::from_str(\"http://www.rust-lang.org\").unwrap();\n\n let addr = \"127.0.0.1:3000\".to_socket_addrs().unwrap().next().unwrap();\n\n let request = Request::stub();\n\n\n\n assert_eq!(request.uri, uri);\n\n assert_eq!(request.uri_path_segments, vec![\"\".to_string()]);\n\n assert_eq!(request.method, Method::Get);\n\n assert_eq!(request.version, HttpVersion::default());\n\n assert_eq!(request.remote_addr.unwrap(), addr);\n\n assert_eq!(request.headers, Headers::new());\n\n }\n\n}", "file_path": "src/request/mod.rs", "rank": 74, "score": 23907.352510820318 }, { "content": "//!\n\n//! # Middleware\n\n//!\n\n//! In situations involving more complex logic, it may be desirable to transform\n\n//! `Request`s passed to a `Handler` or altering `Response`s sent to the\n\n//! clients. 
For example, an authorization step could only allow requests sent\n\n//! by authorized users to be passed to a `Handler` and respond to all other\n\n//! requests with a 403 status code. To faciliate such use cases, Ferrum's\n\n//! middleware system allows `Handler`s to be extended by defining middleware,\n\n//! which will perform transformations.\n\n//!\n\n//! There are three types of middleware:\n\n//!\n\n//! * A `BeforeMiddleware` alters a `Request`. It can be useful for handling\n\n//! control flow (e.g. routing and authorization).\n\n//! * An `AroundMiddleware` wraps a `Handler`, changing both the `Response`\n\n//! passed to the `Handler` and the returned `Response`.\n\n//! * An `AfterMiddleware` performs `Response` post-processing. It can be used\n\n//! for editing headers or logging `Response`s, but it should _not_ be used for\n\n//! changing the body of a `Response`.\n", "file_path": "src/middleware/mod.rs", "rank": 75, "score": 23907.15898148512 }, { "content": "//! Ok(())\n\n//! }\n\n//! }\n\n//!\n\n//! let mut chain = Chain::new(hello_handler);\n\n//! chain.link_before(RequestLoggingMiddleware {});\n\n//! // Since a Chain is a Handler, chain can be passed to Ferrum::new without any problems.\n\n//! // Ferrum::new(chain).http(\"127.0.0.1:3000\").unwrap();\n\n//! ```\n\n//!\n\n//! # The Request Handling Flow\n\n//!\n\n//! A diagram modeling the entire middleware system process is shown below:\n\n//!\n\n//! ```plain\n\n//! [b] = BeforeMiddleware\n\n//! [a] = AfterMiddleware\n\n//! [[h]] = AroundMiddleware\n\n//! [h] = Handler\n\n//! 
```\n", "file_path": "src/middleware/mod.rs", "rank": 76, "score": 23906.78362138688 }, { "content": " handler = around.around(handler);\n\n self.handler = Some(handler);\n\n self\n\n }\n\n}\n\n\n\nimpl Handler for Chain {\n\n fn handle(&self, req: &mut Request) -> FerrumResult<Response> {\n\n // Kick off at befores, which will continue into handler\n\n // then afters.\n\n self.continue_from_before(req, 0)\n\n }\n\n}\n\n\n\nimpl Chain {\n\n ///////////////// Implementation Helpers /////////////////\n\n\n\n // Enter the error flow from a before middleware, starting\n\n // at the passed index.\n\n //\n", "file_path": "src/middleware/mod.rs", "rank": 77, "score": 23906.6887670527 }, { "content": " fn continue_from_after(&self, req: &mut Request, index: usize,\n\n mut res: Response) -> FerrumResult<Response> {\n\n // If this was the last after middleware, we're done.\n\n if index >= self.afters.len() {\n\n return Ok(res);\n\n }\n\n\n\n for (i, after) in self.afters[index..].iter().enumerate() {\n\n res = match after.after(req, res) {\n\n Ok(r) => r,\n\n Err(err) => return self.fail_from_after(req, index + i + 1, err)\n\n }\n\n }\n\n\n\n // We made it with no error!\n\n Ok(res)\n\n }\n\n}\n\n\n\nimpl<F> Handler for F\n", "file_path": "src/middleware/mod.rs", "rank": 78, "score": 23906.313906802043 }, { "content": "//!\n\n//! Middleware authors should be cognizant that their middleware may be skipped\n\n//! during the error flow. Anything that *must* be done to each `Request` or\n\n//! `Response` should be run during both the normal and error flow by\n\n//! 
implementing the `catch` method to also do the necessary action.\n\n\n\nuse std::sync::Arc;\n\nuse {Request, Response, FerrumResult, FerrumError};\n\n\n\n/// `Handler`s are responsible for handling requests by creating Responses from Requests.\n", "file_path": "src/middleware/mod.rs", "rank": 80, "score": 23904.75533407075 }, { "content": " // Enter the error flow from an errored handle, starting with the\n\n // first AfterMiddleware.\n\n fn fail_from_handler(&self, req: &mut Request,\n\n err: FerrumError) -> FerrumResult<Response> {\n\n // Yield to next phase, nothing to do here.\n\n self.fail_from_after(req, 0, err)\n\n }\n\n\n\n // Enter the error flow from an errored after middleware, starting\n\n // with the passed index.\n\n //\n\n // If the index is out of bounds for the after middleware Vec,\n\n // this instead just returns the passed error.\n\n fn fail_from_after(&self, req: &mut Request, index: usize,\n\n mut err: FerrumError) -> FerrumResult<Response> {\n\n // If this was the last after, we're done.\n\n if index == self.afters.len() { return Err(err) }\n\n\n\n for (i, after) in self.afters[index..].iter().enumerate() {\n\n err = match after.catch(req, err) {\n", "file_path": "src/middleware/mod.rs", "rank": 81, "score": 23904.732113326954 }, { "content": " // If the index is out of bounds for the before middleware Vec,\n\n // this instead behaves the same as fail_from_handler.\n\n fn fail_from_before(&self, req: &mut Request, index: usize,\n\n mut err: FerrumError) -> FerrumResult<Response> {\n\n // If this was the last before, yield to next phase.\n\n if index >= self.befores.len() {\n\n return self.fail_from_handler(req, err)\n\n }\n\n\n\n for (i, before) in self.befores[index..].iter().enumerate() {\n\n err = match before.catch(req, err) {\n\n Err(err) => err,\n\n Ok(()) => return self.continue_from_before(req, index + i + 1)\n\n };\n\n }\n\n\n\n // Next phase\n\n self.fail_from_handler(req, err)\n\n }\n\n\n", "file_path": "src/middleware/mod.rs", 
"rank": 82, "score": 23904.576830992788 }, { "content": " Err(err) => err,\n\n Ok(res) => return self.continue_from_after(req, index + i + 1, res)\n\n }\n\n }\n\n\n\n // Done\n\n Err(err)\n\n }\n\n\n\n // Enter the normal flow in the before middleware, starting with the passed\n\n // index.\n\n fn continue_from_before(&self, req: &mut Request,\n\n index: usize) -> FerrumResult<Response> {\n\n // If this was the last beforemiddleware, start at the handler.\n\n if index >= self.befores.len() {\n\n return self.continue_from_handler(req)\n\n }\n\n\n\n for (i, before) in self.befores[index..].iter().enumerate() {\n\n match before.before(req) {\n", "file_path": "src/middleware/mod.rs", "rank": 83, "score": 23904.040629720603 }, { "content": " Ok(()) => {},\n\n Err(err) => return self.fail_from_before(req, index + i + 1, err)\n\n }\n\n }\n\n\n\n // Yield to next phase.\n\n self.continue_from_handler(req)\n\n }\n\n\n\n // Enter the normal flow at the handler.\n\n fn continue_from_handler(&self, req: &mut Request) -> FerrumResult<Response> {\n\n // unwrap is safe because it's always Some\n\n match self.handler.as_ref().unwrap().handle(req) {\n\n Ok(res) => self.continue_from_after(req, 0, res),\n\n Err(err) => self.fail_from_handler(req, err)\n\n }\n\n }\n\n\n\n // Enter the normal flow in the after middleware, starting with the passed\n\n // index.\n", "file_path": "src/middleware/mod.rs", "rank": 84, "score": 23903.91107355753 }, { "content": "//!\n\n//! With no errors, the flow looks like:\n\n//!\n\n//! ```plain\n\n//! [b] -> [b] -> [b] -> [[[[h]]]] -> [a] -> [a] -> [a] -> [a]\n\n//! ```\n\n//!\n\n//! A request first travels through all `BeforeMiddleware`, then a `Response` is\n\n//! generated by the `Handler`, which can be an arbitrary nesting of\n\n//! `AroundMiddleware`, then all `AfterMiddleware` are called with both the\n\n//! `Request` and `Response`. After all `AfterMiddleware` have been fired, the\n\n//! response is written back to the client.\n\n//!\n\n//! 
Ferrum's error handling system is pragmatic and focuses on tracking two pieces\n\n//! of information for error receivers (other middleware):\n\n//!\n\n//! * The cause of the error\n\n//! * The result (what to do about) the error.\n\n//!\n\n//! The cause of the error is represented simply by the error itself, and the\n", "file_path": "src/middleware/mod.rs", "rank": 85, "score": 23902.490530811494 }, { "content": "//! result of the error, representing the action to take in response to the\n\n//! error, is a complete Response, which will be sent at the end of the error\n\n//! flow.\n\n//!\n\n//! When an error is thrown in Ferrum by any middleware or handler returning an\n\n//! `Err` variant with an `FerrumError`, the flow of the `Request` switches to the\n\n//! error flow, which proceeds to just call the `catch` method of middleware and\n\n//! sidesteps the `Handler` entirely, since there is already a `Response` in the\n\n//! error.\n\n//!\n\n//! A `Request` can exit the error flow by returning an Ok from any of the catch\n\n//! methods. This resumes the flow at the middleware immediately following the\n\n//! middleware which handled the error. It is impossible to \"go back\" to an\n\n//! earlier middleware that was skipped.\n\n//!\n\n//! Generally speaking, returning a 5XX error code means that the error flow\n\n//! should be entered by raising an explicit error. Dealing with 4XX errors is\n\n//! trickier, since the server may not want to recognize an error that is\n\n//! entirely the clients fault; handling of 4XX error codes is up to to each\n\n//! 
application and middleware author.\n", "file_path": "src/middleware/mod.rs", "rank": 86, "score": 23902.490530811494 }, { "content": "/// Extensions\n\npub use typemap::TypeMap;\n\n\n\n/// Headers\n\npub use hyper::header;\n\npub use hyper::header::Headers;\n\npub use hyper::header::Header;\n\n\n\n/// Status Codes\n\npub use hyper::{Method, StatusCode, Uri};\n\n\n\n/// Expose `Pluggable` as `Plugin` so users can do `use ferrum::Plugin`.\n\npub use plugin::Pluggable as Plugin;\n\n\n\n/// Errors\n\npub use error::Error;\n\npub use error::FerrumError;\n\n\n\n/// Ferrum's error type and associated utilities.\n\npub mod error;\n", "file_path": "src/lib.rs", "rank": 87, "score": 14.139991869696551 }, { "content": "Ferrum\n\n======\n\n\n\n> An Extensible, Concurrent Web Framework for Rust.\n\n\n\nFerrum is a fork of the [Iron](https://github.com/iron/iron).\n\n\n\n## Hello World Example\n\n\n\n```rust\n\nextern crate ferrum;\n\n\n\nuse ferrum::*;\n\n\n\nfn main() {\n\n Ferrum::new(|_: &mut Request| {\n\n Ok(Response::new().with_content(\"Hello world!\", mime::TEXT_PLAIN))\n\n }).http(\"localhost:3000\").unwrap();\n\n}\n\n```\n\n\n\n## Response Timer Example\n\n\n\n```rust\n\nextern crate time;\n\nextern crate ferrum;\n\n\n\nuse ferrum::*;\n\nuse time::precise_time_ns;\n\n\n\nstruct ResponseTime;\n\n\n\nimpl typemap::Key for ResponseTime {\n\n type Value = u64;\n\n}\n\n\n\nimpl BeforeMiddleware for ResponseTime {\n\n fn before(&self, request: &mut Request) -> FerrumResult<()> {\n\n request.extensions.insert::<ResponseTime>(precise_time_ns());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AfterMiddleware for ResponseTime {\n\n fn after(&self, request: &mut Request, response: Response) -> FerrumResult<Response> {\n\n let delta = precise_time_ns() - *request.extensions.get::<ResponseTime>().unwrap();\n\n println!(\"Request took: {} ms\", (delta as f64) / 1000000.0);\n\n Ok(response)\n\n }\n\n}\n\n\n\nfn hello_world(_: &mut Request) -> FerrumResult<Response> {\n\n 
Ok(Response::new().with_content(\"Hello World\", mime::TEXT_PLAIN))\n\n}\n\n\n\nfn main() {\n\n let mut chain = Chain::new(hello_world);\n\n chain.link_before(ResponseTime);\n\n chain.link_after(ResponseTime);\n\n Ferrum::new(chain).http(\"localhost:3000\").unwrap();\n\n}\n\n```\n\n\n\n## Overview\n\n\n\nFerrum is a high level web framework built in and for Rust, built on\n\n[hyper](https://github.com/hyperium/hyper). Ferrum is designed to take advantage\n\nof Rust's greatest features - its excellent type system and its principled\n\napproach to ownership in both single threaded and multi threaded contexts.\n\n\n\nFerrum is highly concurrent and can scale horizontally on more machines behind a\n\nload balancer or by running more threads on a more powerful machine. Ferrum\n\navoids the bottlenecks encountered in highly concurrent code by avoiding shared\n\nwrites and locking in the core framework.\n\n\n", "file_path": "README.md", "rank": 88, "score": 12.367680208743788 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse hyper::server::{NewService, Service};\n\nuse futures::{future, Future};\n\nuse futures_cpupool::{CpuPool, CpuFuture};\n\n\n\nuse request::{Request, HyperRequest};\n\nuse response::HyperResponse;\n\nuse error::HyperError;\n\nuse middleware::Handler;\n\n\n\npub struct InitialService<H>\n\n where H: Handler\n\n{\n\n pub handler: Arc<H>,\n\n pub thread_pool: Arc<CpuPool>,\n\n}\n\n\n\nimpl<H> InitialService<H>\n", "file_path": "src/service.rs", "rank": 89, "score": 11.459359951200812 }, { "content": "impl<H> Service for InitialService<H>\n\n where H: Handler\n\n{\n\n type Request = HyperRequest;\n\n type Response = HyperResponse;\n\n type Error = HyperError;\n\n type Future = CpuFuture<Self::Response, Self::Error>;\n\n\n\n fn call(&self, request: Self::Request) -> Self::Future {\n\n let mut request = Request::new(request);\n\n let handler = self.handler.clone();\n\n\n\n self.thread_pool.spawn_fn(move || {\n\n let handle_result = match 
handler.handle(&mut request) {\n\n Ok(response) => Box::new(future::ok(response)),\n\n Err(err) => Box::new(future::err(err))\n\n };\n\n Box::new(handle_result\n\n .and_then(move |response| {\n\n future::ok(HyperResponse::from(response))\n\n })\n\n .or_else(move |error| {\n\n future::ok(HyperResponse::from(error))\n\n })\n\n )\n\n })\n\n }\n\n}\n", "file_path": "src/service.rs", "rank": 90, "score": 11.40131957678051 }, { "content": " where H: Handler\n\n{\n\n pub fn new(handler: H, thread_pool_size: Option<usize>) -> InitialService<H> {\n\n let thread_pool = if let Some(size) = thread_pool_size {\n\n CpuPool::new(size)\n\n } else {\n\n CpuPool::new_num_cpus()\n\n };\n\n\n\n InitialService {\n\n handler: Arc::new(handler),\n\n thread_pool: Arc::new(thread_pool),\n\n }\n\n }\n\n}\n\n\n\nimpl<H> Clone for InitialService<H>\n\n where H: Handler\n\n{\n\n fn clone(&self) -> Self {\n", "file_path": "src/service.rs", "rank": 91, "score": 11.014894185152455 }, { "content": "//! Exposes the `Ferrum` type, the main entrance point of the `Ferrum` library.\n\n\n\nuse std::net::ToSocketAddrs;\n\nuse std::time::Duration;\n\nuse std::io::{Error, ErrorKind};\n\n\n\nuse hyper::Body;\n\nuse hyper::server::{Http, Server as HyperServer};\n\n\n\nuse error::HyperResult;\n\nuse service::InitialService;\n\nuse middleware::Handler;\n\n\n\npub type Server<H> = HyperServer<InitialService<H>, Body>;\n\n\n\n/// The primary entrance point to `Ferrum`, a `struct` to instantiate a new server.\n\n///\n\n/// `Ferrum` contains the `Handler` which takes a `Request` and produces a `Response`.\n\npub struct Ferrum<H>\n\n where H: Handler\n", "file_path": "src/ferrum.rs", "rank": 92, "score": 10.86713736859282 }, { "content": "\n\n/// The Result alias used throughout Ferrum and in clients of Ferrum.\n\npub type FerrumResult<T> = Result<T, FerrumError>;\n\n\n\n/// Re-exports from the `TypeMap` crate.\n\npub mod typemap {\n\n pub use plugin::typemap::{TypeMap, Key};\n\n pub use uany::UnsafeAny;\n\n pub 
type TypeMapInner = UnsafeAny + Send + Sync;\n\n}\n\n\n\n// Publicized to show the documentation\n\npub mod middleware;\n\n\n\n/// Request utilities\n\npub mod request;\n\n\n\n/// Response utilities\n\npub mod response;\n\n\n\npub mod service;\n\n\n\nmod ferrum;", "file_path": "src/lib.rs", "rank": 93, "score": 10.460187212157239 }, { "content": "pub extern crate hyper;\n\npub extern crate futures;\n\nextern crate futures_cpupool;\n\nextern crate unsafe_any as uany;\n\nextern crate ferrum_plugin as plugin;\n\nextern crate num_cpus;\n\nextern crate mime_guess;\n\npub extern crate mime;\n\npub extern crate url;\n\n\n\n/// Request + Response\n\npub use request::Request;\n\npub use response::Response;\n\n\n\n/// Middleware system\n\npub use middleware::{BeforeMiddleware, AfterMiddleware, AroundMiddleware, Handler, Chain};\n\n\n\n/// Server\n\npub use ferrum::*;\n\n\n", "file_path": "src/lib.rs", "rank": 94, "score": 10.201234791380854 }, { "content": " }\n\n}\n\n\n\nimpl From<FerrumError> for HyperResponse {\n\n fn from(error: FerrumError) -> HyperResponse {\n\n match error.response {\n\n Some(response) => HyperResponse::from(response),\n\n None => {\n\n let error_message = format!(\"ERROR: {}\", error);\n\n HyperResponse::new()\n\n .with_header(ContentLength(error_message.len() as u64))\n\n .with_body(error_message)\n\n .with_status(StatusCode::InternalServerError)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 95, "score": 10.025918138100133 }, { "content": " InitialService {\n\n handler: self.handler.clone(),\n\n thread_pool: self.thread_pool.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<H> NewService for InitialService<H>\n\n where H: Handler\n\n{\n\n type Request = HyperRequest;\n\n type Response = HyperResponse;\n\n type Error = HyperError;\n\n type Instance = Self;\n\n\n\n fn new_service(&self) -> io::Result<Self::Instance> {\n\n Ok(self.clone())\n\n }\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 96, "score": 9.743787912655646 }, { "content": 
"{\n\n /// Ferrum contains a `Handler`, which it uses to create responses for client requests.\n\n pub handler: H,\n\n\n\n /// Controls the timeout for keep alive connections.\n\n ///\n\n /// The default is `true`.\n\n pub keep_alive: bool,\n\n\n\n /// Server timeout.\n\n pub timeout: Option<Duration>,\n\n\n\n /// The number of request handling threads.\n\n ///\n\n /// Defaults to `num_cpus`.\n\n pub num_threads: usize,\n\n}\n\n\n\nimpl<H> Ferrum<H>\n\n where H: Handler\n", "file_path": "src/ferrum.rs", "rank": 97, "score": 9.208319548954247 }, { "content": "{\n\n /// Instantiate a new instance of `Ferrum`.\n\n ///\n\n /// This will create a new `Ferrum`, the base unit of the server, using the\n\n /// passed in `Handler`.\n\n pub fn new(handler: H) -> Ferrum<H> {\n\n Ferrum {\n\n handler,\n\n keep_alive: true,\n\n timeout: Some(Duration::from_secs(30)),\n\n num_threads: ::num_cpus::get(),\n\n }\n\n }\n\n\n\n /// Kick off the server process using the HTTP protocol.\n\n ///\n\n /// Call this once to begin listening for requests on the server.\n\n /// This consumes the Ferrum instance. 
This method will block\n\n /// the current thread executing the HTTP server.\n\n pub fn http<A>(self, addr: A) -> HyperResult<()>\n", "file_path": "src/ferrum.rs", "rank": 98, "score": 8.63138069383733 }, { "content": "/// it (building up a cause chain).\n\n///\n\n/// The `response` field provides a tangible action to be taken if this error\n\n/// is not otherwise handled.\n\n#[derive(Debug)]\n\npub struct FerrumError {\n\n /// The underlying error\n\n ///\n\n /// This can be layered and will be logged at the end of an errored request.\n\n pub error: Box<Error + Send>,\n\n\n\n /// What to do about this error.\n\n ///\n\n /// This Response will be used when the error-handling flow finishes.\n\n pub response: Option<Response>\n\n}\n\n\n\nimpl FerrumError {\n\n /// Create a new `FerrumError` from an error and a response.\n\n pub fn new<E: 'static + Error + Send>(error: E, response: Option<Response>) -> FerrumError {\n", "file_path": "src/error.rs", "rank": 99, "score": 8.605738406358906 } ]
Rust
src/scu_reset/prclr1.rs
austinglaser/xmc4700
12e1d2b638af5ff0e7088605a4299590a2cd60eb
#[doc = "Writer for register PRCLR1"] pub type W = crate::W<u32, super::PRCLR1>; #[doc = "Register PRCLR1 `reset()`'s with value 0"] impl crate::ResetValue for super::PRCLR1 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "CCU43 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CCU43RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<CCU43RS_AW> for bool { #[inline(always)] fn from(variant: CCU43RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `CCU43RS`"] pub struct CCU43RS_W<'a> { w: &'a mut W, } impl<'a> CCU43RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CCU43RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(CCU43RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(CCU43RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "LEDTS Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LEDTSCU0RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<LEDTSCU0RS_AW> for bool { #[inline(always)] fn from(variant: LEDTSCU0RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `LEDTSCU0RS`"] pub struct LEDTSCU0RS_W<'a> { w: &'a mut W, } impl<'a> LEDTSCU0RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LEDTSCU0RS_AW) -> &'a mut W { { 
self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(LEDTSCU0RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(LEDTSCU0RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "MultiCAN Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCAN0RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<MCAN0RS_AW> for bool { #[inline(always)] fn from(variant: MCAN0RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `MCAN0RS`"] pub struct MCAN0RS_W<'a> { w: &'a mut W, } impl<'a> MCAN0RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MCAN0RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MCAN0RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MCAN0RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "DAC Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DACRS_AW { #[doc = "0: No effect"] VALUE1 = 0, 
#[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<DACRS_AW> for bool { #[inline(always)] fn from(variant: DACRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `DACRS`"] pub struct DACRS_W<'a> { w: &'a mut W, } impl<'a> DACRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DACRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(DACRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(DACRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "MMC Interface Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MMCIRS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<MMCIRS_AW> for bool { #[inline(always)] fn from(variant: MMCIRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `MMCIRS`"] pub struct MMCIRS_W<'a> { w: &'a mut W, } impl<'a> MMCIRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MMCIRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MMCIRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MMCIRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut 
W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "USIC1 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum USIC1RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<USIC1RS_AW> for bool { #[inline(always)] fn from(variant: USIC1RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `USIC1RS`"] pub struct USIC1RS_W<'a> { w: &'a mut W, } impl<'a> USIC1RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: USIC1RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(USIC1RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(USIC1RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "USIC2 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum USIC2RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<USIC2RS_AW> for bool { #[inline(always)] fn from(variant: USIC2RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `USIC2RS`"] pub struct USIC2RS_W<'a> { w: &'a mut W, } impl<'a> USIC2RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: USIC2RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] 
#[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(USIC2RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(USIC2RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "PORTS Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PPORTSRS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<PPORTSRS_AW> for bool { #[inline(always)] fn from(variant: PPORTSRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `PPORTSRS`"] pub struct PPORTSRS_W<'a> { w: &'a mut W, } impl<'a> PPORTSRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PPORTSRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(PPORTSRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(PPORTSRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } impl W { #[doc = "Bit 0 - CCU43 Reset Clear"] #[inline(always)] pub fn ccu43rs(&mut self) -> CCU43RS_W { CCU43RS_W { w: self } } #[doc = "Bit 3 - LEDTS Reset Clear"] #[inline(always)] pub fn 
ledtscu0rs(&mut self) -> LEDTSCU0RS_W { LEDTSCU0RS_W { w: self } } #[doc = "Bit 4 - MultiCAN Reset Clear"] #[inline(always)] pub fn mcan0rs(&mut self) -> MCAN0RS_W { MCAN0RS_W { w: self } } #[doc = "Bit 5 - DAC Reset Clear"] #[inline(always)] pub fn dacrs(&mut self) -> DACRS_W { DACRS_W { w: self } } #[doc = "Bit 6 - MMC Interface Reset Clear"] #[inline(always)] pub fn mmcirs(&mut self) -> MMCIRS_W { MMCIRS_W { w: self } } #[doc = "Bit 7 - USIC1 Reset Clear"] #[inline(always)] pub fn usic1rs(&mut self) -> USIC1RS_W { USIC1RS_W { w: self } } #[doc = "Bit 8 - USIC2 Reset Clear"] #[inline(always)] pub fn usic2rs(&mut self) -> USIC2RS_W { USIC2RS_W { w: self } } #[doc = "Bit 9 - PORTS Reset Clear"] #[inline(always)] pub fn pportsrs(&mut self) -> PPORTSRS_W { PPORTSRS_W { w: self } } }
#[doc = "Writer for register PRCLR1"] pub type W = crate::W<u32, super::PRCLR1>; #[doc = "Register PRCLR1 `reset()`'s with value 0"] impl crate::ResetValue for super::PRCLR1 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "CCU43 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CCU43RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<CCU43RS_AW> for bool { #[inline(always)] fn from(variant: CCU43RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `CCU43RS`"] pub struct CCU43RS_W<'a> { w: &'a mut W, } impl<'a> CCU43RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CCU43RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(CCU43RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(CCU43RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "LEDTS Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LEDTSCU0RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<LEDTSCU0RS_AW> for bool { #[inline(always)] fn from(variant: LEDTSCU0RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `LEDTSCU0RS`"] pub struct LEDTSCU0RS_W<'a> { w: &'a mut W, } impl<'a> LEDTSCU0RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LEDTSCU0RS_AW) -> &'a mut W { { 
self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(LEDTSCU0RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(LEDTSCU0RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 <<
"USIC1 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum USIC1RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<USIC1RS_AW> for bool { #[inline(always)] fn from(variant: USIC1RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `USIC1RS`"] pub struct USIC1RS_W<'a> { w: &'a mut W, } impl<'a> USIC1RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: USIC1RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(USIC1RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(USIC1RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "USIC2 Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum USIC2RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<USIC2RS_AW> for bool { #[inline(always)] fn from(variant: USIC2RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `USIC2RS`"] pub struct USIC2RS_W<'a> { w: &'a mut W, } impl<'a> USIC2RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: USIC2RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(USIC2RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(USIC2RS_AW::VALUE2) } #[doc = r"Sets the field 
bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "PORTS Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PPORTSRS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<PPORTSRS_AW> for bool { #[inline(always)] fn from(variant: PPORTSRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `PPORTSRS`"] pub struct PPORTSRS_W<'a> { w: &'a mut W, } impl<'a> PPORTSRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PPORTSRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(PPORTSRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(PPORTSRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } impl W { #[doc = "Bit 0 - CCU43 Reset Clear"] #[inline(always)] pub fn ccu43rs(&mut self) -> CCU43RS_W { CCU43RS_W { w: self } } #[doc = "Bit 3 - LEDTS Reset Clear"] #[inline(always)] pub fn ledtscu0rs(&mut self) -> LEDTSCU0RS_W { LEDTSCU0RS_W { w: self } } #[doc = "Bit 4 - MultiCAN Reset Clear"] #[inline(always)] pub fn mcan0rs(&mut self) -> MCAN0RS_W { MCAN0RS_W { w: self } } #[doc = "Bit 5 - DAC Reset Clear"] 
#[inline(always)] pub fn dacrs(&mut self) -> DACRS_W { DACRS_W { w: self } } #[doc = "Bit 6 - MMC Interface Reset Clear"] #[inline(always)] pub fn mmcirs(&mut self) -> MMCIRS_W { MMCIRS_W { w: self } } #[doc = "Bit 7 - USIC1 Reset Clear"] #[inline(always)] pub fn usic1rs(&mut self) -> USIC1RS_W { USIC1RS_W { w: self } } #[doc = "Bit 8 - USIC2 Reset Clear"] #[inline(always)] pub fn usic2rs(&mut self) -> USIC2RS_W { USIC2RS_W { w: self } } #[doc = "Bit 9 - PORTS Reset Clear"] #[inline(always)] pub fn pportsrs(&mut self) -> PPORTSRS_W { PPORTSRS_W { w: self } } }
3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "MultiCAN Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCAN0RS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<MCAN0RS_AW> for bool { #[inline(always)] fn from(variant: MCAN0RS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `MCAN0RS`"] pub struct MCAN0RS_W<'a> { w: &'a mut W, } impl<'a> MCAN0RS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MCAN0RS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MCAN0RS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MCAN0RS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "DAC Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DACRS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<DACRS_AW> for bool { #[inline(always)] fn from(variant: DACRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `DACRS`"] pub struct DACRS_W<'a> { w: &'a mut W, } impl<'a> DACRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DACRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(DACRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { 
self.variant(DACRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "MMC Interface Reset Clear\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MMCIRS_AW { #[doc = "0: No effect"] VALUE1 = 0, #[doc = "1: De-assert reset"] VALUE2 = 1, } impl From<MMCIRS_AW> for bool { #[inline(always)] fn from(variant: MMCIRS_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `MMCIRS`"] pub struct MMCIRS_W<'a> { w: &'a mut W, } impl<'a> MMCIRS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MMCIRS_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No effect"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MMCIRS_AW::VALUE1) } #[doc = "De-assert reset"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MMCIRS_AW::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc =
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 169917.93770332087 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\")).unwrap().write_all(include_bytes!(\"device.x\")).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 25, "score": 64684.75335907798 }, { "content": "#[doc = \"Reader of register TYPE\"]\n\npub type R = crate::R<u32, super::TYPE>;\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\nimpl R {\n\n #[doc = \"Bits 0:31 - Component Type\"]\n\n #[inline(always)]\n\n pub fn value(&self) -> VALUE_R {\n\n VALUE_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n", "file_path": "src/gpdma0/type_.rs", "rank": 26, "score": 60978.54169541505 }, { "content": "#[doc = \"Reader of register TYPE\"]\n\npub type R = crate::R<u32, super::TYPE>;\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, 
u32>;\n\nimpl R {\n\n #[doc = \"Bits 0:31 - Component Type\"]\n\n #[inline(always)]\n\n pub fn value(&self) -> VALUE_R {\n\n VALUE_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n", "file_path": "src/gpdma1/type_.rs", "rank": 27, "score": 60978.54169541505 }, { "content": "#[doc = \"Reader of register DEBUG\"]\n\npub type R = crate::R<u32, super::DEBUG>;\n\n#[doc = \"Reader of field `RPESTS`\"]\n\npub type RPESTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RFCFCSTS`\"]\n\npub type RFCFCSTS_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `RWCSTS`\"]\n\npub type RWCSTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RRCSTS`\"]\n\npub type RRCSTS_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `RXFSTS`\"]\n\npub type RXFSTS_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `TPESTS`\"]\n\npub type TPESTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TFCSTS`\"]\n\npub type TFCSTS_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `TXPAUSED`\"]\n\npub type TXPAUSED_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TRCSTS`\"]\n\npub type TRCSTS_R = crate::R<u8, u8>;\n", "file_path": "src/eth0/debug.rs", "rank": 28, "score": 60966.44927631018 }, { "content": "#[doc = \"Reader of field `TWCSTS`\"]\n\npub type TWCSTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXFSTS`\"]\n\npub type TXFSTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXSTSFSTS`\"]\n\npub type TXSTSFSTS_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - MAC MII Receive Protocol Engine Status\"]\n\n #[inline(always)]\n\n pub fn rpests(&self) -> RPESTS_R {\n\n RPESTS_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 1:2 - MAC Receive Frame Controller FIFO Status\"]\n\n #[inline(always)]\n\n pub fn rfcfcsts(&self) -> RFCFCSTS_R {\n\n RFCFCSTS_R::new(((self.bits >> 1) & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 4 - MTL Rx FIFO Write Controller Active Status\"]\n\n #[inline(always)]\n\n pub fn rwcsts(&self) -> RWCSTS_R {\n", 
"file_path": "src/eth0/debug.rs", "rank": 29, "score": 60963.321327050144 }, { "content": " TFCSTS_R::new(((self.bits >> 17) & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 19 - MAC transmitter in PAUSE\"]\n\n #[inline(always)]\n\n pub fn txpaused(&self) -> TXPAUSED_R {\n\n TXPAUSED_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 20:21 - MTL Tx FIFO Read Controller Status\"]\n\n #[inline(always)]\n\n pub fn trcsts(&self) -> TRCSTS_R {\n\n TRCSTS_R::new(((self.bits >> 20) & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 22 - MTL Tx FIFO Write Controller Active Status\"]\n\n #[inline(always)]\n\n pub fn twcsts(&self) -> TWCSTS_R {\n\n TWCSTS_R::new(((self.bits >> 22) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 24 - MTL Tx FIFO Not Empty Status\"]\n\n #[inline(always)]\n\n pub fn txfsts(&self) -> TXFSTS_R {\n\n TXFSTS_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 25 - MTL TxStatus FIFO Full Status\"]\n\n #[inline(always)]\n\n pub fn txstsfsts(&self) -> TXSTSFSTS_R {\n\n TXSTSFSTS_R::new(((self.bits >> 25) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/eth0/debug.rs", "rank": 30, "score": 60953.78574343594 }, { "content": " RWCSTS_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 5:6 - MTL Rx FIFO Read Controller State\"]\n\n #[inline(always)]\n\n pub fn rrcsts(&self) -> RRCSTS_R {\n\n RRCSTS_R::new(((self.bits >> 5) & 0x03) as u8)\n\n }\n\n #[doc = \"Bits 8:9 - MTL Rx FIFO Fill-level Status\"]\n\n #[inline(always)]\n\n pub fn rxfsts(&self) -> RXFSTS_R {\n\n RXFSTS_R::new(((self.bits >> 8) & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 16 - MAC MII Transmit Protocol Engine Status\"]\n\n #[inline(always)]\n\n pub fn tpests(&self) -> TPESTS_R {\n\n TPESTS_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 17:18 - MAC Transmit Frame Controller Status\"]\n\n #[inline(always)]\n\n pub fn tfcsts(&self) -> TFCSTS_R {\n", "file_path": "src/eth0/debug.rs", "rank": 31, "score": 60950.43491612092 }, { "content": "#[doc(hidden)]\n\npub struct 
_PRSET1;\n\n#[doc = \"`write(|w| ..)` method takes [prset1::W](prset1::W) writer structure\"]\n\nimpl crate::Writable for PRSET1 {}\n\n#[doc = \"RCU Peripheral 1 Reset Set\"]\n\npub mod prset1;\n\n#[doc = \"RCU Peripheral 1 Reset Clear\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prclr1](prclr1) module\"]\n\npub type PRCLR1 = crate::Reg<u32, _PRCLR1>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRCLR1;\n\n#[doc = \"`write(|w| ..)` method takes [prclr1::W](prclr1::W) writer structure\"]\n\nimpl crate::Writable for PRCLR1 {}\n\n#[doc = \"RCU Peripheral 1 Reset Clear\"]\n\npub mod prclr1;\n\n#[doc = \"RCU Peripheral 2 Reset Status\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prstat2](prstat2) module\"]\n\npub type PRSTAT2 = crate::Reg<u32, _PRSTAT2>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSTAT2;\n", "file_path": "src/scu_reset.rs", "rank": 32, "score": 60911.45160811253 }, { "content": "impl crate::Readable for RSTSTAT {}\n\n#[doc = \"RCU Reset Status\"]\n\npub mod rststat;\n\n#[doc = \"RCU Reset Set Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [rstset](rstset) module\"]\n\npub type RSTSET = crate::Reg<u32, _RSTSET>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _RSTSET;\n\n#[doc = \"`write(|w| ..)` method takes [rstset::W](rstset::W) writer structure\"]\n\nimpl crate::Writable for RSTSET {}\n\n#[doc = \"RCU Reset Set Register\"]\n\npub mod rstset;\n\n#[doc = \"RCU Reset Clear Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [rstclr](rstclr) module\"]\n\npub type RSTCLR = crate::Reg<u32, _RSTCLR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _RSTCLR;\n\n#[doc = \"`write(|w| ..)` method takes [rstclr::W](rstclr::W) writer structure\"]\n\nimpl crate::Writable for RSTCLR {}\n\n#[doc = \"RCU Reset Clear Register\"]\n", "file_path": "src/scu_reset.rs", "rank": 33, "score": 60906.090176463935 }, { "content": "#[doc = \"`read()` method returns [prstat2::R](prstat2::R) reader structure\"]\n\nimpl crate::Readable for PRSTAT2 {}\n\n#[doc = \"RCU Peripheral 2 Reset Status\"]\n\npub mod prstat2;\n\n#[doc = \"RCU Peripheral 2 Reset Set\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prset2](prset2) module\"]\n\npub type PRSET2 = crate::Reg<u32, _PRSET2>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSET2;\n\n#[doc = \"`write(|w| ..)` method takes [prset2::W](prset2::W) writer structure\"]\n\nimpl crate::Writable for PRSET2 {}\n\n#[doc = \"RCU Peripheral 2 Reset Set\"]\n\npub mod prset2;\n\n#[doc = \"RCU Peripheral 2 Reset Clear\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prclr2](prclr2) module\"]\n\npub type PRCLR2 = crate::Reg<u32, _PRCLR2>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRCLR2;\n\n#[doc = \"`write(|w| ..)` method takes [prclr2::W](prclr2::W) writer structure\"]\n\nimpl crate::Writable for PRCLR2 {}\n", "file_path": "src/scu_reset.rs", "rank": 34, "score": 60902.86293656503 }, { "content": "pub type PRCLR0 = crate::Reg<u32, _PRCLR0>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRCLR0;\n\n#[doc = \"`write(|w| ..)` method takes [prclr0::W](prclr0::W) writer structure\"]\n\nimpl crate::Writable for PRCLR0 {}\n\n#[doc = \"RCU Peripheral 0 Reset Clear\"]\n\npub mod prclr0;\n\n#[doc = \"RCU Peripheral 1 Reset Status\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prstat1](prstat1) module\"]\n\npub type PRSTAT1 = crate::Reg<u32, _PRSTAT1>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSTAT1;\n\n#[doc = \"`read()` method returns [prstat1::R](prstat1::R) reader structure\"]\n\nimpl crate::Readable for PRSTAT1 {}\n\n#[doc = \"RCU Peripheral 1 Reset Status\"]\n\npub mod prstat1;\n\n#[doc = \"RCU Peripheral 1 Reset Set\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prset1](prset1) module\"]\n\npub type PRSET1 = crate::Reg<u32, _PRSET1>;\n\n#[allow(missing_docs)]\n", "file_path": "src/scu_reset.rs", "rank": 35, "score": 60901.40091404827 }, { "content": "#[doc = \"RCU Peripheral 3 Reset Clear\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prclr3](prclr3) module\"]\n\npub type PRCLR3 = crate::Reg<u32, _PRCLR3>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRCLR3;\n\n#[doc = \"`write(|w| ..)` method takes [prclr3::W](prclr3::W) writer structure\"]\n\nimpl crate::Writable for PRCLR3 {}\n\n#[doc = \"RCU Peripheral 3 Reset Clear\"]\n\npub mod prclr3;\n", "file_path": "src/scu_reset.rs", "rank": 36, "score": 60900.86411264772 }, { "content": "#[doc = \"RCU Peripheral 2 Reset Clear\"]\n\npub mod prclr2;\n\n#[doc = \"RCU Peripheral 3 Reset Status\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prstat3](prstat3) module\"]\n\npub type PRSTAT3 = crate::Reg<u32, _PRSTAT3>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSTAT3;\n\n#[doc = \"`read()` method returns [prstat3::R](prstat3::R) reader structure\"]\n\nimpl crate::Readable for PRSTAT3 {}\n\n#[doc = \"RCU Peripheral 3 Reset Status\"]\n\npub mod prstat3;\n\n#[doc = \"RCU Peripheral 3 Reset Set\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prset3](prset3) module\"]\n\npub type PRSET3 = crate::Reg<u32, _PRSET3>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSET3;\n\n#[doc = \"`write(|w| ..)` method takes [prset3::W](prset3::W) writer structure\"]\n\nimpl crate::Writable for PRSET3 {}\n\n#[doc = \"RCU Peripheral 3 Reset Set\"]\n\npub mod prset3;\n", "file_path": "src/scu_reset.rs", "rank": 37, "score": 60899.9505020238 }, { "content": "pub mod rstclr;\n\n#[doc = \"RCU Peripheral 0 Reset Status\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prstat0](prstat0) module\"]\n\npub type PRSTAT0 = crate::Reg<u32, _PRSTAT0>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSTAT0;\n\n#[doc = \"`read()` method returns [prstat0::R](prstat0::R) reader structure\"]\n\nimpl crate::Readable for PRSTAT0 {}\n\n#[doc = \"RCU Peripheral 0 Reset Status\"]\n\npub mod prstat0;\n\n#[doc = \"RCU Peripheral 0 Reset Set\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prset0](prset0) module\"]\n\npub type PRSET0 = crate::Reg<u32, _PRSET0>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _PRSET0;\n\n#[doc = \"`write(|w| ..)` method takes [prset0::W](prset0::W) writer structure\"]\n\nimpl crate::Writable for PRSET0 {}\n\n#[doc = \"RCU Peripheral 0 Reset Set\"]\n\npub mod prset0;\n\n#[doc = \"RCU Peripheral 0 Reset Clear\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [prclr0](prclr0) module\"]\n", "file_path": "src/scu_reset.rs", "rank": 38, "score": 60898.5678647326 }, { "content": " pub prclr1: PRCLR1,\n\n #[doc = \"0x24 - RCU Peripheral 2 Reset Status\"]\n\n pub prstat2: PRSTAT2,\n\n #[doc = \"0x28 - RCU Peripheral 2 Reset Set\"]\n\n pub prset2: PRSET2,\n\n #[doc = \"0x2c - RCU Peripheral 2 Reset Clear\"]\n\n pub prclr2: PRCLR2,\n\n #[doc = \"0x30 - RCU Peripheral 3 Reset Status\"]\n\n pub prstat3: PRSTAT3,\n\n #[doc = \"0x34 - RCU Peripheral 3 Reset Set\"]\n\n pub prset3: PRSET3,\n\n #[doc = \"0x38 - RCU Peripheral 3 Reset Clear\"]\n\n pub prclr3: PRCLR3,\n\n}\n\n#[doc = \"RCU Reset Status\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [rststat](rststat) module\"]\n\npub type RSTSTAT = crate::Reg<u32, _RSTSTAT>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _RSTSTAT;\n\n#[doc = \"`read()` method returns [rststat::R](rststat::R) reader structure\"]\n", "file_path": "src/scu_reset.rs", "rank": 39, "score": 60887.93114648673 }, { "content": "#[doc = r\"Register block\"]\n\n#[repr(C)]\n\npub struct RegisterBlock {\n\n #[doc = \"0x00 - RCU Reset Status\"]\n\n pub rststat: RSTSTAT,\n\n #[doc = \"0x04 - RCU Reset Set Register\"]\n\n pub rstset: RSTSET,\n\n #[doc = \"0x08 - RCU Reset Clear Register\"]\n\n pub rstclr: RSTCLR,\n\n #[doc = \"0x0c - RCU Peripheral 0 Reset Status\"]\n\n pub prstat0: PRSTAT0,\n\n #[doc = \"0x10 - RCU Peripheral 0 Reset Set\"]\n\n pub prset0: PRSET0,\n\n #[doc = \"0x14 - RCU Peripheral 0 Reset Clear\"]\n\n pub prclr0: PRCLR0,\n\n #[doc = \"0x18 - RCU Peripheral 1 Reset Status\"]\n\n pub prstat1: PRSTAT1,\n\n #[doc = \"0x1c - RCU Peripheral 1 Reset Set\"]\n\n pub prset1: PRSET1,\n\n #[doc = \"0x20 - RCU Peripheral 1 Reset Clear\"]\n", "file_path": "src/scu_reset.rs", "rank": 40, "score": 60875.5190109204 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 41, "score": 59399.27789517614 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 42, "score": 59385.89299697688 }, { "content": "#[doc = \"Writer for register DEBUG_SEL\"]\n\npub type W = crate::W<u32, super::DEBUG_SEL>;\n\n#[doc = \"Register DEBUG_SEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::DEBUG_SEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Debug_sel\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DEBUG_SEL_AW {\n\n #[doc = \"0: receiver module and fifo_ctrl module signals are probed out\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: cmd register, Interrupt status, transmitter module and clk sdcard signals are probed out.\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DEBUG_SEL_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/sdmmc/debug_sel.rs", "rank": 43, "score": 58652.18944330827 }, { "content": " }\n\n #[doc = \"cmd register, Interrupt status, transmitter module and clk sdcard signals are probed out.\"]\n\n #[inline(always)]\n\n pub fn value2(self) -> &'a mut W {\n\n self.variant(DEBUG_SEL_AW::VALUE2)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n", "file_path": "src/sdmmc/debug_sel.rs", "rank": 44, "score": 58634.76506649636 }, { "content": " fn from(variant: DEBUG_SEL_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DEBUG_SEL`\"]\n\npub struct DEBUG_SEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEBUG_SEL_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DEBUG_SEL_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"receiver module and fifo_ctrl module signals are probed out\"]\n\n #[inline(always)]\n\n pub fn value1(self) -> &'a mut W {\n\n self.variant(DEBUG_SEL_AW::VALUE1)\n", "file_path": "src/sdmmc/debug_sel.rs", "rank": 45, 
"score": 58632.38538318614 }, { "content": "#[doc = \"Reader of register MPU_TYPE\"]\n\npub type R = crate::R<u32, super::MPU_TYPE>;\n\n#[doc = \"Reader of field `SEPARATE`\"]\n\npub type SEPARATE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DREGION`\"]\n\npub type DREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `IREGION`\"]\n\npub type IREGION_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Support for unified or separate instruction and date memory maps\"]\n\n #[inline(always)]\n\n pub fn separate(&self) -> SEPARATE_R {\n\n SEPARATE_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 8:15 - Number of supported MPU data regions\"]\n\n #[inline(always)]\n\n pub fn dregion(&self) -> DREGION_R {\n\n DREGION_R::new(((self.bits >> 8) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 16:23 - Number of supported MPU instruction regions\"]\n\n #[inline(always)]\n\n pub fn iregion(&self) -> IREGION_R {\n\n IREGION_R::new(((self.bits >> 16) & 0xff) as u8)\n\n }\n\n}\n", "file_path": "src/ppb/mpu_type.rs", "rank": 46, "score": 58611.56473017686 }, { "content": " self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Debug_sel\"]\n\n #[inline(always)]\n\n pub fn debug_sel(&mut self) -> DEBUG_SEL_W {\n\n DEBUG_SEL_W { w: self }\n\n }\n\n}\n", "file_path": "src/sdmmc/debug_sel.rs", "rank": 47, "score": 58607.318069381305 }, { "content": "#[doc = \"Writer for register PRSET1\"]\n\npub type W = crate::W<u32, super::PRSET1>;\n\n#[doc = \"Register PRSET1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRSET1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"CCU43 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU43RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU43RS_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prset1.rs", "rank": 48, 
"score": 58595.29978457242 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"LEDTS Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum LEDTSCU0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<LEDTSCU0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: LEDTSCU0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `LEDTSCU0RS`\"]\n\npub struct LEDTSCU0RS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/prset1.rs", "rank": 49, "score": 58593.44362507647 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USIC0 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USIC0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<USIC0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: USIC0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n", "file_path": "src/scu_reset/prset0.rs", "rank": 50, "score": 58588.84097495308 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USIC2 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USIC2RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<USIC2RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: USIC2RS_AW) -> Self {\n\n variant as u8 != 0\n", "file_path": "src/scu_reset/prset1.rs", "rank": 51, "score": 58588.84097495308 }, { "content": " }\n\n 
#[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU81 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU81RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU81RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU81RS_AW) -> Self {\n\n variant as u8 != 0\n", "file_path": "src/scu_reset/prset0.rs", "rank": 52, "score": 58588.84097495308 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USIC0 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USIC0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<USIC0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: USIC0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 53, "score": 58588.30807198768 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU81 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU81RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU81RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU81RS_AW) -> Self {\n\n variant as u8 != 0\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 54, "score": 
58588.30807198768 }, { "content": "#[doc = \"Writer for register RSTSET\"]\n\npub type W = crate::W<u32, super::RSTSET>;\n\n#[doc = \"Register RSTSET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RSTSET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Set Hibernate Wake-up Reset Status\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum HIBWK_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset status bit\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<HIBWK_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/rstset.rs", "rank": 55, "score": 58586.152959470215 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"DMA1 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DMA1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DMA1RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: DMA1RS_AW) -> Self {\n", "file_path": "src/scu_reset/prset2.rs", "rank": 56, "score": 58584.20646339818 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"DAC Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DACRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DACRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: DACRS_AW) -> Self {\n", "file_path": 
"src/scu_reset/prset1.rs", "rank": 57, "score": 58584.20646339818 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU41 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU41RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU41RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU41RS_AW) -> Self {\n", "file_path": "src/scu_reset/prset0.rs", "rank": 58, "score": 58584.20646339818 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"DMA1 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DMA1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DMA1RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: DMA1RS_AW) -> Self {\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 59, "score": 58583.70895907842 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU41 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU41RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU41RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU41RS_AW) -> Self 
{\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 60, "score": 58583.70895907842 }, { "content": "#[doc = \"Writer for register PRSET0\"]\n\npub type W = crate::W<u32, super::PRSET0>;\n\n#[doc = \"Register PRSET0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRSET0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"VADC Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum VADCRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<VADCRS_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prset0.rs", "rank": 61, "score": 58583.70179008662 }, { "content": "#[doc = \"Writer for register PRSET2\"]\n\npub type W = crate::W<u32, super::PRSET2>;\n\n#[doc = \"Register PRSET2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRSET2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"WDT Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum WDTRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<WDTRS_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prset2.rs", "rank": 62, "score": 58583.70179008662 }, { "content": "#[doc = \"Writer for register PRSET3\"]\n\npub type W = crate::W<u32, super::PRSET3>;\n\n#[doc = \"Register PRSET3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRSET3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"EBU Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum EBURS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<EBURS_AW> for bool {\n\n 
#[inline(always)]\n", "file_path": "src/scu_reset/prset3.rs", "rank": 63, "score": 58583.70179008662 }, { "content": "#[doc = \"Writer for register PRCLR0\"]\n\npub type W = crate::W<u32, super::PRCLR0>;\n\n#[doc = \"Register PRCLR0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRCLR0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"VADC Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum VADCRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<VADCRS_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 64, "score": 58583.26309135901 }, { "content": "#[doc = \"Writer for register PRCLR3\"]\n\npub type W = crate::W<u32, super::PRCLR3>;\n\n#[doc = \"Register PRCLR3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRCLR3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"EBU Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum EBURS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<EBURS_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prclr3.rs", "rank": 65, "score": 58583.26309135901 }, { "content": "#[doc = \"Writer for register PRCLR2\"]\n\npub type W = crate::W<u32, super::PRCLR2>;\n\n#[doc = \"Register PRCLR2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRCLR2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"WDT Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum WDTRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<WDTRS_AW> for bool 
{\n\n #[inline(always)]\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 66, "score": 58583.26309135901 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Enable Lockup Reset\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum LCKEN_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Disable reset when Lockup gets asserted\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<LCKEN_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: LCKEN_AW) -> Self {\n", "file_path": "src/scu_reset/rstclr.rs", "rank": 67, "score": 58582.731753087995 }, { "content": "#[doc = \"Writer for register RSTCLR\"]\n\npub type W = crate::W<u32, super::RSTCLR>;\n\n#[doc = \"Register RSTCLR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RSTCLR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Clear Reset Status\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum RSCLR_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Clears field RSTSTAT.RSTSTAT\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<RSCLR_AW> for bool {\n\n #[inline(always)]\n", "file_path": "src/scu_reset/rstclr.rs", "rank": 68, "score": 58582.59607902118 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"Clear Hibernate Wake-up Reset Status\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum HIBWK_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset status bit\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<HIBWK_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: HIBWK_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `HIBWK`\"]\n\npub struct HIBWK_W<'a> {\n\n w: &'a mut 
W,\n", "file_path": "src/scu_reset/rstclr.rs", "rank": 69, "score": 58581.33013235952 }, { "content": "}\n\n#[doc = \"PORTS Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PPORTSRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<PPORTSRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: PPORTSRS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `PPORTSRS`\"]\n\npub struct PPORTSRS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PPORTSRS_W<'a> {\n", "file_path": "src/scu_reset/prset1.rs", "rank": 70, "score": 58580.01507022185 }, { "content": "}\n\n#[doc = \"POSIF0 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum POSIF0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<POSIF0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: POSIF0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `POSIF0RS`\"]\n\npub struct POSIF0RS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POSIF0RS_W<'a> {\n", "file_path": "src/scu_reset/prset0.rs", "rank": 71, "score": 58580.01507022185 }, { "content": "#[doc = \"ERU1 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ERU1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<ERU1RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: ERU1RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ERU1RS`\"]\n\npub struct ERU1RS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ERU1RS_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n", "file_path": "src/scu_reset/prset0.rs", "rank": 72, "score": 58579.994267383154 }, { "content": "#[doc = \"ERU1 Reset Clear\\n\\nValue on reset: 
0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ERU1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<ERU1RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: ERU1RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ERU1RS`\"]\n\npub struct ERU1RS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ERU1RS_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 73, "score": 58579.4989355543 }, { "content": "}\n\n#[doc = \"POSIF0 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum POSIF0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<POSIF0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: POSIF0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `POSIF0RS`\"]\n\npub struct POSIF0RS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POSIF0RS_W<'a> {\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 74, "score": 58579.49560135521 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"DSD Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DSDRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DSDRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: DSDRS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DSDRS`\"]\n\npub struct DSDRS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/prset0.rs", "rank": 75, "score": 58579.48823907289 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"ETH0 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ETH0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 
1,\n\n}\n\nimpl From<ETH0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: ETH0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ETH0RS`\"]\n\npub struct ETH0RS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/prset2.rs", "rank": 76, "score": 58579.48823907289 }, { "content": " }\n\n}\n\n#[doc = \"FCE Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum FCERS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<FCERS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: FCERS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `FCERS`\"]\n\npub struct FCERS_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/scu_reset/prset2.rs", "rank": 77, "score": 58579.48823907289 }, { "content": " }\n\n}\n\n#[doc = \"CCU42 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU42RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU42RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU42RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CCU42RS`\"]\n\npub struct CCU42RS_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/scu_reset/prset0.rs", "rank": 78, "score": 58579.48823907289 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"Set Hibernate Reset\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum HIBRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<HIBRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: HIBRS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `HIBRS`\"]\n\npub struct HIBRS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/rstset.rs", "rank": 79, "score": 
58579.48823907289 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"ETH0 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ETH0RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<ETH0RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: ETH0RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ETH0RS`\"]\n\npub struct ETH0RS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 80, "score": 58578.93699814819 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"DSD Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DSDRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<DSDRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: DSDRS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DSDRS`\"]\n\npub struct DSDRS_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 81, "score": 58578.93699814819 }, { "content": " }\n\n}\n\n#[doc = \"MMC Interface Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MMCIRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<MMCIRS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: MMCIRS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `MMCIRS`\"]\n\npub struct MMCIRS_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/scu_reset/prset1.rs", "rank": 82, "score": 58578.93699814819 }, { "content": " }\n\n}\n\n#[doc = \"FCE Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum FCERS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl 
From<FCERS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: FCERS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `FCERS`\"]\n\npub struct FCERS_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 83, "score": 58578.93699814819 }, { "content": " }\n\n}\n\n#[doc = \"CCU42 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU42RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU42RS_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU42RS_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CCU42RS`\"]\n\npub struct CCU42RS_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 84, "score": 58578.93699814819 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u8) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Software Reset for CMD Line\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum SW_RST_CMD_LINE_A {\n\n #[doc = \"0: Work\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Reset\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<SW_RST_CMD_LINE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: SW_RST_CMD_LINE_A) -> Self {\n\n variant as u8 != 0\n\n }\n", "file_path": "src/sdmmc/sw_reset.rs", "rank": 85, "score": 58577.705688538466 }, { "content": "#[doc = \"Reader of register PRSTAT1\"]\n\npub type R = crate::R<u32, super::PRSTAT1>;\n\n#[doc = \"CCU43 Reset Status\\n\\nValue on reset: 1\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU43RS_A {\n\n #[doc = \"0: Reset de-asserted\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Reset asserted\"]\n\n VALUE2 = 1,\n\n}\n\nimpl From<CCU43RS_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: CCU43RS_A) -> 
Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CCU43RS`\"]\n\npub type CCU43RS_R = crate::R<bool, CCU43RS_A>;\n\nimpl CCU43RS_R {\n\n #[doc = r\"Get enumerated values variant\"]\n", "file_path": "src/scu_reset/prstat1.rs", "rank": 86, "score": 58575.150007856384 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"POSIF1 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum POSIF1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Assert reset\"]\n", "file_path": "src/scu_reset/prset0.rs", "rank": 87, "score": 58571.834661907975 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USB Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USBRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n", "file_path": "src/scu_reset/prset2.rs", "rank": 88, "score": 58571.76836331538 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = 
(self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USB Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USBRS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 89, "score": 58571.76836331538 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"USIC1 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum USIC1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n", "file_path": "src/scu_reset/prset1.rs", "rank": 90, "score": 58571.76836331538 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU80 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU80RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n", "file_path": "src/scu_reset/prset0.rs", "rank": 91, "score": 58571.76836331538 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn 
bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU80 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU80RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 92, "score": 58571.76836331538 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"POSIF1 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum POSIF1RS_AW {\n\n #[doc = \"0: No effect\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: De-assert reset\"]\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 93, "score": 58571.44919648676 }, { "content": "#[doc = \"Reader of register SW_RESET\"]\n\npub type R = crate::R<u8, super::SW_RESET>;\n\n#[doc = \"Writer for register SW_RESET\"]\n\npub type W = crate::W<u8, super::SW_RESET>;\n\n#[doc = \"Register SW_RESET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SW_RESET {\n\n type Type = u8;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Software Reset for DAT Line\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum SW_RST_DAT_LINE_A {\n\n #[doc = \"0: Work\"]\n\n VALUE1 = 0,\n\n #[doc = \"1: Reset\"]\n\n VALUE2 = 1,\n\n}\n", "file_path": "src/sdmmc/sw_reset.rs", "rank": 94, "score": 58571.23200050336 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the 
field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - CCU43 Reset Assert\"]\n\n #[inline(always)]\n\n pub fn ccu43rs(&mut self) -> CCU43RS_W {\n\n CCU43RS_W { w: self }\n\n }\n", "file_path": "src/scu_reset/prset1.rs", "rank": 95, "score": 58569.57573553532 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU40 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU40RS_AW {\n\n #[doc = \"0: No effect\"]\n", "file_path": "src/scu_reset/prclr0.rs", "rank": 96, "score": 58566.815605858406 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"CCU40 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CCU40RS_AW {\n\n #[doc = \"0: No effect\"]\n", "file_path": "src/scu_reset/prset0.rs", "rank": 97, "score": 58566.815605858406 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a 
mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"DMA0 Reset Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DMA0RS_AW {\n\n #[doc = \"0: No effect\"]\n", "file_path": "src/scu_reset/prclr2.rs", "rank": 98, "score": 58566.815605858406 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"DMA0 Reset Assert\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DMA0RS_AW {\n\n #[doc = \"0: No effect\"]\n", "file_path": "src/scu_reset/prset2.rs", "rank": 99, "score": 58566.815605858406 } ]
Rust
src/main.rs
0x4c37373230/BDumper
fac8b161d544bccdc26f9f3607ebcafcf3c53f39
#![windows_subsystem = "windows"] extern crate native_windows_derive as nwd; extern crate native_windows_gui as nwg; use { bedrock_dumper::*, nwd::NwgUi, nwg::{CheckBoxState, NativeUi}, }; #[derive(Default, NwgUi)] pub struct BedrockDumper { #[nwg_control(size: (590, 225), position: (300, 300), title: "BDumper", flags: "WINDOW|VISIBLE")] #[nwg_events( OnWindowClose: [BedrockDumper::exit_program] )] window: nwg::Window, #[nwg_control(text: "BDumper is a .pdb file dumper made in Rust by Luke7720 designed to extract \ function prototypes and RVAs (Relative Virtual Addresses) and export them into either text \ or C++ header files. It can also find specific functions within the pdb\n\ -----------------------------------------------------------------------\ ----------------------------------------------------------------------- ", size: (580, 70), position: (10, 10))] label: nwg::Label, #[nwg_control(text: "Input your .pdb file path here", size: (280, 25), position: (10, 80))] label2: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (10, 100))] pdb_path: nwg::TextInput, #[nwg_control(text: "Input your file type (.hpp or .txt) here", size: (280, 25), position: (300, 80))] label3: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (300, 100))] file_type: nwg::TextInput, #[nwg_control(text: "Input a function's name here", size: (280, 25), position: (10, 130))] label4: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (10, 150))] func_name: nwg::TextInput, #[nwg_control(text: "Include demangled function prototypes", size: (280, 25), position: (300, 150))] should_demangle: nwg::CheckBox, #[nwg_control(text: "Dump Data", size: (185, 30), position: (10, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::dump] )] dump: nwg::Button, #[nwg_control(text: "Find Function", size: (185, 30), position: (200, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::find] )] find: nwg::Button, #[nwg_control(text: "Filtered Dump", size: (185, 30), 
position: (390, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::filtered_dump] )] find_filtered: nwg::Button, } impl BedrockDumper { fn dump(&self) { let pdb_path: &str = &self.pdb_path.text(); let file_type: &str = &self.file_type.text(); let demangle = if &self.should_demangle.check_state() == &CheckBoxState::Checked { true } else { false }; match setup::dump_init(pdb_path, file_type) { Ok(dump_file) => pdb::pdb_dump(pdb_path, file_type, dump_file, demangle) .expect("ERROR: Failed to dump pdb contents"), Err(str) => { nwg::simple_message("Error", &str); return; } } } fn find(&self) { match pdb::find_function(&self.pdb_path.text(), &self.func_name.text()) { Ok(bds_func) => nwg::simple_message( "Found a match", &format!( "Function name: {}\nSymbol: {}\nRVA: {}", bds_func.name, bds_func.symbol, bds_func.rva ), ), Err(str) => nwg::simple_message("Error", &str), }; } fn filtered_dump(&self) { let pdb_path: &str = &self.pdb_path.text(); let file_type: &str = &self.file_type.text(); match setup::dump_init(pdb_path, file_type) { Ok(dump_file) => match pdb::find_functions(pdb_path, file_type, dump_file) { Err(str) => { nwg::simple_message("Error", &str); } _ => {} }, Err(str) => { nwg::simple_message("Error", &str); } } } fn exit_program(&self) { nwg::stop_thread_dispatch(); } } fn main() { nwg::init().expect("Failed to init Native Windows GUI"); setup::filter_manager(); let _app = BedrockDumper::build_ui(Default::default()).expect("Failed to build UI"); nwg::dispatch_thread_events(); }
#![windows_subsystem = "windows"] extern crate native_windows_derive as nwd; extern crate native_windows_gui as nwg; use { bedrock_dumper::*, nwd::NwgUi, nwg::{CheckBoxState, NativeUi}, }; #[derive(Default, NwgUi)] pub struct BedrockDumper { #[nwg_control(size: (590, 225), position: (300, 300), title: "BDumper", flags: "WINDOW|VISIBLE")] #[nwg_events( OnWindowClose: [BedrockDumper::exit_program] )] window: nwg::Window, #[nwg_control(text: "BDumper is a .pdb file dumper made in Rust by Luke7720 designed to extract \ function prototypes and RVAs (Relative Virtual Addresses) and export them into either text \ or C++ header files. It can also find specific functions within the pdb\n\ -----------------------------------------------------------------------\ ----------------------------------------------------------------------- ", size: (580, 70), position: (10, 10))] label: nwg::Label, #[nwg_control(text: "Input your .pdb file path here", size: (280, 25), position: (10, 80))] label2: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (10, 100))] pdb_path: nwg::TextInput, #[nwg_control(text: "Input your file type (.hpp or .txt) here", size: (280, 25), position: (300, 80))] label3: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (300, 100))] file_type: nwg::TextInput, #[nwg_control(text: "Input a function's name here", size: (280, 25), position: (10, 130))] label4: nwg::Label, #[nwg_control(text: "", size: (280, 25), position: (10, 150))] func_name: nwg::TextInput, #[nwg_control(text: "Include demangled function prototypes", size: (280, 25), position: (300, 150))] should_demangle: nwg::CheckBox, #[nwg_control(text: "Dump Data", size: (185, 30), position: (10, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::dump] )] dump: nwg::Button, #[nwg_control(text: "Find Function", size: (185, 30), position: (200, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::find] )] find: nwg::Button, #[nwg_control(text: "Filtered Dump", size: (185, 30), 
position: (390, 180))] #[nwg_events( OnButtonClick: [BedrockDumper::filtered_dump] )] find_filtered: nwg::Button,
ath, file_type, dump_file) { Err(str) => { nwg::simple_message("Error", &str); } _ => {} }, Err(str) => { nwg::simple_message("Error", &str); } } } fn exit_program(&self) { nwg::stop_thread_dispatch(); } } fn main() { nwg::init().expect("Failed to init Native Windows GUI"); setup::filter_manager(); let _app = BedrockDumper::build_ui(Default::default()).expect("Failed to build UI"); nwg::dispatch_thread_events(); }
} impl BedrockDumper { fn dump(&self) { let pdb_path: &str = &self.pdb_path.text(); let file_type: &str = &self.file_type.text(); let demangle = if &self.should_demangle.check_state() == &CheckBoxState::Checked { true } else { false }; match setup::dump_init(pdb_path, file_type) { Ok(dump_file) => pdb::pdb_dump(pdb_path, file_type, dump_file, demangle) .expect("ERROR: Failed to dump pdb contents"), Err(str) => { nwg::simple_message("Error", &str); return; } } } fn find(&self) { match pdb::find_function(&self.pdb_path.text(), &self.func_name.text()) { Ok(bds_func) => nwg::simple_message( "Found a match", &format!( "Function name: {}\nSymbol: {}\nRVA: {}", bds_func.name, bds_func.symbol, bds_func.rva ), ), Err(str) => nwg::simple_message("Error", &str), }; } fn filtered_dump(&self) { let pdb_path: &str = &self.pdb_path.text(); let file_type: &str = &self.file_type.text(); match setup::dump_init(pdb_path, file_type) { Ok(dump_file) => match pdb::find_functions(pdb_p
random
[ { "content": "# BDumper\n\nA windows BDS .pdb dumper written in rust which dumps symbols, reconstructed function prototypes from demangled symbols and RVAs (Relative Virtual Addresses) into either a C++ header (.hpp) or a text file (.txt) and can also find data corresponding to specific functions. This project was inspired by [Player]'s BDS .pdb dumper. The newest version has a GUI and can search and find specific functions while the older versions are CLI programs. The variables in the headers are named after the symbol's MD5 hashes.\n\n\n\n## Dependencies\n\n\n\n### Current version\n\n\n\n- [md5 0.7.0](https://crates.io/crates/md5)\n\n- [msvc_demangler](https://crates.io/crates/msvc-demangler)\n\n- [native-windows-gui 1.0.12](https://crates.io/crates/native-windows-gui)\n\n- [native-windows-derive 1.0.3](https://crates.io/crates/native-windows-derive)\n\n- [pdb 0.7.0](https://crates.io/crates/pdb)\n\n\n\n## Usage\n\n\n\n### Current Version UI\n\n<p align=\"center\">\n\n <img src=\"https://media.discordapp.net/attachments/891760155614642277/929838892495040553/Screenshot_70.png\" />\n\n</p>\n\n\n\n### Full Dump\n\n\n\nInput the pdb file path and the file type and then click the 'Dump Data' button which will create a SymHook file in the project/executable directory. The latter can be either '.txt' or '.hpp'. The C++ header mode uses the symbol md5 hashes as variable names\n\n\n\n### Filtered Dump\n\n\n\nUpon startup for the first time, BDumper creates a file with the name 'dumpFilter.txt'. This file will hold the functions to be dumped in this mode. BDumper will ignore lines starting with '#' (which are considered comments) and newlines. 
An example:\n\n\n\n```\n\n# Dump Filter Example\n\n# This is a comment, BDumper ignores lines starting with '#' and empty lines\n\n\n\nOceanMonumentFeature::isFeatureChunk\n\nFossilFeature::place\n\nPistonBlock::canSurvive\n\nPistonBlockActor::_attachedBlockWalker\n\nBlockSource::getSeenPercent\n\n```\n\n\n\nClick on the 'Filtered Dump' button to dump the desired functions.\n\n\n\n### Function Search\n\n\n\nIf you need to quickly hook into a function or update an RVA, this function can find an individual function with the same method the filtered dump uses. Input the function name (`PistonBlockActor::_checkAttachedBlocks` as an example) and click on the 'FInd Function' button. A window will pop up with the symbol and RVA if the function exists\n", "file_path": "README.md", "rank": 1, "score": 9372.844501988415 }, { "content": "# License\n\n\n\nThe project is dual licensed under the terms of the Apache License, Version 2.0,\n\nand the MIT License. You may obtain copies of the two licenses at\n\n\n\n* https://www.apache.org/licenses/LICENSE-2.0 and\n\n* https://opensource.org/licenses/MIT, respectively.\n\n\n\nThe following two notices apply to every file of the project.\n\n\n\n## The Apache License\n\n\n\n```\n\nCopyright 2015–2019 The md5 Developers\n\n\n\nLicensed under the Apache License, Version 2.0 (the “License”); you may not use\n\nthis file except in compliance with the License. You may obtain a copy of the\n\nLicense at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software distributed\n\nunder the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR\n\nCONDITIONS OF ANY KIND, either express or implied. 
See the License for the\n\nspecific language governing permissions and limitations under the License.\n\n```\n\n\n\n## The MIT License\n\n\n\n```\n\nCopyright 2015–2019 The md5 Developers\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\n\nthis software and associated documentation files (the “Software”), to deal in\n\nthe Software without restriction, including without limitation the rights to\n\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software is furnished to do so,\n\nsubject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "3rd party notices/LICENSE-md5.md", "rank": 2, "score": 8037.566429422628 }, { "content": "\n\n pub struct BDSFunction {\n\n pub name: String,\n\n pub symbol: String,\n\n pub rva: Rva,\n\n }\n\n\n\n impl BDSFunction {\n\n fn create_instance(name: String, symbol: String, rva: Rva) -> BDSFunction {\n\n return BDSFunction { name, symbol, rva };\n\n }\n\n }\n\n\n\n pub fn pdb_dump(pdb_path: &str, file_type: &str, mut dump_file: File, should_demangle: bool) -> pdb::Result<()> {\n\n let start = Instant::now();\n\n let file_path = File::open(&pdb_path)?;\n\n let mut pdb = pdb::PDB::open(file_path)?;\n\n let symbol_table = pdb.global_symbols()?;\n\n let address_map = pdb.address_map()?;\n\n let mut symbols = 
symbol_table.iter();\n", "file_path": "src/lib.rs", "rank": 5, "score": 16.54797078808332 }, { "content": "\n\n while let Some(symbol) = symbols.next()? {\n\n match symbol.parse() {\n\n Ok(pdb::SymbolData::Public(data)) if data.function => {\n\n let rva = data.offset.to_rva(&address_map).unwrap_or_default();\n\n\n\n if file_type == \".txt\" {\n\n write!(\n\n dump_file,\n\n \"{}\\n{}\\n{}\\n\\n\",\n\n data.name,\n\n demangle::cleanup_symbol(&data.name.to_string()),\n\n rva\n\n )\n\n .expect(\"ERROR: Could not write to file\");\n\n } else if file_type == \".hpp\" {\n\n if should_demangle {\n\n write!(\n\n dump_file,\n\n \"//{}\\n//{}\\nconstexpr unsigned int MD5_{:x} = {};\\n\\n\",\n", "file_path": "src/lib.rs", "rank": 7, "score": 15.801130484464359 }, { "content": " .write(true)\n\n .append(true)\n\n .open(\"./SymHook.hpp\")\n\n .unwrap())\n\n }\n\n _ => Err(\"Invalid File Type\"),\n\n }\n\n }\n\n}\n\n\n\npub mod setup {\n\n use {crate::files, std::fs::File, std::io::Write};\n\n\n\n pub fn filter_manager() -> bool {\n\n if !files::path_exists(\"./dumpFilter.txt\") {\n\n File::create(\"dumpFilter.txt\").unwrap();\n\n\n\n return false;\n\n }\n\n true\n", "file_path": "src/lib.rs", "rank": 8, "score": 14.299550879599146 }, { "content": " }\n\n\n\n pub fn dump_init(pdb_path: &str, file_type: &str) -> Result<File, String> {\n\n if files::path_exists(pdb_path) == false {\n\n return Err(String::from(&format!(\"File does not exist: {}\", pdb_path)));\n\n }\n\n\n\n let mut dump_file = match files::create_file(&file_type) {\n\n Ok(file) => file,\n\n Err(str) => {\n\n return Err(String::from(&format!(\"{}: {}\", str, file_type)));\n\n }\n\n };\n\n\n\n write!(\n\n dump_file,\n\n \"/*###############################################################\\\n\n \\nBDS function symbols and RVAs\\\n\n \\nFile generated by BDumper, a rust bds pdb dumper made by Luke7720\\\n\n \\n###############################################################*/\\n\"\n", "file_path": "src/lib.rs", "rank": 9, 
"score": 14.098821732180529 }, { "content": " \"Function was either not found or does not exist\",\n\n ))\n\n }\n\n\n\n pub fn find_functions(pdb_path: &str, file_type: &str, mut dump_file: File, ) -> Result<(), String> {\n\n let file = File::open(\"./dumpFilter.txt\").unwrap();\n\n let functions = BufReader::new(file);\n\n\n\n for line in functions.lines() {\n\n let line_ref: &str = &line.unwrap();\n\n\n\n if line_ref.starts_with(\"#\") || line_ref.is_empty() {\n\n continue;\n\n }\n\n\n\n match find_function(pdb_path, line_ref) {\n\n Ok(bds_func) => {\n\n if file_type == \".txt\" {\n\n write!(\n\n dump_file,\n", "file_path": "src/lib.rs", "rank": 11, "score": 13.578267008344564 }, { "content": "extern crate native_windows_gui as nwg;\n\n\n\npub mod files {\n\n pub fn path_exists(path: &str) -> bool {\n\n std::fs::metadata(path).is_ok()\n\n }\n\n\n\n pub fn create_file(file_type: &str) -> Result<std::fs::File, &str> {\n\n match file_type {\n\n \".txt\" => {\n\n std::fs::File::create(\"./SymHook.txt\").expect(\"ERROR: Could not create file\");\n\n Ok(std::fs::OpenOptions::new()\n\n .write(true)\n\n .append(true)\n\n .open(\"./SymHook.txt\")\n\n .unwrap())\n\n }\n\n \".hpp\" => {\n\n std::fs::File::create(\"SymHook.hpp\").expect(\"ERROR: Could not create file\");\n\n Ok(std::fs::OpenOptions::new()\n", "file_path": "src/lib.rs", "rank": 12, "score": 12.7345967041359 }, { "content": " )\n\n .expect(\"ERROR: Could not write to file\");\n\n\n\n if file_type == \".hpp\" {\n\n write!(dump_file, \"#pragma once\\n\").expect(\"ERROR: Could not write to file\");\n\n }\n\n\n\n Ok(dump_file)\n\n }\n\n}\n\n\n\npub mod demangle {\n\n pub fn cleanup_symbol(symbol: &str) -> String {\n\n let flags = msvc_demangler::DemangleFlags::llvm();\n\n\n\n return match msvc_demangler::demangle(symbol, flags) {\n\n Ok(res) => {\n\n let demangled_name = res.replace(\"const\", \" const\").replace(\"(\", \"( \");\n\n\n\n let mut declaration: Vec<&str> = demangled_name.split(\" \").collect();\n", 
"file_path": "src/lib.rs", "rank": 13, "score": 11.945363649339455 }, { "content": " _ => {}\n\n }\n\n }\n\n nwg::simple_message(\n\n \"Completed\",\n\n &format!(\"Completed dumping {} in {:?}\", pdb_path, start.elapsed()),\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn find_function(pdb_path: &str, function_name: &str) -> Result<BDSFunction, String> {\n\n let file_path = File::open(&pdb_path).unwrap();\n\n let mut pdb = pdb::PDB::open(file_path).unwrap();\n\n let symbol_table = pdb.global_symbols().unwrap();\n\n let address_map = pdb.address_map().unwrap();\n\n let mut symbols = symbol_table.iter();\n\n\n\n while let Some(symbol) = symbols.next().unwrap() {\n\n match symbol.parse() {\n", "file_path": "src/lib.rs", "rank": 14, "score": 11.550556547487382 }, { "content": " Ok(pdb::SymbolData::Public(data)) if data.function => {\n\n let symbol = data.name.to_string().to_string();\n\n let rva = data.offset.to_rva(&address_map).unwrap_or_default();\n\n let function_sym: Vec<&str> = function_name.split(\"::\").collect();\n\n let substr = format!(\"{}@{}\", function_sym[1], function_sym[0]);\n\n\n\n if symbol.contains(&substr) {\n\n let found_function = BDSFunction::create_instance(\n\n demangle::cleanup_symbol(&symbol),\n\n symbol,\n\n rva,\n\n );\n\n return Ok(found_function);\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n Err(String::from(\n", "file_path": "src/lib.rs", "rank": 15, "score": 10.611460787219345 }, { "content": " .replace(\" \", \" \")\n\n .replace(\" \", \" \")\n\n .replace(\"< \", \"<\")\n\n .replace(\" >\", \">\")\n\n .replace(\" &\", \"&\")\n\n .replace(\" *\", \"*\")\n\n .replace(\"( \", \"(\")\n\n }\n\n Err(_) => \"Unable to demangle symbol\".to_string(),\n\n };\n\n }\n\n}\n\n\n\npub mod pdb {\n\n use {\n\n crate::demangle,\n\n pdb::{FallibleIterator, Rva},\n\n std::io::{BufRead, BufReader},\n\n std::{fs::File, io::Write, time::Instant},\n\n };\n", "file_path": "src/lib.rs", "rank": 16, "score": 9.683844905125339 }, { "content": " data.name,\n\n 
demangle::cleanup_symbol(&data.name.to_string()),\n\n md5::compute(data.name.to_string().to_string()),\n\n rva\n\n )\n\n .expect(\"ERROR: Could not write to file\");\n\n } else {\n\n write!(\n\n dump_file,\n\n \"//{}\\nconstexpr unsigned int MD5_{:x} = {};\\n\\n\",\n\n data.name,\n\n md5::compute(data.name.to_string().to_string()),\n\n rva\n\n )\n\n .expect(\"ERROR: Could not write to file\");\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 17, "score": 9.469601901993533 }, { "content": " \"{}\\n{}\\n{}\\n\\n\",\n\n &bds_func.symbol,\n\n demangle::cleanup_symbol(&bds_func.symbol),\n\n bds_func.rva\n\n )\n\n .expect(\"ERROR: Could not write to file\");\n\n } else if file_type == \".hpp\" {\n\n write!(\n\n dump_file,\n\n \"//{}\\n//{}\\nconstexpr unsigned int MD5_{:x} = {};\\n\\n\",\n\n &bds_func.symbol,\n\n demangle::cleanup_symbol(&bds_func.symbol),\n\n md5::compute(&bds_func.symbol),\n\n bds_func.rva\n\n )\n\n .expect(\"ERROR: Could not write to file\");\n\n }\n\n }\n\n Err(str) => {\n\n return Err(str);\n", "file_path": "src/lib.rs", "rank": 18, "score": 7.136873559739362 }, { "content": " }\n\n }\n\n }\n\n\n\n nwg::simple_message(\n\n \"Completed\",\n\n &format!(\"Completed filtered dumping of {}\", pdb_path),\n\n );\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 19, "score": 6.490234440518801 } ]
Rust
kosem-gui/src/work_on_procedure.rs
idanarye/kosem
c72198c1f78855cfd49d1d0fa1bc848ac0c8f9cc
use actix::prelude::*; use gtk::prelude::*; use kosem_webapi::Uuid; use kosem_webapi::pairing_messages; use kosem_webapi::phase_control_messages; use crate::internal_messages::gui_control::{ ProcedureScreenAttach, UserClickedButton, ShowJoinMenu, }; #[derive(woab::Factories)] pub struct WorkOnProcedureFactories { pub app_work_on_procedure_window: woab::BuilderFactory, row_phase: woab::BuilderFactory, cld_caption: woab::BuilderFactory, cld_button: woab::BuilderFactory, } #[derive(typed_builder::TypedBuilder)] pub struct WorkOnProcedureActor { factories: crate::Factories, widgets: WorkOnProcedureWidgets, join_menu: Addr<crate::join_menu::JoinMenuActor>, gui_client: Addr<crate::client::GuiClientActor>, server_idx: usize, procedure: pairing_messages::AvailableProcedure, #[builder(default, setter(skip))] phases: std::collections::HashMap<Uuid, PhaseRow>, } impl Actor for WorkOnProcedureActor { type Context = Context<Self>; fn started(&mut self, ctx: &mut Self::Context) { self.widgets.lbl_title.set_text(&self.procedure.name); self.widgets.app_work_on_procedure_window.show_all(); self.gui_client.do_send(ProcedureScreenAttach { server_idx: self.server_idx, request_uid: self.procedure.uid, addr: ctx.address(), }); } } #[derive(woab::WidgetsFromBuilder)] pub struct WorkOnProcedureWidgets { app_work_on_procedure_window: gtk::ApplicationWindow, lst_phases: gtk::ListBox, lbl_title: gtk::Label, } impl actix::Handler<woab::Signal> for WorkOnProcedureActor { type Result = woab::SignalResult; fn handle(&mut self, msg: woab::Signal, _ctx: &mut Self::Context) -> Self::Result { Ok(match msg.name() { "close" => { self.join_menu.do_send(ShowJoinMenu); None } _ => msg.cant_handle()?, }) } } impl Handler<phase_control_messages::PhasePushed> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, msg: phase_control_messages::PhasePushed, ctx: &mut Self::Context) -> Self::Result { let phase_widgets: PhaseWidgets = 
self.factories.work_on_procedure.row_phase.instantiate().widgets().unwrap(); self.widgets.lst_phases.add(&phase_widgets.row_phase); for (i, component) in msg.components.iter().enumerate() { match &component.params { phase_control_messages::ComponentParams::Caption { text } => { let widgets: ComponentCaptionWidgets = self.factories.work_on_procedure.cld_caption.instantiate().widgets().unwrap(); widgets.lbl_caption.set_text(&text); phase_widgets.box_components.add(&widgets.cld_caption); } phase_control_messages::ComponentParams::Button { text } => { let widgets: ComponentButtonWidgets = self.factories.work_on_procedure.cld_button.instantiate() .connect_to(((msg.phase_uid, i), ctx.address())) .widgets().unwrap(); widgets.btn_button.set_label(&text); phase_widgets.box_components.add(&widgets.cld_button); } } } self.phases.insert(msg.phase_uid, PhaseRow { widgets: phase_widgets, msg, }); } } impl Handler<phase_control_messages::PhasePopped> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, msg: phase_control_messages::PhasePopped, _ctx: &mut Self::Context) -> Self::Result { let phase_row = if let Some(p) = self.phases.get(&msg.phase_uid) { p } else { log::warn!("Unknown phase {}", msg.phase_uid); return; }; self.widgets.lst_phases.remove(&phase_row.widgets.row_phase); } } impl Handler<pairing_messages::ProcedureFinished> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, _msg: pairing_messages::ProcedureFinished, _ctx: &mut Self::Context) -> Self::Result { self.widgets.app_work_on_procedure_window.close(); } } struct PhaseRow { widgets: PhaseWidgets, msg: phase_control_messages::PhasePushed, } #[derive(woab::WidgetsFromBuilder)] struct PhaseWidgets { row_phase: gtk::ListBoxRow, box_components: gtk::FlowBox, } #[derive(woab::WidgetsFromBuilder)] struct ComponentCaptionWidgets { cld_caption: gtk::FlowBoxChild, lbl_caption: gtk::Label, } #[derive(woab::WidgetsFromBuilder)] struct ComponentButtonWidgets { cld_button: gtk::FlowBoxChild, 
btn_button: gtk::Button, } impl actix::Handler<woab::Signal<(Uuid, usize)>> for WorkOnProcedureActor { type Result = woab::SignalResult; fn handle(&mut self, msg: woab::Signal<(Uuid, usize)>, _ctx: &mut Self::Context) -> Self::Result { let (phase_uid, component_ordinal) = *msg.tag(); let phase_row = if let Some(p) = self.phases.get(&phase_uid) { p } else { log::warn!("Unknown phase {}", phase_uid); return Ok(None); }; let component = if let Some(c) = phase_row.msg.components.get(component_ordinal) { c } else { log::warn!("Phase {} only has {} components - cannot access component {}", phase_uid, phase_row.msg.components.len(), component_ordinal); return Ok(None); }; Ok(match msg.name() { "button_clicked" => { self.gui_client.do_send(UserClickedButton { server_idx: self.server_idx, request_uid: self.procedure.uid, phase_uid, button_name: component.name.clone(), }); None } _ => msg.cant_handle()?, }) } }
use actix::prelude::*; use gtk::prelude::*; use kosem_webapi::Uuid; use kosem_webapi::pairing_messages; use kosem_webapi::phase_control_messages; use crate::internal_messages::gui_control::{ ProcedureScreenAttach, UserClickedButton, ShowJoinMenu, }; #[derive(woab::Factories)] pub struct WorkOnProcedureFactories { pub app_work_on_procedure_window: woab::BuilderFactory, row_phase: woab::BuilderFactory, cld_caption: woab::BuilderFactory, cld_button: woab::BuilderFactory, } #[derive(typed_builder::TypedBuilder)] pub struct WorkOnProcedureActor { factories: crate::Factories, widgets: WorkOnProcedureWidgets, join_menu: Addr<crate::join_menu::JoinMenuActor>, gui_client: Addr<crate::client::GuiClientActor>, server_idx: usize, procedure: pairing_messages::AvailableProcedure, #[builder(default, setter(skip))] phases: std::collections::HashMap<Uuid, PhaseRow>, } impl Actor for WorkOnProcedureActor { type Context = Context<Self>; fn started(&mut self, ctx: &mut Self::Context) { self.widgets.lbl_title.set_text(&self.procedure.name); self.widgets.app_work_on_procedure_window.show_all(); self.gui_client.do_send(ProcedureScreenAttach { server_idx: self.server_idx, request_uid: self.procedure.uid, addr: ctx.address(), }); } } #[derive(woab::WidgetsFromBuilder)] pub struct WorkOnProcedureWidgets { app_work_on_procedure_window: gtk::ApplicationWindow, lst_phases: gtk::ListBox, lbl_title: gtk::Label, } impl actix::Handler<woab::Signal> for WorkOnProcedureActor { type Result = woab::SignalResult; fn handle(&mut self, msg: woab::Signal, _ctx: &mut Self::Context) -> Self::Result { Ok(match msg.name() { "close" => { self.join_menu.do_send(ShowJoinMenu); None } _ => msg.cant_handle()?, }) } } impl Handler<phase_control_messages::PhasePushed> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, msg: phase_control_messages::PhasePushed, ctx: &mut Self::Context) -> Self::Result { let phase_widgets: PhaseWidgets = 
self.factories.work_on_procedure.row_phase.instantiate().widgets().unwrap(); self.widgets.lst_phases.add(&phase_widgets.row_phase); for (i, component) in msg.components.iter().enumerate() { match &component.params { phase_control_messages::ComponentParams::Caption { text } => { let widgets: ComponentCaptionWidgets = self.factories.work_on_procedure.cld_caption.instantiate().widgets().unwrap(); widgets.lbl_caption.set_text(&text); phase_widgets.box_components.add(&widgets.cld_caption); } phase_control_messages::ComponentParams::Button { text } => { let widgets: ComponentButtonWidgets = self.factories.work_on_procedure.cld_button.instantiate() .connect_to(((msg.phase_uid, i), ctx.address())) .widgets().unwrap(); widgets.btn_button.set_label(&text); phase_widgets.box_components.add(&widgets.cld_button); } } } self.phases.insert(msg.phase_uid, PhaseRow { widgets: phase_widgets, msg, }); } } impl Handler<phase_control_messages::PhasePopped> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, msg: phase_control_messages::PhasePopped, _ctx: &mut Self::Context) -> Self::Result { let phase_row = if let Some(p) = self.phases.get(&msg.phase_uid) { p } else { log::warn!("Unknown phase {}", msg.phase_uid); return; }; self.widgets.lst_phases.remove(&phase_row.widgets.row_phase); } } impl Handler<pairing_messages::ProcedureFinished> for WorkOnProcedureActor { type Result = (); fn handle(&mut self, _msg: pairing_messages::ProcedureFinished, _ctx: &mut Self::Context) -> Self::Result { self.widgets.app_work_on_procedure_window.close(); } } struct PhaseRow { widgets: PhaseWidgets, msg: phase_control_messages::PhasePushed, } #[derive(woab::WidgetsFromBuilder)] struct PhaseWidgets { row_phase: gtk::ListBoxRow, box_components: gtk::FlowBox, } #[derive(woab::WidgetsFromBuilder)] struct ComponentCaptionWidgets { cld_caption: gtk::FlowBoxChild, lbl_caption: gtk::Label, } #[derive(woab::WidgetsFromBuilder)] struct ComponentButtonWidgets { cld_button: gtk::FlowBoxChild, 
btn_button: gtk::Button, } impl actix::Handler<woab::Signal<(Uuid, usize)>> for WorkOnProcedureActor { type Result = woab::SignalResult; fn handle(&mut self, msg: woab::Signal<(Uuid, usize)>, _ctx: &mut Self::Context) -> Self::Result { let (phase_uid, component_
}
ordinal) = *msg.tag(); let phase_row = if let Some(p) = self.phases.get(&phase_uid) { p } else { log::warn!("Unknown phase {}", phase_uid); return Ok(None); }; let component = if let Some(c) = phase_row.msg.components.get(component_ordinal) { c } else { log::warn!("Phase {} only has {} components - cannot access component {}", phase_uid, phase_row.msg.components.len(), component_ordinal); return Ok(None); }; Ok(match msg.name() { "button_clicked" => { self.gui_client.do_send(UserClickedButton { server_idx: self.server_idx, request_uid: self.procedure.uid, phase_uid, button_name: component.name.clone(), }); None } _ => msg.cant_handle()?, }) }
function_block-function_prefixed
[ { "content": "pub fn start_gtk(settings: client_config::ClientConfig) -> anyhow::Result<()> {\n\n let factories = Factories::new(FactoriesInner {\n\n join_menu: join_menu::JoinMenuFactories::read(&*Asset::get(\"join_menu.glade\").unwrap())?,\n\n work_on_procedure: work_on_procedure::WorkOnProcedureFactories::read(&*Asset::get(\"work_on_procedure.glade\").unwrap())?,\n\n });\n\n gtk::init()?;\n\n woab::run_actix_inside_gtk_event_loop()?;\n\n\n\n let css_provider = Asset::css_provider(\"default.css\");\n\n\n\n woab::block_on(async move {\n\n factories.join_menu.app_join_menu_window.instantiate().connect_with(|bld| {\n\n let widgets: crate::join_menu::JoinMenuWidgets = bld.widgets().unwrap();\n\n gtk::StyleContext::add_provider_for_screen(\n\n &widgets.app_join_menu_window.get_screen().unwrap(),\n\n &css_provider,\n\n gtk::STYLE_PROVIDER_PRIORITY_APPLICATION);\n\n\n\n join_menu::JoinMenuActor::create(|ctx| {\n\n let gui_client = crate::client::GuiClientActor::builder()\n", "file_path": "kosem-gui/src/lib.rs", "rank": 4, "score": 91673.81326571433 }, { "content": "fn main() -> anyhow::Result<()> {\n\n flexi_logger::Logger::with_env_or_str(\"warn\").start()?;\n\n\n\n let mut settings = config::Config::default();\n\n settings.merge(config::File::with_name(\"KosemClient.toml\"))?;\n\n\n\n let settings = settings.try_into::<client_config::ClientConfig>()?;\n\n log::warn!(\"{:?}\", settings);\n\n\n\n kosem_gui::start_gtk(settings)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "kosem-gui/src/main.rs", "rank": 5, "score": 82020.60862255486 }, { "content": " def phase(self, *components):\n\n is_message_relevant = [\n\n component.is_message_relevant\n\n for component in components\n\n if component.is_message_relevant is not None]\n\n if is_message_relevant:\n\n stream = self._con.stream_messages() \n\n else:\n\n stream = None\n\n uid = self._con.call('PushPhase', components=[c.to_json() for c in components])\n\n if stream:\n\n stream = (\n\n msg for msg in stream\n\n if 
msg['params'].get('phase_uid') == uid\n\n and any(pred(msg) for pred in is_message_relevant))\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 6, "score": 76224.53712058325 }, { "content": " type Result = ();\n\n\n\n fn handle(&mut self, _msg: ConnectionClosed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n ctx.stop();\n\n }\n\n}\n\n\n\nimpl actix::Handler<RequestHuman> for ProcedureActor {\n\n type Result = <RequestHuman as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: RequestHuman, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"RequestHuman from {}: {:?}\", self.name, msg);\n\n let uid = Uuid::new_v4();\n\n self.pending_requests_for_humans.insert(uid);\n\n PairingActor::from_registry().do_send(ProcedureRequestingHuman {\n\n uid: uid,\n\n orig_request: msg,\n\n addr: ctx.address(),\n\n });\n\n Ok(uid)\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 7, "score": 60509.95412334227 }, { "content": " }\n\n}\n\n\n\nimpl actix::Handler<PushPhase> for ProcedureActor {\n\n type Result = <PushPhase as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PushPhase, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n let phase_uid = Uuid::new_v4();\n\n log::info!(\"Phase pushed: {:?}. 
Generated UID {}\", msg, phase_uid);\n\n let phase = Phase::new(msg.components);\n\n log::info!(\"Phase looks like this: {:?}\", phase);\n\n self.phase_uids.push(phase_uid);\n\n for (&request_uid, human) in self.humans.iter() {\n\n log::info!(\"Informing {} of {}\", request_uid, phase_uid);\n\n human.do_send(PhasePushed {\n\n request_uid,\n\n phase_uid,\n\n parent_uid: None,\n\n components: phase.components.clone(),\n\n });\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 8, "score": 60508.480880057265 }, { "content": "}\n\n\n\nimpl actix::Handler<ButtonClicked> for ProcedureActor {\n\n type Result = <ButtonClicked as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: ButtonClicked, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n self.con_actor.do_send(RpcMessage::new(\"ButtonClicked\", msg));\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 9, "score": 60506.69166358777 }, { "content": " }\n\n self.phases.insert(phase_uid, phase);\n\n Ok(phase_uid)\n\n }\n\n}\n\n\n\nimpl actix::Handler<PopPhase> for ProcedureActor {\n\n type Result = <PopPhase as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PopPhase, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n let _phase = self.phases.remove(&msg.phase_uid)\n\n .ok_or_else(|| KosemError::new(\"Phase does not exist\").with(\"phase_uid\", msg.phase_uid))?;\n\n for (&request_uid, human) in self.humans.iter() {\n\n human.do_send(PhasePopped {\n\n request_uid,\n\n phase_uid: msg.phase_uid,\n\n });\n\n }\n\n Ok(())\n\n }\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 10, "score": 60505.86180864853 }, { "content": "use crate::internal_messages::info_sharing;\n\n\n\n#[derive(typed_builder::TypedBuilder)]\n\npub struct ProcedureActor {\n\n con_actor: actix::Addr<WsJrpc>,\n\n pub uid: Uuid,\n\n name: String,\n\n #[builder(default)]\n\n pending_requests_for_humans: HashSet<Uuid>,\n\n #[builder(default)]\n\n 
humans: HashMap<Uuid, Addr<HumanActor>>, // NOTE: the key is the request UID, not the human UID\n\n #[builder(default)]\n\n phase_uids: Vec<Uuid>,\n\n #[builder(default)]\n\n phases: HashMap<Uuid, Phase>,\n\n}\n\n\n\nimpl actix::Actor for ProcedureActor {\n\n type Context = actix::Context<Self>;\n\n\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 11, "score": 60505.253370861086 }, { "content": " }\n\n}\n\n\n\nimpl actix::Handler<PairingPerformed> for ProcedureActor {\n\n type Result = <PairingPerformed as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PairingPerformed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"Paired request {} to human {}\", msg.request_uid, msg.human_uid);\n\n self.pending_requests_for_humans.remove(&msg.request_uid);\n\n if self.pending_requests_for_humans.is_empty() {\n\n log::info!(\"Procedure {} got all the humans it needs!\", self.name);\n\n } else {\n\n log::info!(\"Procedure {} still needs {} more humans...\", self.name, self.pending_requests_for_humans.len());\n\n }\n\n\n\n self.humans.insert(msg.request_uid, msg.human_addr.clone());\n\n\n\n let PairingPerformed { human_uid, request_uid, .. 
} = msg;\n\n\n\n ctx.spawn(\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 12, "score": 60504.07053335409 }, { "content": " fn started(&mut self, _ctx: &mut Self::Context) {\n\n log::info!(\"Starting ProcedureActor {} - {}\", self.uid, self.name);\n\n }\n\n\n\n fn stopped(&mut self, _ctx: &mut Self::Context) {\n\n log::info!(\"Ending ProcedureActor {}\", self.uid);\n\n for pending_request in self.pending_requests_for_humans.iter() {\n\n PairingActor::from_registry().do_send(RemoveRequestForHuman {\n\n uid: *pending_request,\n\n });\n\n }\n\n for human in self.humans.values() {\n\n human.do_send(ProcedureTerminated {\n\n procedure_uid: self.uid,\n\n });\n\n }\n\n }\n\n}\n\n\n\nimpl actix::Handler<ConnectionClosed> for ProcedureActor {\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 13, "score": 60503.983746274724 }, { "content": " msg.human_addr.send(info_sharing::GetInfo::<info_sharing::HumanDetails>::default())\n\n .into_actor(self)\n\n .map(move |msg, this, _ctx| {\n\n let msg = msg.unwrap();\n\n this.con_actor.do_send(RpcMessage::new(\"HumanJoined\", kosem_webapi::pairing_messages::HumanJoined {\n\n human_uid,\n\n request_uid,\n\n human_name: msg.name,\n\n }));\n\n })\n\n );\n\n\n\n for (&phase_uid, phase) in self.phases.iter() {\n\n msg.human_addr.do_send(PhasePushed {\n\n request_uid,\n\n phase_uid,\n\n parent_uid: None,\n\n components: phase.components.clone(),\n\n });\n\n }\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 14, "score": 60501.581349338776 }, { "content": "use std::collections::{HashMap, HashSet};\n\n\n\nuse actix::prelude::*;\n\n\n\nuse kosem_webapi::{Uuid, KosemError};\n\nuse kosem_webapi::pairing_messages::*;\n\nuse kosem_webapi::phase_control_messages::*;\n\n\n\nuse crate::common_types::Phase;\n\n\n\nuse crate::protocol_handlers::websocket_jsonrpc::WsJrpc;\n\n\n\nuse crate::role_actors::{PairingActor, HumanActor};\n\nuse 
crate::internal_messages::connection::{RpcMessage, ConnectionClosed};\n\nuse crate::internal_messages::pairing::{\n\n RemoveRequestForHuman,\n\n ProcedureRequestingHuman,\n\n PairingPerformed,\n\n ProcedureTerminated,\n\n};\n", "file_path": "kosem-server/src/role_actors/procedure_actor.rs", "rank": 15, "score": 60498.200279350815 }, { "content": " def close(self):\n", "file_path": "kosem-python/kosem/wsjrpc_connection.py", "rank": 16, "score": 50254.381609547636 }, { "content": "class KosemPhase(object):\n\n def __init__(self, procedure, uid, stream):\n\n self.procedure = procedure\n\n self.uid = uid\n\n self.next_component_ordinal = 0\n\n self.stream = stream\n\n\n\n def __gen_ordinal(self):\n\n ordinal = self.next_component_ordinal\n\n self.next_component_ordinal += 1\n\n return ordinal\n\n\n\n def add_caption(self, text):\n\n ordinal = self.__gen_ordinal()\n\n self.procedure._con.call('AddComponent',\n\n phase_uid=self.uid,\n\n ordinal=ordinal,\n\n type='Caption',\n\n params=dict(\n\n text=text,\n\n ))\n\n\n\n def relevant_messages(self):\n\n if self.stream:\n\n for msg in self.stream:\n\n yield msg\n\n\n\n def wait_for_button(self):\n\n for msg in self.relevant_messages():\n\n if msg['method'] == 'ButtonClicked':\n\n return msg['params'].get('button_name', None)\n\n\n\n def pop(self):\n\n self.procedure._con.call('PopPhase', phase_uid=self.uid)\n\n\n\n def __enter__(self):\n\n return self\n\n\n\n def __exit__(self, *args):\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 17, "score": 49906.31618882758 }, { "content": "#[derive(rust_embed::RustEmbed)]\n\n#[folder = \"assets\"]\n\nstruct Asset;\n\n\n\nimpl Asset {\n\n pub fn css_provider(filename: &str) -> gtk::CssProvider {\n\n let css_provider = gtk::CssProvider::new();\n\n css_provider.load_from_data(Self::get(filename).unwrap().as_ref()).unwrap();\n\n css_provider\n\n }\n\n}\n\n\n\npub struct FactoriesInner {\n\n pub join_menu: join_menu::JoinMenuFactories,\n\n pub work_on_procedure: 
work_on_procedure::WorkOnProcedureFactories,\n\n}\n\n\n\npub type Factories = std::rc::Rc<FactoriesInner>;\n\n\n", "file_path": "kosem-gui/src/lib.rs", "rank": 18, "score": 48697.31030658446 }, { "content": "fn default_name() -> String {\n\n \"Kosem Server\".to_owned()\n\n}\n\n\n", "file_path": "kosem-server/src/server_config.rs", "rank": 19, "score": 42267.292837505796 }, { "content": "fn default_port() -> u16 {\n\n 8206\n\n}\n", "file_path": "kosem-server/src/server_config.rs", "rank": 20, "score": 42267.292837505796 }, { "content": "fn default_port() -> u16 {\n\n 8206\n\n}\n", "file_path": "kosem-base-rpc-client/src/config.rs", "rank": 21, "score": 41389.28443208408 }, { "content": " fn handle(&mut self, msg: PhasePushed, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"Human {} got phase {}\", self.uid, msg.phase_uid);\n\n self.con_actor.do_send(RpcMessage::new(\"PhasePushed\", msg));\n\n }\n\n}\n\n\n\nimpl actix::Handler<PhasePopped> for HumanActor {\n\n type Result = <PhasePopped as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PhasePopped, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"Human {} got phase {}\", self.uid, msg.phase_uid);\n\n self.con_actor.do_send(RpcMessage::new(\"PhasePopped\", msg));\n\n }\n\n}\n\n\n\nimpl actix::Handler<ClickButton> for HumanActor {\n\n type Result = <ClickButton as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: ClickButton, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n self.procedure_actor.do_send(ButtonClicked {\n\n human_uid: self.uid,\n\n phase_uid: msg.phase_uid,\n\n button_name: msg.button_name,\n\n });\n\n Ok(())\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/human_actor.rs", "rank": 22, "score": 34326.76571169224 }, { "content": "}\n\n\n\nimpl actix::Actor for HumanActor {\n\n type Context = actix::Context<Self>;\n\n\n\n fn started(&mut self, _ctx: &mut Self::Context) {\n\n log::info!(\"Starting HumanActor {} - {}\", self.uid, 
self.name);\n\n }\n\n\n\n fn stopped(&mut self, _ctx: &mut Self::Context) {\n\n log::info!(\"Ending HumanActor {}\", self.uid);\n\n }\n\n}\n\n\n\nimpl actix::Handler<ConnectionClosed> for HumanActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, _msg: ConnectionClosed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n ctx.stop();\n\n }\n", "file_path": "kosem-server/src/role_actors/human_actor.rs", "rank": 23, "score": 34326.666802554064 }, { "content": "}\n\n\n\nimpl actix::Handler<CreateNewHumanActor> for JoinerActor {\n\n type Result = <CreateNewHumanActor as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: CreateNewHumanActor, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n HumanActor::builder()\n\n .con_actor(self.con_actor.clone())\n\n .procedure_actor(msg.procedure_addr)\n\n .uid(self.uid)\n\n .request_uid(msg.request_uid)\n\n .name(self.name.clone())\n\n .build().start()\n\n }\n\n}\n\n\n\nimpl actix::Handler<PairingPerformed> for JoinerActor {\n\n type Result = <PairingPerformed as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PairingPerformed, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n msg.human_addr.clone().do_send(msg);\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 24, "score": 34326.36130654886 }, { "content": "impl actix::Actor for PairingActor {\n\n type Context = actix::Context<Self>;\n\n}\n\n\n\nimpl actix::Handler<HumanAvailable> for PairingActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: HumanAvailable, _ctx: &mut Self::Context) -> Self::Result {\n\n for procedure in self.procedures_requesting_humans.values() {\n\n msg.addr.do_send(procedure.clone());\n\n }\n\n log::info!(\"Adding joiner, {} joiners already exist\", self.available_joiners.len());\n\n self.available_joiners.insert(msg.uid, msg);\n\n log::info!(\"Added joiner, {} joiners already exist\", self.available_joiners.len());\n\n }\n\n}\n\nimpl actix::Handler<ProcedureRequestingHuman> 
for PairingActor {\n\n\n\n type Result = ();\n\n\n", "file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 25, "score": 34325.29619742355 }, { "content": " fn handle(&mut self, msg: ProcedureRequestingHuman, _ctx: &mut Self::Context) -> Self::Result {\n\n log::info!(\"Adding message, {} joiners already exist\", self.available_joiners.len());\n\n for joiner in self.available_joiners.values() {\n\n joiner.addr.do_send(msg.clone());\n\n }\n\n self.procedures_requesting_humans.insert(msg.uid, msg);\n\n }\n\n}\n\n\n\nimpl actix::Handler<RemoveRequestForHuman> for PairingActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RemoveRequestForHuman, _ctx: &mut Self::Context) -> Self::Result {\n\n for joiner in self.available_joiners.values() {\n\n joiner.addr.do_send(msg.clone());\n\n }\n\n self.procedures_requesting_humans.remove(&msg.uid);\n\n }\n\n}\n\n\n", "file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 26, "score": 34325.022874247334 }, { "content": " name: self.name.clone(),\n\n });\n\n }\n\n\n\n fn stopped(&mut self, _ctx: &mut Self::Context) {\n\n log::info!(\"Ending JoinerActor {}\", self.uid);\n\n PairingActor::from_registry().do_send(RemoveAvailableHuman {\n\n uid: self.uid,\n\n });\n\n }\n\n}\n\n\n\nimpl actix::Handler<ConnectionClosed> for JoinerActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, _msg: ConnectionClosed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n ctx.stop();\n\n }\n\n}\n\n\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 27, "score": 34325.00366684159 }, { "content": "impl actix::Handler<RemoveAvailableHuman> for PairingActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RemoveAvailableHuman, _ctx: &mut Self::Context) -> Self::Result {\n\n self.available_joiners.remove(&msg.uid);\n\n }\n\n}\n\n\n\nimpl actix::Handler<HumanJoiningProcedure> for PairingActor {\n\n type Result = ResponseActFuture<Self, KosemResult<()>>;\n\n\n\n fn handle(&mut self, 
msg: HumanJoiningProcedure, _ctx: &mut Self::Context) -> Self::Result {\n\n let joiner_entry = self.available_joiners.entry(msg.human_uid);\n\n let request_entry = self.procedures_requesting_humans.entry(msg.request_uid);\n\n use std::collections::hash_map::Entry;\n\n let (joiner_entry, request_entry) = match (joiner_entry, request_entry) {\n\n (Entry::Occupied(joiner_entry), Entry::Occupied(request_entry)) => {\n\n (joiner_entry, request_entry)\n\n },\n\n (Entry::Vacant(_), _) => {\n", "file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 28, "score": 34324.86534907268 }, { "content": "impl actix::Handler<ProcedureRequestingHuman> for JoinerActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ProcedureRequestingHuman, _ctx: &mut Self::Context) -> Self::Result {\n\n self.con_actor.do_send(RpcMessage::new(\"AvailableProcedure\", kosem_webapi::pairing_messages::AvailableProcedure {\n\n uid: msg.uid,\n\n name: msg.orig_request.name,\n\n }));\n\n }\n\n}\n\n\n\nimpl actix::Handler<RemoveRequestForHuman> for JoinerActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RemoveRequestForHuman, _ctx: &mut Self::Context) -> Self::Result {\n\n self.con_actor.do_send(RpcMessage::new(\"UnavailableProcedure\", kosem_webapi::pairing_messages::UnavailableProcedure {\n\n uid: msg.uid,\n\n }));\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 29, "score": 34324.67324040964 }, { "content": " fn handle(&mut self, _msg: ProcedureTerminated, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n self.con_actor.do_send(RpcMessage::new(\"ProcedureFinished\", kosem_webapi::pairing_messages::ProcedureFinished {\n\n request_uid: self.request_uid,\n\n }));\n\n }\n\n}\n\n\n\nimpl actix::Handler<info_sharing::GetInfo<info_sharing::HumanDetails>> for HumanActor {\n\n type Result = <info_sharing::GetInfo<info_sharing::HumanDetails> as actix::Message>::Result;\n\n\n\n fn handle(&mut self, _msg: 
info_sharing::GetInfo<info_sharing::HumanDetails>, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n info_sharing::HumanDetails {\n\n name: self.name.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl actix::Handler<PhasePushed> for HumanActor {\n\n type Result = <PhasePushed as actix::Message>::Result;\n\n\n", "file_path": "kosem-server/src/role_actors/human_actor.rs", "rank": 30, "score": 34324.22551109112 }, { "content": "\n\nimpl actix::Handler<JoinProcedure> for JoinerActor {\n\n type Result = ResponseActFuture<Self, KosemResult<()>>;\n\n\n\n fn handle(&mut self, msg: JoinProcedure, _ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"Human {} joined procedure {}\", self.name, msg.uid);\n\n\n\n Box::pin(\n\n PairingActor::from_registry().send(HumanJoiningProcedure {\n\n human_uid: self.uid,\n\n request_uid: msg.uid,\n\n })\n\n .into_actor(self)\n\n .then(|result, _actor, _ctx| {\n\n let result = result.unwrap();\n\n log::warn!(\"Join result is {:?}\", result);\n\n fut::result(result)\n\n })\n\n )\n\n }\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 31, "score": 34323.658966784606 }, { "content": "}\n\n\n\nimpl actix::Handler<PairingPerformed> for HumanActor {\n\n type Result = <PairingPerformed as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: PairingPerformed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"Paired human {} to request {}\", msg.human_uid, msg.request_uid);\n\n self.con_actor.do_send(AddHumanActor {\n\n request_uid: msg.request_uid,\n\n addr: ctx.address()\n\n });\n\n self.con_actor.do_send(RpcMessage::new(\"JoinConfirmation\", kosem_webapi::pairing_messages::JoinConfirmation {\n\n request_uid: msg.request_uid,\n\n }));\n\n }\n\n}\n\n\n\nimpl actix::Handler<ProcedureTerminated> for HumanActor {\n\n type Result = <ProcedureTerminated as actix::Message>::Result;\n\n\n", "file_path": "kosem-server/src/role_actors/human_actor.rs", "rank": 32, "score": 34323.21010477675 }, { "content": 
"pub struct JoinerActor {\n\n con_actor: actix::Addr<WsJrpc>,\n\n uid: Uuid,\n\n name: String,\n\n}\n\n\n\nimpl actix::Actor for JoinerActor {\n\n type Context = actix::Context<Self>;\n\n\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n log::info!(\"Starting JoinerActor {} - {}\", self.uid, self.name);\n\n let response = kosem_webapi::handshake_messages::LoginConfirmed {\n\n uid: self.uid,\n\n };\n\n let message = RpcMessage::new(\"LoginConfirmed\", response);\n\n self.con_actor.do_send(message);\n\n\n\n PairingActor::from_registry().do_send(HumanAvailable {\n\n uid: self.uid,\n\n addr: ctx.address(),\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 33, "score": 34319.66164034749 }, { "content": "use actix::prelude::*;\n\n\n\nuse kosem_webapi::Uuid;\n\n\n\nuse crate::protocol_handlers::websocket_jsonrpc::WsJrpc;\n\n\n\nuse crate::internal_messages::connection::{RpcMessage, ConnectionClosed, AddHumanActor};\n\nuse crate::internal_messages::pairing::{PairingPerformed, ProcedureTerminated};\n\nuse crate::internal_messages::info_sharing;\n\nuse kosem_webapi::phase_control_messages::*;\n\n\n\nuse crate::role_actors::ProcedureActor;\n\n\n\n#[derive(typed_builder::TypedBuilder)]\n\npub struct HumanActor {\n\n con_actor: actix::Addr<WsJrpc>,\n\n procedure_actor: actix::Addr<ProcedureActor>,\n\n uid: Uuid,\n\n request_uid: Uuid,\n\n name: String,\n", "file_path": "kosem-server/src/role_actors/human_actor.rs", "rank": 34, "score": 34318.78363531398 }, { "content": "use std::collections::HashMap;\n\n\n\nuse actix::prelude::*;\n\n\n\nuse kosem_webapi::{Uuid, KosemResult, KosemError};\n\n\n\nuse crate::internal_messages::pairing::*;\n\n\n\n#[derive(Default)]\n\npub struct PairingActor {\n\n available_joiners: HashMap<Uuid, HumanAvailable>,\n\n procedures_requesting_humans: HashMap<Uuid, ProcedureRequestingHuman>,\n\n}\n\n\n\nimpl actix::Supervised for PairingActor {\n\n}\n\n\n\nimpl actix::SystemService for PairingActor {\n\n}\n\n\n", 
"file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 35, "score": 34315.49079165587 }, { "content": "use actix::prelude::*;\n\n\n\nuse kosem_webapi::{Uuid, KosemResult};\n\nuse kosem_webapi::pairing_messages::*;\n\n\n\nuse crate::protocol_handlers::websocket_jsonrpc::WsJrpc;\n\nuse crate::role_actors::{PairingActor, HumanActor};\n\n\n\nuse crate::internal_messages::connection::{RpcMessage, ConnectionClosed};\n\nuse crate::internal_messages::pairing::{\n\n HumanAvailable,\n\n ProcedureRequestingHuman,\n\n RemoveRequestForHuman,\n\n RemoveAvailableHuman,\n\n HumanJoiningProcedure,\n\n CreateNewHumanActor,\n\n PairingPerformed,\n\n};\n\n\n\n#[derive(typed_builder::TypedBuilder)]\n", "file_path": "kosem-server/src/role_actors/joiner_actor.rs", "rank": 36, "score": 34314.64293536654 }, { "content": " .into_actor(self)\n\n .then(move |human_addr, actor, _ctx| {\n\n let human_addr = human_addr.unwrap();\n\n let pairing_performed = PairingPerformed {\n\n human_uid: joiner_uid,\n\n human_addr,\n\n request_uid: request.uid,\n\n procedure_addr: request.addr,\n\n };\n\n\n\n joiner_addr.do_send(pairing_performed.clone());\n\n pairing_performed.procedure_addr.clone().do_send(pairing_performed);\n\n\n\n // NOTE: this does not include the joiner that accepted the request, because they were just\n\n // removed.\n\n for joiner in actor.available_joiners.values() {\n\n if joiner.uid != joiner_uid {\n\n joiner.addr.do_send(RemoveRequestForHuman {\n\n uid: request.uid,\n\n });\n\n }\n\n }\n\n fut::ok(())\n\n }))\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 37, "score": 34311.19419320189 }, { "content": " return Box::pin(fut::err(KosemError::new(\"Human is not available for handling procedures\")));\n\n },\n\n (_, Entry::Vacant(_)) => {\n\n return Box::pin(fut::err(\n\n KosemError::new(\"Request does not exist in pending requests\")\n\n .with(\"request_uid\", msg.request_uid)\n\n ));\n\n },\n\n };\n\n\n\n let joiner = 
joiner_entry.get();\n\n let joiner_addr = joiner.addr.clone();\n\n let joiner_uid = joiner.uid;\n\n let request = request_entry.remove();\n\n\n\n Box::pin(\n\n joiner_addr.send(CreateNewHumanActor {\n\n request_uid: request.uid,\n\n procedure_addr: request.addr.clone(),\n\n })\n", "file_path": "kosem-server/src/role_actors/pairing_actor.rs", "rank": 38, "score": 34309.99620891793 }, { "content": "class KosemComponent(object):\n\n name = None\n\n\n\n def named(self, name):\n\n self.name = name\n\n return self\n\n\n\n def to_json(self):\n\n return dict(\n\n type=type(self).__name__,\n\n params=self.params,\n\n name=self.name)\n\n\n", "file_path": "kosem-python/kosem/components.py", "rank": 39, "score": 34124.87572500464 }, { "content": " LoginAsProcedure => NotYetIdentifiedActor;\n\n LoginAsHuman => NotYetIdentifiedActor;\n\n RequestHuman => ProcedureActor;\n\n JoinProcedure => JoinerActor;\n\n PushPhase => ProcedureActor;\n\n PopPhase => ProcedureActor;\n\n ClickButton => HumanActor;\n\n }\n\n }\n\n\n\n pub fn notify_connection_is_closed(&self) {\n\n let msg = crate::internal_messages::connection::ConnectionClosed;\n\n\n\n match self {\n\n ActorRoleState::Init => {},\n\n ActorRoleState::NotYetIdentifiedActor(addr) => addr.do_send(msg),\n\n ActorRoleState::ProcedureActor(addr) => addr.do_send(msg),\n\n ActorRoleState::HumanActor { joiner, procedures } => {\n\n for procedure in procedures.values() {\n\n procedure.do_send(msg.clone());\n\n }\n\n joiner.do_send(msg);\n\n },\n\n }\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 40, "score": 33537.007463177 }, { "content": "}\n\n\n\nimpl ActorRoleState {\n\n pub fn start_not_yet_identified_actor(con_actor: actix::Addr<WsJrpc>) -> Self {\n\n let actor = NotYetIdentifiedActor::new(con_actor);\n\n let actor = actor.start();\n\n ActorRoleState::NotYetIdentifiedActor(actor)\n\n }\n\n\n\n fn role_name(&self) -> &'static str {\n\n match self {\n\n Self::Init => \"init\",\n\n 
Self::NotYetIdentifiedActor(_) => \"not-logged-in\",\n\n Self::ProcedureActor(_) => \"procedure\",\n\n Self::HumanActor { .. } => \"human\",\n\n }\n\n }\n\n\n\n fn variant_text_to_role_name(variant_name: &str) -> &'static str {\n\n match variant_name {\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 41, "score": 33534.632244659755 }, { "content": " None\n\n }\n\n };\n\n (ProcedureActor, $msg:expr) => {\n\n if let Self::ProcedureActor(actor) = self {\n\n Some(actor)\n\n } else {\n\n None\n\n }\n\n };\n\n (JoinerActor, $msg:expr) => {\n\n if let Self::HumanActor { joiner, .. } = self {\n\n Some(joiner)\n\n } else {\n\n None\n\n }\n\n };\n\n (HumanActor, $msg:expr) => {\n\n if let Self::HumanActor { procedures, .. } = self {\n\n Some(&procedures[&$msg.request_uid])\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 42, "score": 33533.778393591616 }, { "content": " \"Init\" => \"init\",\n\n \"NotYetIdentifiedActor\" => \"not-logged-in\",\n\n \"ProcedureActor\" => \"procedure\",\n\n \"JoinerActor\" => \"human\",\n\n \"HumanActor\" => \"human\",\n\n _ => unreachable!(\"Unhandled variant\"),\n\n }\n\n }\n\n\n\n pub fn send_request_from_connection<'de, Deser: serde::Deserializer<'de>>(\n\n &self,\n\n method: &str,\n\n params: Deser,\n\n _error_classifier: impl FnOnce(&str, Deser::Error),\n\n ) -> Result<ResponseFuture<KosemResult<serde_value::Value>>, RoutingError<Deser::Error>> {\n\n macro_rules! 
get_actor {\n\n (NotYetIdentifiedActor, $msg:expr) => {\n\n if let Self::NotYetIdentifiedActor(actor) = self {\n\n Some(actor)\n\n } else {\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 43, "score": 33532.58678015103 }, { "content": "use std::collections::HashMap;\n\n\n\nuse actix::prelude::*;\n\nuse serde::Deserialize;\n\n\n\nuse kosem_webapi::{Uuid, KosemResult};\n\nuse kosem_webapi::handshake_messages::*;\n\nuse kosem_webapi::pairing_messages::*;\n\nuse kosem_webapi::phase_control_messages::*;\n\n\n\nuse crate::protocol_handlers::websocket_jsonrpc::WsJrpc;\n\n\n\nuse crate::role_actors::{\n\n NotYetIdentifiedActor,\n\n ProcedureActor,\n\n JoinerActor,\n\n HumanActor,\n\n};\n\n\n\npub enum ActorRoleState {\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 44, "score": 33530.496256655126 }, { "content": " Init,\n\n NotYetIdentifiedActor(actix::Addr<NotYetIdentifiedActor>),\n\n ProcedureActor(actix::Addr<ProcedureActor>),\n\n HumanActor {\n\n joiner: actix::Addr<JoinerActor>,\n\n procedures: HashMap<Uuid, actix::Addr<HumanActor>>,\n\n },\n\n}\n\n\n\npub enum RoutingError<E: serde::de::Error> {\n\n MethodNotFound(String),\n\n MethodNotAllowedForRole {\n\n method: String,\n\n current_role: &'static str,\n\n allowed_roles: Vec<&'static str>,\n\n },\n\n DeserializationError {\n\n method: Option<String>,\n\n error: E,\n\n }\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 45, "score": 33527.58826005596 }, { "content": " } else {\n\n None\n\n }\n\n };\n\n }\n\n macro_rules! 
route {\n\n ($( $method:ident => $role:ident; )*) => {\n\n match method {\n\n $(\n\n stringify!($method) => {\n\n let params = $method::deserialize(params).map_err(|error| {\n\n RoutingError::DeserializationError {\n\n method: Some(method.to_owned()),\n\n error\n\n }\n\n })?;\n\n if let Some(actor) = get_actor!($role, params) {\n\n let sent = actor.send(params);\n\n Ok(Box::pin(async {\n\n let res = sent.await.unwrap()?;\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 46, "score": 33523.936806019025 }, { "content": " Ok(serde_value::to_value(res).unwrap())\n\n }))\n\n } else {\n\n Err(RoutingError::MethodNotAllowedForRole {\n\n method: method.to_owned(),\n\n current_role: self.role_name(),\n\n allowed_roles: vec![\n\n Self::variant_text_to_role_name(stringify!($role))\n\n ],\n\n })\n\n }\n\n },\n\n )*\n\n _ => {\n\n Err(RoutingError::MethodNotFound(method.to_owned()))\n\n }\n\n }\n\n }\n\n }\n\n route! {\n", "file_path": "kosem-server/src/role_actors/actor_role_state.rs", "rank": 47, "score": 33521.37345858046 }, { "content": "class KosemProcedure(object):\n\n from .components import Caption, Button\n\n\n\n def __init__(self, connection, name):\n\n self._con = connection\n\n self.name = name\n\n self.humans = []\n\n self._login()\n\n\n\n def _login(self):\n\n self.uid = self._con.call('LoginAsProcedure', name=self.name)\n\n\n\n def __enter__(self):\n\n return self\n\n\n\n def __exit__(self, *args):\n\n self._con.close()\n\n\n\n @contextmanager\n\n def request_humans(self):\n\n stream = self._con.stream_messages()\n\n human_requests = {}\n\n\n\n def request_dlg(name):\n\n uid = self._con.call('RequestHuman', name=name)\n\n human = KosemHuman(self, uid)\n\n human_requests[uid] = human\n\n return human\n\n yield request_dlg\n\n\n\n for msg in stream:\n\n if msg['method'] == 'HumanJoined':\n\n human = human_requests.pop(msg['params']['request_uid'], None)\n\n if human is None:\n\n continue\n\n human._join_confirmation(msg['params'])\n\n 
self.humans.append(human)\n\n if not human_requests:\n\n break\n\n\n\n def phase(self, *components):\n\n is_message_relevant = [\n\n component.is_message_relevant\n\n for component in components\n\n if component.is_message_relevant is not None]\n\n if is_message_relevant:\n\n stream = self._con.stream_messages() \n\n else:\n\n stream = None\n\n uid = self._con.call('PushPhase', components=[c.to_json() for c in components])\n\n if stream:\n\n stream = (\n\n msg for msg in stream\n\n if msg['params'].get('phase_uid') == uid\n\n and any(pred(msg) for pred in is_message_relevant))\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 48, "score": 32855.851996495796 }, { "content": "use std::collections::HashMap;\n\n\n\nuse kosem_webapi::phase_control_messages::Component;\n\n\n\n#[derive(Debug)]\n\npub struct Phase {\n\n pub components: Vec<Component>,\n\n pub components_names: HashMap<String, usize>,\n\n}\n\n\n\nimpl Phase {\n\n pub fn new(components: Vec<Component>) -> Self {\n\n let mut components_names = HashMap::new();\n\n for (index, component) in components.iter().enumerate() {\n\n if let Some(ref name) = component.name {\n\n components_names.insert(name.to_owned(), index);\n\n }\n\n }\n\n Self { components, components_names }\n\n }\n\n}\n", "file_path": "kosem-server/src/common_types.rs", "rank": 49, "score": 29610.30601952603 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\nuse actix::Message;\n\n\n\nuse crate::KosemResult;\n\n\n\n#[derive(Debug, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"KosemResult<Uuid>\")]\n\npub struct PushPhase {\n\n #[serde(default)]\n\n pub limit_to_human_uids: Vec<Uuid>,\n\n pub components: Vec<Component>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"()\")]\n\npub struct PhasePushed {\n\n pub request_uid: Uuid,\n\n pub phase_uid: Uuid,\n", "file_path": "kosem-webapi/src/phase_control_messages.rs", "rank": 50, "score": 28303.214650592523 }, { "content": 
"}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"KosemResult<()>\")]\n\npub struct PopPhase {\n\n pub phase_uid: Uuid,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"()\")]\n\npub struct PhasePopped {\n\n pub request_uid: Uuid,\n\n pub phase_uid: Uuid,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"KosemResult<()>\")]\n\npub struct ClickButton {\n\n pub request_uid: Uuid,\n\n pub phase_uid: Uuid,\n", "file_path": "kosem-webapi/src/phase_control_messages.rs", "rank": 51, "score": 28298.263526509272 }, { "content": " pub parent_uid: Option<Uuid>,\n\n pub components: Vec<Component>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Component {\n\n pub name: Option<String>,\n\n #[serde(flatten)]\n\n pub params: ComponentParams,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(tag = \"type\", content = \"params\")]\n\npub enum ComponentParams {\n\n Caption {\n\n text: String,\n\n },\n\n Button {\n\n text: String,\n\n },\n", "file_path": "kosem-webapi/src/phase_control_messages.rs", "rank": 52, "score": 28298.165150142915 }, { "content": " pub button_name: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, Message)]\n\n#[rtype(result=\"()\")]\n\npub struct ButtonClicked {\n\n pub human_uid: Uuid,\n\n pub phase_uid: Uuid,\n\n pub button_name: Option<String>,\n\n}\n", "file_path": "kosem-webapi/src/phase_control_messages.rs", "rank": 53, "score": 28296.06984140876 }, { "content": "class KosemComponent(object):\n\n name = None\n\n\n\n def named(self, name):\n\n self.name = name\n\n return self\n\n\n\n def to_json(self):\n\n return dict(\n\n type=type(self).__name__,\n\n params=self.params,\n\n name=self.name)\n\n\n\n is_message_relevant = None\n\n\n\n\n\nclass Caption(KosemComponent):\n\n def __init__(self, text):\n\n self.params = dict(text=text)\n\n\n\n\n\nclass Button(KosemComponent):\n\n def 
__init__(self, text):\n\n self.params = dict(text=text)\n\n\n\n def is_message_relevant(self, msg):\n\n return msg['method'] == 'ButtonClicked'\n", "file_path": "kosem-python/kosem/components.py", "rank": 61, "score": 27194.20593048221 }, { "content": "mod not_yet_identified;\n\nmod actor_role_state;\n\nmod pairing_actor;\n\nmod procedure_actor;\n\nmod joiner_actor;\n\nmod human_actor;\n\n\n\npub use actor_role_state::{ActorRoleState, RoutingError};\n\npub use not_yet_identified::NotYetIdentifiedActor;\n\npub use pairing_actor::PairingActor;\n\npub use procedure_actor::ProcedureActor;\n\npub use joiner_actor::JoinerActor;\n\npub use human_actor::HumanActor;\n", "file_path": "kosem-server/src/role_actors/mod.rs", "rank": 62, "score": 27090.51627328599 }, { "content": "fn format_deserialization_error(method: Option<String>, error: serde_json::Error) -> JrpcError {\n\n use serde_json::error::Category;\n\n match error.classify() {\n\n Category::Data => if let Some(method) = method {\n\n JrpcError {\n\n code: -32602,\n\n message: \"Invalid params\".to_owned(),\n\n data: Some(serde_json::json!({\n\n \"method_name\": method,\n\n \"error\": error.to_string(),\n\n \"line\": error.line(),\n\n \"column\": error.column(),\n\n })),\n\n }\n\n } else {\n\n JrpcError {\n\n code: -32600,\n\n message: \"Invalid Request\".to_owned(),\n\n data: Some(serde_json::json!({\n\n \"error\": error.to_string(),\n", "file_path": "kosem-server/src/protocol_handlers/websocket_jsonrpc.rs", "rank": 63, "score": 26958.600502377714 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(remote = \"Result::<serde_json::Value, JrpcError>\")]\n\nenum InternalUsageJrpcResult {\n\n #[serde(rename = \"result\")]\n\n Ok(serde_json::Value),\n\n #[serde(rename = \"error\")]\n\n Err(JrpcError),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct JrpcError {\n\n pub code: i32,\n\n pub message: String,\n\n pub data: Option<serde_json::Value>,\n\n}\n", "file_path": 
"kosem-webapi/src/protocols.rs", "rank": 64, "score": 26480.792598251173 }, { "content": " def __init__(self, text):\n", "file_path": "kosem-python/kosem/components.py", "rank": 65, "score": 26258.52858095663 }, { "content": " def named(self, name):\n\n self.name = name\n", "file_path": "kosem-python/kosem/components.py", "rank": 66, "score": 26258.52858095663 }, { "content": " def to_json(self):\n\n return dict(\n\n type=type(self).__name__,\n\n params=self.params,\n", "file_path": "kosem-python/kosem/components.py", "rank": 67, "score": 26258.52858095663 }, { "content": "class Button(KosemComponent):\n\n def __init__(self, text):\n\n self.params = dict(text=text)\n\n\n\n def is_message_relevant(self, msg):\n", "file_path": "kosem-python/kosem/components.py", "rank": 68, "score": 26258.52858095663 }, { "content": "class Caption(KosemComponent):\n\n def __init__(self, text):\n", "file_path": "kosem-python/kosem/components.py", "rank": 69, "score": 26258.52858095663 }, { "content": "import time\n\nfrom contextlib import contextmanager\n\n\n\n\n\nclass KosemProcedure(object):\n\n from .components import Caption, Button\n\n\n\n def __init__(self, connection, name):\n\n self._con = connection\n\n self.name = name\n\n self.humans = []\n\n self._login()\n\n\n\n def _login(self):\n\n self.uid = self._con.call('LoginAsProcedure', name=self.name)\n\n\n\n def __enter__(self):\n\n return self\n\n\n\n def __exit__(self, *args):\n\n self._con.close()\n\n\n\n @contextmanager\n\n def request_humans(self):\n\n stream = self._con.stream_messages()\n\n human_requests = {}\n\n\n\n def request_dlg(name):\n\n uid = self._con.call('RequestHuman', name=name)\n\n human = KosemHuman(self, uid)\n\n human_requests[uid] = human\n\n return human\n\n yield request_dlg\n\n\n\n for msg in stream:\n\n if msg['method'] == 'HumanJoined':\n\n human = human_requests.pop(msg['params']['request_uid'], None)\n\n if human is None:\n\n continue\n\n human._join_confirmation(msg['params'])\n\n 
self.humans.append(human)\n\n if not human_requests:\n\n break\n\n\n\n def phase(self, *components):\n\n is_message_relevant = [\n\n component.is_message_relevant\n\n for component in components\n\n if component.is_message_relevant is not None]\n\n if is_message_relevant:\n\n stream = self._con.stream_messages() \n\n else:\n\n stream = None\n\n uid = self._con.call('PushPhase', components=[c.to_json() for c in components])\n\n if stream:\n\n stream = (\n\n msg for msg in stream\n\n if msg['params'].get('phase_uid') == uid\n\n and any(pred(msg) for pred in is_message_relevant))\n\n return KosemPhase(self, uid, stream)\n\n\n\n\n\nclass KosemHuman(object):\n\n def __init__(self, procedure, uid):\n\n self.procedure = procedure\n\n self.request_uid = uid\n\n\n\n def _join_confirmation(self, msg_params):\n\n self.uid = msg_params['human_uid']\n\n self.name = msg_params['human_name']\n\n\n\n\n\nclass KosemPhase(object):\n\n def __init__(self, procedure, uid, stream):\n\n self.procedure = procedure\n\n self.uid = uid\n\n self.next_component_ordinal = 0\n\n self.stream = stream\n\n\n\n def __gen_ordinal(self):\n\n ordinal = self.next_component_ordinal\n\n self.next_component_ordinal += 1\n\n return ordinal\n\n\n\n def add_caption(self, text):\n\n ordinal = self.__gen_ordinal()\n\n self.procedure._con.call('AddComponent',\n\n phase_uid=self.uid,\n\n ordinal=ordinal,\n\n type='Caption',\n\n params=dict(\n\n text=text,\n\n ))\n\n\n\n def relevant_messages(self):\n\n if self.stream:\n\n for msg in self.stream:\n\n yield msg\n\n\n\n def wait_for_button(self):\n\n for msg in self.relevant_messages():\n\n if msg['method'] == 'ButtonClicked':\n\n return msg['params'].get('button_name', None)\n\n\n\n def pop(self):\n\n self.procedure._con.call('PopPhase', phase_uid=self.uid)\n\n\n\n def __enter__(self):\n\n return self\n\n\n\n def __exit__(self, *args):\n\n self.pop()\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 70, "score": 26182.91748266961 }, { "content": " type 
Context = actix::Context<Self>;\n\n}\n\n\n\nimpl actix::Handler<ConnectionClosed> for NotYetIdentifiedActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, _msg: ConnectionClosed, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n ctx.stop();\n\n }\n\n}\n\n\n\nimpl actix::Handler<LoginAsProcedure> for NotYetIdentifiedActor {\n\n type Result = <LoginAsProcedure as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: LoginAsProcedure, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"LoginAsProcedure: {:?}\", msg);\n\n let procedure_uid = Uuid::new_v4();\n\n let actor = ProcedureActor::builder().uid(procedure_uid).con_actor(self.con_actor.clone()).name(msg.name).build();\n\n let actor = actor.start();\n\n self.con_actor.do_send(SetRole::Procedure(actor));\n", "file_path": "kosem-server/src/role_actors/not_yet_identified.rs", "rank": 71, "score": 26139.567068699973 }, { "content": " (self.write_fn)(ws::Message::Close(Some(ws::CloseReason {\n\n code: ws::CloseCode::Normal,\n\n description: None,\n\n }))).unwrap();\n\n },\n\n Ok(_) => (),\n\n Err(e) => panic!(\"Protocol error {:?}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl WriteHandler<WsProtocolError> for ClientActor {\n\n}\n\n\n\n// Message Handling\n\n\n\nimpl Handler<RpcMessage> for ClientActor {\n\n type Result = <RpcMessage as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: RpcMessage, _ctx: &mut Self::Context) -> Self::Result {\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 72, "score": 26137.01469474102 }, { "content": " ctx.stop();\n\n Ok(procedure_uid)\n\n }\n\n}\n\n\n\nimpl actix::Handler<LoginAsHuman> for NotYetIdentifiedActor {\n\n type Result = <LoginAsHuman as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: LoginAsHuman, ctx: &mut actix::Context<Self>) -> Self::Result {\n\n log::info!(\"LoginAsHuman: {:?}\", msg);\n\n let human_uid = Uuid::new_v4();\n\n let actor = 
JoinerActor::builder().uid(human_uid).con_actor(self.con_actor.clone()).name(msg.name).build();\n\n let actor = actor.start();\n\n self.con_actor.do_send(SetRole::Human(actor));\n\n ctx.stop();\n\n Ok(human_uid)\n\n }\n\n}\n", "file_path": "kosem-server/src/role_actors/not_yet_identified.rs", "rank": 73, "score": 26132.396280523313 }, { "content": "}\n\n\n\nimpl StreamHandler<Result<ws::Frame, WsProtocolError>> for ClientActor {\n\n fn handle(&mut self, msg: Result<ws::Frame, WsProtocolError>, _ctx: &mut Self::Context) {\n\n match msg {\n\n Ok(ws::Frame::Ping(msg)) => {\n\n (self.write_fn)(ws::Message::Pong(msg)).unwrap();\n\n },\n\n Ok(ws::Frame::Text(txt)) => {\n\n let txt = String::from_utf8(Vec::from(txt.as_ref())).unwrap();\n\n let request: JrpcMessage = serde_json::from_str(&txt)\n\n .map_err(|err| format!(\"Unable to parse {:?} - {:?}\", txt, err))\n\n .unwrap();\n\n self.routing.rpc_message.do_send(RpcMessage {\n\n idx: Some(self.idx),\n\n method: request.method,\n\n params: request.params,\n\n }).unwrap();\n\n },\n\n Ok(ws::Frame::Close(_)) => {\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 74, "score": 26131.951128058008 }, { "content": "use actix::prelude::*;\n\n\n\nuse kosem_webapi::Uuid;\n\nuse kosem_webapi::handshake_messages::*;\n\n\n\nuse crate::protocol_handlers::websocket_jsonrpc::WsJrpc;\n\nuse crate::role_actors::{ProcedureActor, JoinerActor};\n\nuse crate::internal_messages::connection::{SetRole, ConnectionClosed};\n\n\n\npub struct NotYetIdentifiedActor {\n\n con_actor: actix::Addr<WsJrpc>,\n\n}\n\n\n\nimpl NotYetIdentifiedActor {\n\n pub fn new(con_actor: actix::Addr<WsJrpc>) -> Self {\n\n Self { con_actor }\n\n }\n\n}\n\n\n\nimpl actix::Actor for NotYetIdentifiedActor {\n", "file_path": "kosem-server/src/role_actors/not_yet_identified.rs", "rank": 75, "score": 26131.395283092017 }, { "content": "macro_rules! 
wrap_addr_as_routing {\n\n ($addr:expr) => {\n\n $crate::ClientRouting {\n\n connect_client_actor: $addr.clone().recipient(),\n\n rpc_message: $addr.clone().recipient(),\n\n }\n\n }\n\n}\n\n\n\npub struct ClientActor {\n\n idx: usize,\n\n server_config: ServerConfig,\n\n write_fn: Box<dyn FnMut(awc::ws::Message) -> std::io::Result<()>>,\n\n routing: ClientRouting,\n\n}\n\n\n\nimpl ClientActor {\n\n pub fn start_actor(idx: usize, server_config: ServerConfig, routing: ClientRouting) {\n\n let url = format!(\"http://{}:{}/ws-jrpc\", server_config.url, server_config.port);\n\n Arbiter::current().spawn_fn(move || {\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 76, "score": 26129.867501913788 }, { "content": " }\n\n }),\n\n routing,\n\n }\n\n });\n\n });\n\n });\n\n }\n\n}\n\n\n\nimpl Actor for ClientActor {\n\n type Context = Context<Self>;\n\n\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n self.routing.connect_client_actor.do_send(ConnectClientActor {\n\n idx: self.idx,\n\n server_config: self.server_config.clone(),\n\n client_actor: ctx.address(),\n\n }).expect(\"routing should be present when the GUI is created\");\n\n }\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 77, "score": 26129.203053835292 }, { "content": " actix::spawn(async move {\n\n let (response, framed) = Client::new()\n\n .ws(&url)\n\n .connect()\n\n .await\n\n .map_err(|e| {\n\n log::error!(\"Error: {}\", e);\n\n }).unwrap();\n\n log::info!(\"hello {:?}\", response);\n\n let (sink, stream) = framed.split();\n\n let _addr = ClientActor::create(move |ctx| {\n\n ClientActor::add_stream(stream, ctx);\n\n let mut sink_write = SinkWrite::new(sink, ctx);\n\n ClientActor {\n\n idx,\n\n server_config,\n\n write_fn: Box::new(move |msg| {\n\n match sink_write.write(msg) {\n\n Some(_) => Err(std::io::Error::new(std::io::ErrorKind::BrokenPipe, \"websocket sink is closed\")),\n\n None => Ok(())\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", 
"rank": 78, "score": 26128.748099192093 }, { "content": " let response = JrpcMessage {\n\n jsonrpc: \"2.0\".into(),\n\n method: msg.method,\n\n id: None,\n\n params: msg.params.into(),\n\n };\n\n let response_text = serde_json::to_string(&response).expect(\"Response must be serializable\");\n\n (self.write_fn)(ws::Message::Text(response_text.into())).unwrap();\n\n }\n\n}\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 79, "score": 26123.98394224026 }, { "content": "use actix::prelude::*;\n\nuse actix::io::{SinkWrite, WriteHandler};\n\n\n\nuse futures::stream::StreamExt;\n\n\n\nuse awc::Client;\n\nuse awc::ws;\n\nuse awc::error::WsProtocolError;\n\n\n\nuse kosem_webapi::protocols::JrpcMessage;\n\n\n\nuse crate::control_messages::*;\n\nuse crate::config::ServerConfig;\n\n\n\npub struct ClientRouting {\n\n pub connect_client_actor: Recipient<ConnectClientActor>,\n\n pub rpc_message: Recipient<RpcMessage>,\n\n}\n\n\n\n#[macro_export]\n", "file_path": "kosem-base-rpc-client/src/client_actor.rs", "rank": 80, "score": 26121.35207441215 }, { "content": " def is_message_relevant(self, msg):\n", "file_path": "kosem-python/kosem/components.py", "rank": 81, "score": 25385.09763989998 }, { "content": " def __init__(self, procedure, uid, stream):\n\n self.procedure = procedure\n\n self.uid = uid\n\n self.next_component_ordinal = 0\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 82, "score": 25282.035769276008 }, { "content": " def pop(self):\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 83, "score": 25282.035769276008 }, { "content": " def __enter__(self):\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 84, "score": 25282.035769276008 }, { "content": " def _login(self):\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 85, "score": 25282.035769276008 }, { "content": " def __exit__(self, *args):\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 86, "score": 25282.035769276008 }, { "content": "class 
KosemHuman(object):\n\n def __init__(self, procedure, uid):\n\n self.procedure = procedure\n\n self.request_uid = uid\n\n\n\n def _join_confirmation(self, msg_params):\n\n self.uid = msg_params['human_uid']\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 87, "score": 24441.08566707565 }, { "content": " def wait_for_button(self):\n\n for msg in self.relevant_messages():\n\n if msg['method'] == 'ButtonClicked':\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 88, "score": 24441.08566707565 }, { "content": " def add_caption(self, text):\n\n ordinal = self.__gen_ordinal()\n\n self.procedure._con.call('AddComponent',\n\n phase_uid=self.uid,\n\n ordinal=ordinal,\n\n type='Caption',\n\n params=dict(\n\n text=text,\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 89, "score": 24441.08566707565 }, { "content": " def request_dlg(name):\n\n uid = self._con.call('RequestHuman', name=name)\n\n human = KosemHuman(self, uid)\n\n human_requests[uid] = human\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 90, "score": 24441.08566707565 }, { "content": " def _join_confirmation(self, msg_params):\n\n self.uid = msg_params['human_uid']\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 91, "score": 24441.08566707565 }, { "content": " def request_humans(self):\n\n stream = self._con.stream_messages()\n\n human_requests = {}\n\n\n\n def request_dlg(name):\n\n uid = self._con.call('RequestHuman', name=name)\n\n human = KosemHuman(self, uid)\n\n human_requests[uid] = human\n\n return human\n\n yield request_dlg\n\n\n\n for msg in stream:\n\n if msg['method'] == 'HumanJoined':\n\n human = human_requests.pop(msg['params']['request_uid'], None)\n\n if human is None:\n\n continue\n\n human._join_confirmation(msg['params'])\n\n self.humans.append(human)\n\n if not human_requests:\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 92, "score": 24441.08566707565 }, { "content": " def __gen_ordinal(self):\n\n ordinal = 
self.next_component_ordinal\n\n self.next_component_ordinal += 1\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 93, "score": 24441.08566707565 }, { "content": " def relevant_messages(self):\n\n if self.stream:\n\n for msg in self.stream:\n", "file_path": "kosem-python/kosem/procedure.py", "rank": 94, "score": 24441.08566707565 }, { "content": "pub struct JoinMenuWidgets {\n\n pub app_join_menu_window: gtk::ApplicationWindow,\n\n #[allow(dead_code)]\n\n lst_procedures: gtk::ListBox,\n\n}\n\n\n\nimpl actix::Handler<woab::Signal> for JoinMenuActor {\n\n type Result = woab::SignalResult;\n\n\n\n fn handle(&mut self, msg: woab::Signal, _ctx: &mut Self::Context) -> Self::Result {\n\n Ok(match msg.name() {\n\n \"close\" => {\n\n gtk::main_quit();\n\n None\n\n }\n\n _ => msg.cant_handle()?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "kosem-gui/src/join_menu.rs", "rank": 95, "score": 28.38994085706517 }, { "content": " method: msg.method,\n\n id: None,\n\n params: Deserialize::deserialize(msg.params).unwrap(),\n\n };\n\n ctx.text(serde_json::to_string(&response).unwrap());\n\n }\n\n}\n\n\n\nimpl actix::Handler<SetRole> for WsJrpc {\n\n type Result = <SetRole as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: SetRole, _ctx: &mut Self::Context) -> Self::Result {\n\n match msg {\n\n SetRole::Procedure(addr) => {\n\n self.state = role_actors::ActorRoleState::ProcedureActor(addr);\n\n }\n\n SetRole::Human(addr) => {\n\n self.state = role_actors::ActorRoleState::HumanActor {\n\n joiner: addr,\n\n procedures: Default::default(),\n", "file_path": "kosem-server/src/protocol_handlers/websocket_jsonrpc.rs", "rank": 96, "score": 27.8497104319615 }, { "content": "impl Handler<ShowJoinMenu> for JoinMenuActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, _msg: ShowJoinMenu, _ctx: &mut Self::Context) -> Self::Result {\n\n self.widgets.app_join_menu_window.show_all();\n\n }\n\n}\n\n\n\nimpl Handler<MessageFromServer<pairing_messages::AvailableProcedure>> for 
JoinMenuActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: MessageFromServer<pairing_messages::AvailableProcedure>, ctx: &mut Self::Context) -> Self::Result {\n\n let procedure_uid = msg.msg.uid;\n\n let new_row_widgets: RequestRowWidgets = self.factories.join_menu.row_request.instantiate()\n\n .connect_to((procedure_uid, ctx.address()))\n\n .widgets().unwrap();\n\n new_row_widgets.lbl_request_name.set_text(&msg.msg.name);\n\n self.widgets.lst_procedures.add(&new_row_widgets.row_request);\n\n self.procedure_requests.insert(procedure_uid, RequestRow {\n\n widgets: new_row_widgets,\n", "file_path": "kosem-gui/src/join_menu.rs", "rank": 97, "score": 26.95148997672034 }, { "content": "}\n\n\n\n#[derive(typed_builder::TypedBuilder)]\n\npub struct JoinMenuActor {\n\n factories: crate::Factories,\n\n widgets: JoinMenuWidgets,\n\n gui_client: Addr<crate::client::GuiClientActor>,\n\n #[builder(default)]\n\n procedure_requests: HashMap<Uuid, RequestRow>,\n\n}\n\n\n\nimpl Actor for JoinMenuActor {\n\n type Context = Context<Self>;\n\n\n\n fn started(&mut self, _ctx: &mut Self::Context) {\n\n self.widgets.app_join_menu_window.show_all();\n\n }\n\n}\n\n\n\n#[derive(woab::WidgetsFromBuilder)]\n", "file_path": "kosem-gui/src/join_menu.rs", "rank": 98, "score": 26.69084797814338 }, { "content": "}\n\n\n\nimpl Handler<ProcedureScreenAttach> for GuiClientActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ProcedureScreenAttach, _ctx: &mut Self::Context) -> Self::Result {\n\n self.procedure_screens.insert(msg.request_uid, msg.addr);\n\n }\n\n}\n\n\n\nimpl Handler<UserClickedButton> for GuiClientActor {\n\n type Result = <UserClickedButton as actix::Message>::Result;\n\n\n\n fn handle(&mut self, msg: UserClickedButton, _ctx: &mut Self::Context) -> Self::Result {\n\n if let Some((_, client)) = self.client_actors.get(&msg.server_idx) {\n\n client.do_send(RpcMessage::new(\"ClickButton\", ClickButton {\n\n request_uid: msg.request_uid,\n\n phase_uid: 
msg.phase_uid,\n\n button_name: msg.button_name,\n\n }));\n\n }\n\n }\n\n}\n", "file_path": "kosem-gui/src/client.rs", "rank": 99, "score": 25.770896042371174 } ]
Rust
src/main.rs
codahale/veil-rs
45e3a32ad522b4ce68425442a39ccc76d92bf43c
use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use std::{fs, result}; use anyhow::Result; use clap::{App, IntoApp, Parser}; use clap::{AppSettings, Subcommand, ValueHint}; use clap_complete::generate_to; use clap_complete::Shell; use clio::{Input, Output}; use mimalloc::MiMalloc; use veil::{PublicKey, PublicKeyError, SecretKey, Signature}; #[global_allocator] static GLOBAL: MiMalloc = MiMalloc; fn main() -> Result<()> { let opts: Opts = Opts::parse(); match opts.cmd { Command::SecretKey(cmd) => cmd.run(), Command::PublicKey(cmd) => cmd.run(), Command::DeriveKey(cmd) => cmd.run(), Command::Encrypt(cmd) => cmd.run(), Command::Decrypt(cmd) => cmd.run(), Command::Sign(cmd) => cmd.run(), Command::Verify(cmd) => cmd.run(), Command::Complete(cmd) => cmd.run(), } } #[derive(Debug, Parser)] #[clap(author, version, about)] #[clap(setting = AppSettings::SubcommandRequired)] struct Opts { #[clap(subcommand)] cmd: Command, } trait Cmd { fn run(self) -> Result<()>; } #[derive(Debug, Subcommand)] #[clap(setting = AppSettings::DeriveDisplayOrder)] enum Command { SecretKey(SecretKeyArgs), PublicKey(PublicKeyArgs), DeriveKey(DeriveKeyArgs), Encrypt(EncryptArgs), Decrypt(DecryptArgs), Sign(SignArgs), Verify(VerifyArgs), Complete(CompleteArgs), } #[derive(Debug, Parser)] struct SecretKeyArgs { #[clap(value_hint = ValueHint::FilePath)] output: PathBuf, #[clap(long, default_value = "128")] time: u32, #[clap(long, default_value = "1024")] space: u32, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for SecretKeyArgs { fn run(self) -> Result<()> { let passphrase = prompt_passphrase(&self.passphrase_file)?; let secret_key = SecretKey::new(); let ciphertext = secret_key.encrypt(&passphrase, self.time, self.space); fs::write(self.output, ciphertext)?; Ok(()) } } #[derive(Debug, Parser)] struct PublicKeyArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = 
output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for PublicKeyArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let public_key = secret_key.public_key(self.key_id.to_string_lossy().as_ref()); write!(self.output.lock(), "{}", public_key)?; Ok(()) } } #[derive(Debug, Parser)] struct DeriveKeyArgs { public_key: OsString, sub_key_id: OsString, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, } impl Cmd for DeriveKeyArgs { fn run(mut self) -> Result<()> { let root = self.public_key.to_string_lossy().as_ref().parse::<PublicKey>()?; let public_key = root.derive(self.sub_key_id.to_string_lossy().as_ref()); write!(self.output.lock(), "{}", public_key)?; Ok(()) } } #[derive(Debug, Parser)] struct EncryptArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] plaintext: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath)] ciphertext: Output, #[clap(required = true)] recipients: Vec<OsString>, #[clap(long, default_value = "0")] fakes: usize, #[clap(long, default_value = "0")] padding: u64, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for EncryptArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let pks = self .recipients .iter() .map(|s| s.to_string_lossy().as_ref().parse::<PublicKey>()) .collect::<result::Result<Vec<PublicKey>, PublicKeyError>>()?; private_key.encrypt( &mut self.plaintext.lock(), &mut self.ciphertext.lock(), pks, self.fakes, 
self.padding, )?; Ok(()) } } #[derive(Debug, Parser)] struct DecryptArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] ciphertext: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath)] plaintext: Output, sender: OsString, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for DecryptArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let sender = self.sender.to_string_lossy().parse()?; private_key.decrypt(&mut self.ciphertext.lock(), &mut self.plaintext.lock(), &sender)?; Ok(()) } } #[derive(Debug, Parser)] struct SignArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] message: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for SignArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let sig = private_key.sign(&mut self.message.lock())?; write!(self.output.lock(), "{}", sig)?; Ok(()) } } #[derive(Debug, Parser)] struct VerifyArgs { public_key: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] message: Input, signature: OsString, } impl Cmd for VerifyArgs { fn run(mut self) -> Result<()> { let signer = self.public_key.to_string_lossy().as_ref().parse::<PublicKey>()?; let sig = 
self.signature.to_string_lossy().as_ref().parse::<Signature>()?; signer.verify(&mut self.message.lock(), &sig)?; Ok(()) } } #[derive(Debug, Parser)] #[clap(setting = AppSettings::Hidden)] struct CompleteArgs { shell: Shell, #[clap(value_hint = ValueHint::DirPath)] output: OsString, } impl Cmd for CompleteArgs { fn run(self) -> Result<()> { let mut app: App = Opts::into_app(); generate_to(self.shell, &mut app, "veil", &self.output)?; Ok(()) } } fn decrypt_secret_key(passphrase_file: &Option<PathBuf>, path: &Path) -> Result<SecretKey> { let passphrase = prompt_passphrase(passphrase_file)?; let ciphertext = fs::read(path)?; let sk = SecretKey::decrypt(&passphrase, &ciphertext)?; Ok(sk) } fn prompt_passphrase(passphrase_file: &Option<PathBuf>) -> Result<String> { match passphrase_file { Some(p) => Ok(fs::read_to_string(p)?), None => Ok(rpassword::read_password_from_tty(Some("Enter passphrase: "))?), } } fn input_from_os_str(path: &OsStr) -> Result<Input, String> { Input::new(path).map_err(|e| e.to_string()) } fn output_from_os_str(path: &OsStr) -> Result<Output, String> { Output::new(path).map_err(|e| e.to_string()) }
use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use std::{fs, result}; use anyhow::Result; use clap::{App, IntoApp, Parser}; use clap::{AppSettings, Subcommand, ValueHint}; use clap_complete::generate_to; use clap_complete::Shell; use clio::{Input, Output}; use mimalloc::MiMalloc; use veil::{PublicKey, PublicKeyError, SecretKey, Signature}; #[global_allocator] static GLOBAL: MiMalloc = MiMalloc; fn main() -> Result<()> { let opts: Opts = Opts::parse(); match opts.cmd { Command::SecretKey(cmd) => cmd.run(), Command::PublicKey(cmd) => cmd.run(), Command::DeriveKey(cmd) => cmd.run(), Command::Encrypt(cmd) => cmd.run(), Command::Decrypt(cmd) => cmd.run(), Command::Sign(cmd) => cmd.run(), Command::Verify(cmd) => cmd.run(), Command::Complete(cmd) => cmd.run(), } } #[derive(Debug, Parser)] #[clap(author, version, about)] #[clap(setting = AppSettings::SubcommandRequired)] struct Opts { #[clap(subcommand)] cmd: Command, } trait Cmd { fn run(self) -> Result<()>; } #[derive(Debug, Subcommand)] #[clap(setting = AppSettings::DeriveDisplayOrder)] enum Command { SecretKey(SecretKeyArgs), PublicKey(PublicKeyArgs), DeriveKey(DeriveKeyArgs), Encrypt(EncryptArgs), Decrypt(DecryptArgs), Sign(SignArgs), Verify(VerifyArgs), Complete(CompleteArgs), } #[derive(Debug, Parser)] struct SecretKeyArgs { #[clap(value_hint = ValueHint::FilePath)] output: PathBuf, #[clap(long, default_value = "128")] time: u32, #[clap(long, default_value = "1024")] space: u32, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for SecretKeyArgs {
} #[derive(Debug, Parser)] struct PublicKeyArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for PublicKeyArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let public_key = secret_key.public_key(self.key_id.to_string_lossy().as_ref()); write!(self.output.lock(), "{}", public_key)?; Ok(()) } } #[derive(Debug, Parser)] struct DeriveKeyArgs { public_key: OsString, sub_key_id: OsString, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, } impl Cmd for DeriveKeyArgs { fn run(mut self) -> Result<()> { let root = self.public_key.to_string_lossy().as_ref().parse::<PublicKey>()?; let public_key = root.derive(self.sub_key_id.to_string_lossy().as_ref()); write!(self.output.lock(), "{}", public_key)?; Ok(()) } } #[derive(Debug, Parser)] struct EncryptArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] plaintext: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath)] ciphertext: Output, #[clap(required = true)] recipients: Vec<OsString>, #[clap(long, default_value = "0")] fakes: usize, #[clap(long, default_value = "0")] padding: u64, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for EncryptArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let pks = self .recipients .iter() .map(|s| s.to_string_lossy().as_ref().parse::<PublicKey>()) 
.collect::<result::Result<Vec<PublicKey>, PublicKeyError>>()?; private_key.encrypt( &mut self.plaintext.lock(), &mut self.ciphertext.lock(), pks, self.fakes, self.padding, )?; Ok(()) } } #[derive(Debug, Parser)] struct DecryptArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] ciphertext: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath)] plaintext: Output, sender: OsString, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for DecryptArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let sender = self.sender.to_string_lossy().parse()?; private_key.decrypt(&mut self.ciphertext.lock(), &mut self.plaintext.lock(), &sender)?; Ok(()) } } #[derive(Debug, Parser)] struct SignArgs { #[clap(value_hint = ValueHint::FilePath)] secret_key: PathBuf, key_id: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] message: Input, #[clap(parse(try_from_os_str = output_from_os_str), value_hint = ValueHint::FilePath, default_value="-")] output: Output, #[clap(long, value_hint = ValueHint::FilePath)] passphrase_file: Option<PathBuf>, } impl Cmd for SignArgs { fn run(mut self) -> Result<()> { let secret_key = decrypt_secret_key(&self.passphrase_file, &self.secret_key)?; let private_key = secret_key.private_key(self.key_id.to_string_lossy().as_ref()); let sig = private_key.sign(&mut self.message.lock())?; write!(self.output.lock(), "{}", sig)?; Ok(()) } } #[derive(Debug, Parser)] struct VerifyArgs { public_key: OsString, #[clap(parse(try_from_os_str = input_from_os_str), value_hint = ValueHint::FilePath)] message: Input, signature: OsString, } impl Cmd for VerifyArgs { fn run(mut self) -> 
Result<()> { let signer = self.public_key.to_string_lossy().as_ref().parse::<PublicKey>()?; let sig = self.signature.to_string_lossy().as_ref().parse::<Signature>()?; signer.verify(&mut self.message.lock(), &sig)?; Ok(()) } } #[derive(Debug, Parser)] #[clap(setting = AppSettings::Hidden)] struct CompleteArgs { shell: Shell, #[clap(value_hint = ValueHint::DirPath)] output: OsString, } impl Cmd for CompleteArgs { fn run(self) -> Result<()> { let mut app: App = Opts::into_app(); generate_to(self.shell, &mut app, "veil", &self.output)?; Ok(()) } } fn decrypt_secret_key(passphrase_file: &Option<PathBuf>, path: &Path) -> Result<SecretKey> { let passphrase = prompt_passphrase(passphrase_file)?; let ciphertext = fs::read(path)?; let sk = SecretKey::decrypt(&passphrase, &ciphertext)?; Ok(sk) } fn prompt_passphrase(passphrase_file: &Option<PathBuf>) -> Result<String> { match passphrase_file { Some(p) => Ok(fs::read_to_string(p)?), None => Ok(rpassword::read_password_from_tty(Some("Enter passphrase: "))?), } } fn input_from_os_str(path: &OsStr) -> Result<Input, String> { Input::new(path).map_err(|e| e.to_string()) } fn output_from_os_str(path: &OsStr) -> Result<Output, String> { Output::new(path).map_err(|e| e.to_string()) }
fn run(self) -> Result<()> { let passphrase = prompt_passphrase(&self.passphrase_file)?; let secret_key = SecretKey::new(); let ciphertext = secret_key.encrypt(&passphrase, self.time, self.space); fs::write(self.output, ciphertext)?; Ok(()) }
function_block-function_prefix_line
[ { "content": "fn init(passphrase: &[u8], salt: &[u8], time: u32, space: u32) -> Strobe {\n\n let mut pbenc = Strobe::new(b\"veil.pbenc\", SecParam::B128);\n\n\n\n // Initialize protocol with metadata.\n\n pbenc.meta_ad_u32(DELTA as u32);\n\n pbenc.meta_ad_u32(N as u32);\n\n pbenc.meta_ad_u32(MAC_LEN as u32);\n\n pbenc.meta_ad_u32(time);\n\n pbenc.meta_ad_u32(space);\n\n\n\n // Key with the passphrase and include the salt as associated data.\n\n pbenc.key(passphrase, false);\n\n pbenc.ad(salt, false);\n\n\n\n // Allocate buffers.\n\n let mut ctr = 0u64;\n\n let mut idx = [0u8; N];\n\n let mut buf = vec![[0u8; N]; space as usize];\n\n\n\n // Step 1: Expand input into buffer.\n", "file_path": "src/pbenc.rs", "rank": 2, "score": 141753.65034103443 }, { "content": "#[must_use]\n\npub fn encrypt(passphrase: &str, time: u32, space: u32, plaintext: &[u8]) -> Vec<u8> {\n\n // Generate a random salt.\n\n let salt: [u8; SALT_LEN] = util::rand_array();\n\n\n\n // Perform the balloon hashing.\n\n let mut pbenc = init(passphrase.nfkc().to_string().as_bytes(), &salt, time, space);\n\n\n\n // Allocate an output buffer.\n\n let mut out = vec![0u8; CT_OFFSET + plaintext.len() + MAC_LEN];\n\n\n\n // Encode the time and space parameters.\n\n out[TIME_OFFSET..SPACE_OFFSET].copy_from_slice(&time.to_le_bytes());\n\n out[SPACE_OFFSET..SALT_OFFSET].copy_from_slice(&space.to_le_bytes());\n\n\n\n // Copy the salt.\n\n out[SALT_OFFSET..CT_OFFSET].copy_from_slice(&salt);\n\n\n\n // Copy the plaintext and encrypt it.\n\n out[CT_OFFSET..CT_OFFSET + plaintext.len()].copy_from_slice(plaintext);\n\n pbenc.send_enc(&mut out[CT_OFFSET..CT_OFFSET + plaintext.len()], false);\n\n\n\n // Generate a MAC.\n\n pbenc.send_mac(&mut out[CT_OFFSET + plaintext.len()..], false);\n\n\n\n out\n\n}\n\n\n\n/// Decrypt the given ciphertext using the given passphrase.\n", "file_path": "src/pbenc.rs", "rank": 5, "score": 132239.30573125565 }, { "content": "#[test]\n\nfn sign_and_verify_message() -> Result<()> {\n\n 
let dir = tempfile::tempdir()?;\n\n\n\n // Alice picks a passphrase.\n\n let passphrase_path = &dir.path().join(\"passphrase-a\");\n\n fs::write(passphrase_path, \"excelsior\")?;\n\n\n\n // Alice generates a secret key.\n\n let secret_key_path = &dir.path().join(\"secret-key-a\");\n\n create_secret_key(secret_key_path, passphrase_path)?;\n\n\n\n // Alice generates a public key.\n\n let public_key = generate_public_key(secret_key_path, passphrase_path, \"/friends\")?;\n\n\n\n // Alice writes a plaintext message.\n\n let message_file = &dir.path().join(\"message\");\n\n fs::write(message_file, \"this is a public message\")?;\n\n\n\n // Alice signs the message.\n\n let sig = cmd!(\n", "file_path": "tests/integration_test.rs", "rank": 10, "score": 83669.90698932795 }, { "content": "fn create_secret_key(secret_key_path: &PathBuf, passphrase_path: &PathBuf) -> Result<()> {\n\n cmd!(\n\n VEIL_PATH,\n\n \"secret-key\",\n\n secret_key_path,\n\n \"--passphrase-file\",\n\n passphrase_path,\n\n \"--time\",\n\n \"10\",\n\n \"--space\",\n\n \"15\",\n\n )\n\n .run()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/integration_test.rs", "rank": 11, "score": 82630.34412239268 }, { "content": "#[test]\n\npub fn encrypt_and_decrypt_a_message() -> Result<()> {\n\n let dir = tempfile::tempdir()?;\n\n\n\n // Alice picks a passphrase.\n\n let passphrase_path_a = &dir.path().join(\"passphrase-a\");\n\n fs::write(passphrase_path_a, \"excelsior\")?;\n\n\n\n // Alice generates a secret key.\n\n let secret_key_path_a = &dir.path().join(\"secret-key-a\");\n\n create_secret_key(secret_key_path_a, passphrase_path_a)?;\n\n\n\n // Alice generates a public key.\n\n let public_key_a = generate_public_key(secret_key_path_a, passphrase_path_a, \"/friends/bea\")?;\n\n\n\n // Bea picks a passphrase.\n\n let passphrase_path_b = &dir.path().join(\"passphrase-b\");\n\n fs::write(passphrase_path_b, \"dingus\")?;\n\n\n\n // Bea generates a secret key.\n\n let secret_key_path_b = 
&dir.path().join(\"secret-key-b\");\n", "file_path": "tests/integration_test.rs", "rank": 12, "score": 78864.25732230773 }, { "content": "/// Additional convenience methods for [Strobe] instances.\n\npub trait StrobeExt {\n\n /// Add the given `u32` as little endian encoded meta associated data.\n\n fn meta_ad_u32(&mut self, n: u32);\n\n\n\n /// Add the compressed form of the given point as associated data.\n\n fn ad_point(&mut self, q: &RistrettoPoint);\n\n\n\n /// Derive a scalar from PRF output.\n\n #[must_use]\n\n fn prf_scalar(&mut self) -> Scalar;\n\n\n\n /// Derive an array from PRF output.\n\n #[must_use]\n\n fn prf_array<const N: usize>(&mut self) -> [u8; N];\n\n\n\n /// Clone the current instance, key it with the given secret, key it again with random data, and\n\n /// pass the clone to the given function.\n\n #[must_use]\n\n fn hedge<R, F>(&self, secret: &[u8], f: F) -> R\n\n where\n", "file_path": "src/util.rs", "rank": 21, "score": 47748.292220789095 }, { "content": "struct RngReader(ThreadRng);\n\n\n\nimpl Read for RngReader {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n self.0.fill_bytes(buf);\n\n Ok(buf.len())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Cursor;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n pub fn round_trip() -> Result<()> {\n\n let d_s = Scalar::random(&mut rand::thread_rng());\n\n let q_s = G * &d_s;\n\n\n", "file_path": "src/mres.rs", "rank": 22, "score": 45148.71028894528 }, { "content": "#[must_use]\n\npub fn encapsulate(\n\n d_s: &Scalar,\n\n q_s: &RistrettoPoint,\n\n d_e: &Scalar,\n\n q_e: &RistrettoPoint,\n\n q_r: &RistrettoPoint,\n\n plaintext: &[u8],\n\n) -> Vec<u8> {\n\n // Allocate a buffer for output and fill it with plaintext.\n\n let mut out = vec![0u8; POINT_LEN + plaintext.len() + MAC_LEN];\n\n out[..POINT_LEN].copy_from_slice(q_e.compress().as_bytes());\n\n out[POINT_LEN..POINT_LEN + plaintext.len()].copy_from_slice(plaintext);\n\n\n\n // Initialize the protocol.\n\n let mut akem = 
Strobe::new(b\"veil.akem\", SecParam::B128);\n\n akem.meta_ad_u32(MAC_LEN as u32);\n\n\n\n // Include the sender and receiver as associated data.\n\n akem.ad_point(q_s);\n\n akem.ad_point(q_r);\n", "file_path": "src/akem.rs", "rank": 23, "score": 42281.83536746386 }, { "content": "#[must_use]\n\npub fn decapsulate(\n\n d_r: &Scalar,\n\n q_r: &RistrettoPoint,\n\n q_s: &RistrettoPoint,\n\n ciphertext: &[u8],\n\n) -> Option<(RistrettoPoint, Vec<u8>)> {\n\n // Ensure the ciphertext has a point and MAC, at least.\n\n if ciphertext.len() < POINT_LEN + MAC_LEN {\n\n return None;\n\n }\n\n\n\n // Break the input up into its components.\n\n let mut q_e = Vec::from(ciphertext);\n\n let mut ciphertext = q_e.split_off(POINT_LEN);\n\n let mut mac = ciphertext.split_off(ciphertext.len() - MAC_LEN);\n\n\n\n // Initialize the protocol.\n\n let mut akem = Strobe::new(b\"veil.akem\", SecParam::B128);\n\n akem.meta_ad_u32(MAC_LEN as u32);\n\n\n", "file_path": "src/akem.rs", "rank": 24, "score": 42281.83536746386 }, { "content": "fn generate_public_key(\n\n secret_key_path: &PathBuf,\n\n passphrase_path: &PathBuf,\n\n key_id: &str,\n\n) -> Result<String> {\n\n let out = cmd!(\n\n VEIL_PATH,\n\n \"public-key\",\n\n secret_key_path,\n\n key_id,\n\n \"--passphrase-file\",\n\n passphrase_path\n\n )\n\n .read()?;\n\n Ok(out)\n\n}\n\n\n", "file_path": "tests/integration_test.rs", "rank": 25, "score": 41648.9316512464 }, { "content": "fn decrypt_header<R, W>(\n\n reader: &mut R,\n\n verifier: &mut W,\n\n d_r: &Scalar,\n\n q_r: &RistrettoPoint,\n\n q_s: &RistrettoPoint,\n\n) -> Result<Option<([u8; DEK_LEN], RistrettoPoint)>>\n\nwhere\n\n R: Read,\n\n W: Write,\n\n{\n\n let mut buf = [0u8; ENC_HEADER_LEN];\n\n let mut hdr_offset = 0u64;\n\n\n\n // Iterate through blocks, looking for an encrypted header that can be decrypted.\n\n loop {\n\n match reader.read_exact(&mut buf) {\n\n Ok(()) => {\n\n // Pass the block to the verifier.\n\n verifier.write_all(&buf)?;\n", "file_path": "src/mres.rs", 
"rank": 26, "score": 38798.7008349847 }, { "content": "fn decrypt_message<R, W>(\n\n reader: &mut R,\n\n writer: &mut W,\n\n verifier: &mut Verifier,\n\n mres: &mut Strobe,\n\n) -> Result<(u64, [u8; SIGNATURE_LEN])>\n\nwhere\n\n R: Read,\n\n W: Write,\n\n{\n\n let mut written = 0u64;\n\n let mut input = [0u8; 32 * 1024];\n\n let mut buf = Vec::with_capacity(input.len() + SIGNATURE_LEN);\n\n\n\n // Prep for streaming decryption.\n\n mres.recv_enc(&mut [], false);\n\n\n\n // Read through src in 32KiB chunks, keeping the last 64 bytes as the signature.\n\n let mut n = usize::MAX;\n\n while n > 0 {\n", "file_path": "src/mres.rs", "rank": 27, "score": 38798.7008349847 }, { "content": "/// Encrypt the contents of `reader` such that they can be decrypted and verified by all members of\n\n/// `q_rs` and write the ciphertext to `writer` with `padding` bytes of random data added.\n\npub fn encrypt<R, W>(\n\n reader: &mut R,\n\n writer: &mut W,\n\n d_s: &Scalar,\n\n q_s: &RistrettoPoint,\n\n q_rs: Vec<RistrettoPoint>,\n\n padding: u64,\n\n) -> Result<u64>\n\nwhere\n\n R: Read,\n\n W: Write,\n\n{\n\n // Initialize a protocol and add the MAC length and sender's public key as associated data.\n\n let mut mres = Strobe::new(b\"veil.mres\", SecParam::B128);\n\n mres.meta_ad_u32(MAC_LEN as u32);\n\n mres.ad_point(q_s);\n\n\n\n // Derive a random ephemeral key pair and DEK from the protocol's current state, the sender's\n\n // private key, and a random nonce.\n\n let (d_e, q_e, dek) = mres.hedge(d_s.as_bytes(), |clone| {\n", "file_path": "src/mres.rs", "rank": 28, "score": 37713.81297902657 }, { "content": "/// Decrypt the contents of `reader` iff they were originally encrypted by `q_s` for `q_r` and write\n\n/// the plaintext to `writer`.\n\npub fn decrypt<R, W>(\n\n reader: &mut R,\n\n writer: &mut W,\n\n d_r: &Scalar,\n\n q_r: &RistrettoPoint,\n\n q_s: &RistrettoPoint,\n\n) -> Result<(bool, u64)>\n\nwhere\n\n R: Read,\n\n W: Write,\n\n{\n\n // Initialize a protocol and add the 
MAC length and sender's public key as associated data.\n\n let mut mres = Strobe::new(b\"veil.mres\", SecParam::B128);\n\n mres.meta_ad_u32(MAC_LEN as u32);\n\n mres.ad_point(q_s);\n\n\n\n // Initialize a verifier for the entire ciphertext.\n\n let verifier = Verifier::new();\n\n\n\n // Include all encrypted headers and padding as received cleartext.\n", "file_path": "src/mres.rs", "rank": 29, "score": 37713.81297902657 }, { "content": "fn bench_encrypt(c: &mut Criterion) {\n\n let sk_a = SecretKey::new();\n\n let pk_a = sk_a.private_key(\"/one/two\");\n\n\n\n let sk_b = SecretKey::new();\n\n let pk_b = sk_b.private_key(\"/three/four\");\n\n\n\n let mut encrypt = c.benchmark_group(\"encrypt\");\n\n for n in vec![0 * KB, 1 * KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {\n\n encrypt.throughput(Throughput::Elements(n));\n\n encrypt.bench_with_input(BenchmarkId::from_parameter(n), &n, |b, &n| {\n\n b.iter(|| {\n\n pk_a.encrypt(\n\n &mut io::repeat(0).take(n),\n\n &mut io::sink(),\n\n vec![pk_b.public_key()],\n\n black_box(0),\n\n black_box(0),\n\n )\n\n .unwrap()\n\n });\n\n });\n\n }\n\n encrypt.finish();\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 30, "score": 37713.81297902657 }, { "content": "fn bench_verify(c: &mut Criterion) {\n\n let sk_a = SecretKey::new();\n\n let pk_a = sk_a.private_key(\"/one/two\");\n\n\n\n let mut verify = c.benchmark_group(\"verify\");\n\n for n in vec![0 * KB, 1 * KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {\n\n let sig = pk_a.sign(&mut io::repeat(0).take(n)).unwrap();\n\n verify.throughput(Throughput::Elements(n));\n\n verify.bench_with_input(BenchmarkId::from_parameter(n), &n, |b, &n| {\n\n b.iter(|| pk_a.public_key().verify(&mut io::repeat(0).take(n), &sig).unwrap());\n\n });\n\n }\n\n verify.finish();\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 31, "score": 37713.81297902657 }, { "content": "fn bench_pbenc(c: &mut Criterion) {\n\n let sk = SecretKey::new();\n\n\n\n 
c.bench_function(\"pbenc\", |b| {\n\n b.iter(|| sk.encrypt(black_box(\"passphrase\"), black_box(10), black_box(10)))\n\n });\n\n}\n\n\n\nconst KB: u64 = 1024;\n\n\n\ncriterion_group!(benches, bench_encrypt, bench_decrypt, bench_sign, bench_verify, bench_pbenc);\n\ncriterion_main!(benches);\n", "file_path": "benches/benches.rs", "rank": 32, "score": 37713.81297902657 }, { "content": "fn bench_decrypt(c: &mut Criterion) {\n\n let sk_a = SecretKey::new();\n\n let pk_a = sk_a.private_key(\"/one/two\");\n\n\n\n let sk_b = SecretKey::new();\n\n let pk_b = sk_b.private_key(\"/three/four\");\n\n\n\n let mut decrypt = c.benchmark_group(\"decrypt\");\n\n for n in vec![0 * KB, 1 * KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {\n\n let mut ciphertext = Cursor::new(Vec::new());\n\n pk_a.encrypt(\n\n &mut io::repeat(0).take(n),\n\n &mut ciphertext,\n\n vec![pk_b.public_key()],\n\n black_box(0),\n\n black_box(0),\n\n )\n\n .unwrap();\n\n let ciphertext = ciphertext.into_inner();\n\n\n", "file_path": "benches/benches.rs", "rank": 33, "score": 37713.81297902657 }, { "content": "fn bench_sign(c: &mut Criterion) {\n\n let sk_a = SecretKey::new();\n\n let pk_a = sk_a.private_key(\"/one/two\");\n\n\n\n let mut sign = c.benchmark_group(\"sign\");\n\n for n in vec![0 * KB, 1 * KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {\n\n sign.throughput(Throughput::Elements(n));\n\n sign.bench_with_input(BenchmarkId::from_parameter(n), &n, |b, &n| {\n\n b.iter(|| pk_a.sign(&mut io::repeat(0).take(n)).unwrap());\n\n });\n\n }\n\n sign.finish();\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 34, "score": 37713.81297902657 }, { "content": "#[must_use]\n\npub fn derive_root(r: &[u8; 64]) -> Scalar {\n\n let mut root_df = Strobe::new(b\"veil.scaldf.root\", SecParam::B128);\n\n root_df.key(r, false);\n\n root_df.prf_scalar()\n\n}\n\n\n\n/// Derive a scalar from another scalar using the given key ID.\n", "file_path": "src/scaldf.rs", "rank": 35, "score": 35025.794758029064 }, 
{ "content": "#[must_use]\n\nfn diffie_hellman(d: &Scalar, q: &RistrettoPoint) -> [u8; 32] {\n\n let zz = q * d;\n\n if zz.is_identity() {\n\n panic!(\"non-contributory ECDH\");\n\n }\n\n\n\n zz.compress().to_bytes()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use curve25519_dalek::ristretto::RistrettoPoint;\n\n use curve25519_dalek::scalar::Scalar;\n\n\n\n use crate::util::G;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn round_trip() {\n", "file_path": "src/akem.rs", "rank": 36, "score": 33433.726502676705 }, { "content": "#[must_use]\n\npub fn derive_scalar(d: Scalar, key_id: &str) -> Scalar {\n\n key_id.trim_matches(KEY_ID_DELIM).split(KEY_ID_DELIM).fold(d, |d_p, label| {\n\n let mut label_df = Strobe::new(b\"veil.scaldf.label\", SecParam::B128);\n\n label_df.key(label.as_bytes(), false);\n\n d_p + label_df.prf_scalar()\n\n })\n\n}\n\n\n\n/// Derive a point from another point using the given key ID.\n", "file_path": "src/scaldf.rs", "rank": 37, "score": 31826.036115300456 }, { "content": "#[must_use]\n\npub fn rand_array<const N: usize>() -> [u8; N] {\n\n let mut out = [0u8; N];\n\n rand::thread_rng().fill_bytes(&mut out);\n\n out\n\n}\n\n\n\n/// The generator point for ristretto255.\n\npub const G: &RistrettoBasepointTable = &RISTRETTO_BASEPOINT_TABLE;\n\n\n\n/// The length of a MAC in bytes.\n\npub const MAC_LEN: usize = 16;\n\n\n\n/// The length of a compressed ristretto255 point in bytes.\n\npub const POINT_LEN: usize = 32;\n\n\n\n/// The length of a `u32` in bytes.\n\npub const U32_LEN: usize = mem::size_of::<u32>();\n\n\n\n/// The length of a `u64` in bytes.\n\npub const U64_LEN: usize = mem::size_of::<u64>();\n\n\n", "file_path": "src/util.rs", "rank": 38, "score": 31826.036115300456 }, { "content": "#[must_use]\n\npub fn derive_point(q: &RistrettoPoint, key_id: &str) -> RistrettoPoint {\n\n q + (G * &derive_scalar(Scalar::zero(), key_id))\n\n}\n\n\n\nconst KEY_ID_DELIM: char = '/';\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n 
fn scalar_derivation() {\n\n let d = Scalar::random(&mut rand::thread_rng());\n\n let d1 = derive_scalar(d, \"/one\");\n\n let d2 = derive_scalar(d1, \"/two\");\n\n let d3 = derive_scalar(d2, \"/three\");\n\n\n\n let d3_p = derive_scalar(d, \"/one/two/three\");\n\n\n\n assert_eq!(d3_p, d3);\n", "file_path": "src/scaldf.rs", "rank": 39, "score": 30425.878910459876 }, { "content": "#[must_use]\n\npub fn decrypt(passphrase: &str, ciphertext: &[u8]) -> Option<Vec<u8>> {\n\n if ciphertext.len() < U32_LEN + U32_LEN + SALT_LEN + MAC_LEN {\n\n return None;\n\n }\n\n\n\n // Split the input into parts.\n\n let mut time = Vec::from(ciphertext);\n\n let mut space = time.split_off(U32_LEN);\n\n let mut salt = space.split_off(U32_LEN);\n\n let mut ciphertext = salt.split_off(SALT_LEN);\n\n let mut mac = ciphertext.split_off(ciphertext.len() - MAC_LEN);\n\n\n\n // Decode the time and space parameters.\n\n let time = u32::from_le_bytes(time.try_into().ok()?);\n\n let space = u32::from_le_bytes(space.try_into().ok()?);\n\n\n\n // Perform the balloon hashing.\n\n let mut pbenc = init(passphrase.nfkc().to_string().as_bytes(), &salt, time, space);\n\n\n\n // Decrypt the ciphertext.\n", "file_path": "src/pbenc.rs", "rank": 45, "score": 29949.29638923431 }, { "content": " r: [u8; 64],\n\n}\n\n\n\nimpl SecretKey {\n\n /// Return a randomly generated secret key.\n\n #[must_use]\n\n pub fn new() -> SecretKey {\n\n SecretKey { r: util::rand_array() }\n\n }\n\n\n\n /// Encrypt the secret key with the given passphrase and pbenc parameters.\n\n #[must_use]\n\n pub fn encrypt(&self, passphrase: &str, time: u32, space: u32) -> Vec<u8> {\n\n pbenc::encrypt(passphrase, time, space, &self.r)\n\n }\n\n\n\n /// Decrypt the secret key with the given passphrase and pbenc parameters.\n\n pub fn decrypt(passphrase: &str, ciphertext: &[u8]) -> Result<SecretKey, DecryptionError> {\n\n pbenc::decrypt(passphrase, ciphertext)\n\n .and_then(|b| b.try_into().ok())\n", "file_path": "src/veil.rs", "rank": 46, 
"score": 13.55823019524351 }, { "content": " .ok_or(SignatureError)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Signature {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", bs58::encode(self.sig).into_string())\n\n }\n\n}\n\n\n\n/// A derived public key, used to verify messages.\n\n#[derive(Eq, PartialEq, Debug, Copy, Clone)]\n\npub struct PublicKey {\n\n q: RistrettoPoint,\n\n}\n\n\n\nimpl PublicKey {\n\n /// Read the contents of the reader and return `Ok(())` iff the given signature was created by\n\n /// this public key of the exact contents. Otherwise, returns\n\n /// [VerificationError::InvalidSignature].\n", "file_path": "src/veil.rs", "rank": 47, "score": 9.93147064269419 }, { "content": "use std::convert::TryInto;\n\nuse std::io::{Result, Write};\n\n\n\nuse curve25519_dalek::ristretto::RistrettoPoint;\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse strobe_rs::{SecParam, Strobe};\n\n\n\nuse crate::util::{StrobeExt, G};\n\n\n\n/// The length of a signature, in bytes.\n\npub const SIGNATURE_LEN: usize = SCALAR_LEN * 2;\n\n\n\n/// A writer which accumulates message contents for signing before passing them along to an inner\n\n/// writer.\n\npub struct Signer<W: Write> {\n\n schnorr: Strobe,\n\n writer: W,\n\n}\n\n\n\nimpl<W> Signer<W>\n", "file_path": "src/schnorr.rs", "rank": 48, "score": 9.57404876115539 }, { "content": " // Map the hashed index block back to an index and hash that block.\n\n let v = u64::from_le_bytes(idx[..U64_LEN].try_into().expect(\"invalid u64 len\"));\n\n hash_counter!(pbenc, ctr, buf[(v % space as u64) as usize], [], buf[m]);\n\n }\n\n }\n\n }\n\n\n\n // Step 3: Extract output from buffer.\n\n pbenc.key(&buf[space as usize - 1], false);\n\n\n\n pbenc\n\n}\n\n\n\nconst SALT_LEN: usize = 16;\n\nconst TIME_OFFSET: usize = 0;\n\nconst SPACE_OFFSET: usize = U32_LEN;\n\nconst SALT_OFFSET: usize = SPACE_OFFSET + U32_LEN;\n\nconst CT_OFFSET: usize = SALT_OFFSET + SALT_LEN;\n\nconst N: usize = 32;\n\nconst DELTA: 
usize = 3;\n", "file_path": "src/pbenc.rs", "rank": 49, "score": 9.527452671299944 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.pk.fmt(f)\n\n }\n\n}\n\n\n\n/// A Schnorr signature.\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone)]\n\npub struct Signature {\n\n sig: [u8; SIGNATURE_LEN],\n\n}\n\n\n\nimpl FromStr for Signature {\n\n type Err = SignatureError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n bs58::decode(s)\n\n .into_vec()\n\n .ok()\n\n .and_then(|b| b.try_into().ok())\n\n .map(|sig| Signature { sig })\n", "file_path": "src/veil.rs", "rank": 50, "score": 8.822402452040743 }, { "content": "#![cfg(feature = \"cli\")]\n\n\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::Result;\n\nuse duct::cmd;\n\n\n\nconst VEIL_PATH: &str = env!(\"CARGO_BIN_EXE_veil\");\n\n\n\n#[test]\n", "file_path": "tests/integration_test.rs", "rank": 51, "score": 8.693253690374938 }, { "content": " }\n\n\n\n #[must_use]\n\n fn root(&self) -> PrivateKey {\n\n let d = scaldf::derive_root(&self.r);\n\n PrivateKey { d, pk: PublicKey { q: G * &d } }\n\n }\n\n}\n\n\n\nimpl Default for SecretKey {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Debug for SecretKey {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.root().fmt(f)\n\n }\n\n}\n", "file_path": "src/veil.rs", "rank": 52, "score": 7.631403152935388 }, { "content": "}\n\n\n\nimpl Verifier {\n\n /// Create a new ver ifier.\n\n #[must_use]\n\n pub fn new() -> Verifier {\n\n let mut schnorr = Strobe::new(b\"veil.schnorr\", SecParam::B128);\n\n schnorr.recv_clr(&[], false);\n\n Verifier { schnorr }\n\n }\n\n\n\n /// Verify the previously-written message contents using the given public key and signature.\n\n #[must_use]\n\n pub fn verify(mut self, q: &RistrettoPoint, sig: &[u8; SIGNATURE_LEN]) -> bool {\n\n // Add the signer's public key as associated data.\n\n self.schnorr.ad_point(q);\n\n\n\n // Split the signature into parts.\n\n let c = 
sig[..SCALAR_LEN].try_into().expect(\"invalid scalar len\");\n\n let s = sig[SCALAR_LEN..].try_into().expect(\"invalid scalar len\");\n", "file_path": "src/schnorr.rs", "rank": 53, "score": 7.288334359078295 }, { "content": " pub fn verify<R>(&self, reader: &mut R, sig: &Signature) -> Result<(), VerificationError>\n\n where\n\n R: Read,\n\n {\n\n let mut verifier = Verifier::new();\n\n io::copy(reader, &mut verifier)?;\n\n verifier.verify(&self.q, &sig.sig).then(|| ()).ok_or(VerificationError::InvalidSignature)\n\n }\n\n\n\n /// Derive a public key with the given key ID.\n\n ///\n\n /// `key_id` should be slash-separated string (e.g. `/one/two/three`) which define a path of\n\n /// derived keys (e.g. root -> `one` -> `two` -> `three`).\n\n #[must_use]\n\n pub fn derive(&self, key_id: &str) -> PublicKey {\n\n PublicKey { q: scaldf::derive_point(&self.q, key_id) }\n\n }\n\n}\n\n\n\nimpl fmt::Display for PublicKey {\n", "file_path": "src/veil.rs", "rank": 54, "score": 7.193605702129764 }, { "content": "\n\n // Decode the challenge and signature scalars.\n\n let (c, s) = match (Scalar::from_canonical_bytes(c), Scalar::from_canonical_bytes(s)) {\n\n (Some(c), Some(s)) => (c, s),\n\n _ => return false,\n\n };\n\n\n\n // Re-calculate the ephemeral public key and add it as associated data.\n\n let r_g = (G * &s) + (-c * q);\n\n self.schnorr.ad_point(&r_g);\n\n\n\n // Re-derive the challenge scalar.\n\n let c_p = self.schnorr.prf_scalar();\n\n\n\n // Return true iff c' == c.\n\n c_p == c\n\n }\n\n}\n\n\n\nimpl Write for Verifier {\n", "file_path": "src/schnorr.rs", "rank": 55, "score": 6.96607144992282 }, { "content": "use std::convert::TryInto;\n\nuse std::io::{self, ErrorKind, Read, Result, Write};\n\n\n\nuse curve25519_dalek::ristretto::RistrettoPoint;\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse rand::prelude::ThreadRng;\n\nuse rand::RngCore;\n\nuse strobe_rs::{SecParam, Strobe};\n\n\n\nuse crate::akem;\n\nuse crate::schnorr::{Signer, Verifier, SIGNATURE_LEN};\n\nuse 
crate::util::{StrobeExt, G, MAC_LEN, U64_LEN};\n\n\n\n/// Encrypt the contents of `reader` such that they can be decrypted and verified by all members of\n\n/// `q_rs` and write the ciphertext to `writer` with `padding` bytes of random data added.\n", "file_path": "src/mres.rs", "rank": 56, "score": 6.900618189306334 }, { "content": " W: Write,\n\n {\n\n self.recv_clr(&[], false);\n\n RecvClrWriter(self, w)\n\n }\n\n}\n\n\n\nmacro_rules! strobe_writer {\n\n ($t:ident, $strobe:ident, $buf:ident, $writer:ident, $body:block) => {\n\n pub struct $t<W: Write>(Strobe, W);\n\n\n\n impl<W: Write> $t<W> {\n\n #[must_use]\n\n pub fn into_inner(self) -> (Strobe, W) {\n\n (self.0, self.1)\n\n }\n\n }\n\n\n\n impl<W: Write> Write for $t<W> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n", "file_path": "src/util.rs", "rank": 57, "score": 6.709026800841682 }, { "content": "# Key Encapsulation\n\n\n\n## Encapsulation\n\n\n\nEncapsulation is as follows, given the sender's key pair, $d_S$ and $Q_S$, an ephemeral key pair, $d_E$ and $Q_E$, the\n\nreceiver's public key, $Q_R$, a plaintext message $P$, and MAC size $N_M$:\n\n\n\n```text\n\nINIT('veil.akem', level=128)\n\nAD(LE_U32(N_M), meta=true)\n\nAD(Q_R)\n\nAD(Q_S)\n\n```\n\n\n\nThe static shared secret point is calculated ${ZZ_S}=[{d_S}]{Q_R}=[{d_R}{d_S}]G$ and used as a key to encrypt the\n\nephemeral public key $Q_E$:\n\n\n\n```text\n\nKEY(ZZ_S)\n\nSEND_ENC(Q_E) -> E\n\n```\n\n\n\nThe ephemeral shared secret point is calculated ${ZZ_E}=[{d_E}]{Q_R}=[{d_R}{d_E}]G$ and used as a key:\n\n\n\n```text\n\nKEY(ZZ_E)\n\n```\n\n\n\nThis is effectively an authenticated ECDH KEM, but instead of returning KDF output for use in a DEM, we use the keyed\n\nprotocol to directly encrypt the ciphertext and create a MAC:\n\n\n\n```text\n\nSEND_ENC(P) -> C\n\nSEND_MAC(N_M) -> M\n\n```\n\n\n\nThe resulting ciphertext is the concatenation of $E$, $C$, and $M$.\n\n\n\n## Decapsulation\n\n\n\nDecapsulation is then the inverse of 
encryption, given the recipient's key pair, $d_R$ and $Q_R$, and the sender's\n\npublic key $Q_S$:\n\n\n\n```text\n\nINIT('veil.akem', level=128)\n\nAD(LE_U32(N_M), meta=true)\n\nAD(Q_R)\n\nAD(Q_S)\n\n```\n\n\n\nThe static shared secret point is calculated ${ZZ_S}=[{d_R}]{Q_S}=[{d_R}{d_S}]G$ and used as a key to decrypt the\n\nephemeral public key $Q_E$:\n\n\n\n```text\n\nKEY(ZZ_S)\n\nRECV_ENC(E) -> Q_E\n\n```\n\n\n\nThe ephemeral shared secret point is calculated ${ZZ_E}=[{d_R}]{Q_E}=[{d_R}{d_E}]G$ and used as a key to decrypt the\n\nplaintext and verify the MAC:\n\n\n\n```text\n\nKEY(ZZ_E)\n\nRECV_ENC(C) -> P\n\nRECV_MAC(M)\n\n```\n\n\n\nIf the `RECV_MAC` call is successful, the ephemeral public key $Q_E$ and the plaintext message $P$ are returned.\n\n\n", "file_path": "docs/src/design/akem.md", "rank": 58, "score": 6.4135496620786885 }, { "content": "use std::io;\n\nuse std::io::{Cursor, Read};\n\n\n\nuse criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};\n\n\n\nuse veil::SecretKey;\n\n\n", "file_path": "benches/benches.rs", "rank": 59, "score": 6.367763693350286 }, { "content": "# Passphrase-based Encryption\n\n\n\nVeil implements memory-hard password-based encryption via STROBE using [balloon hashing][bh].\n\n\n\n## Initialization\n\n\n\nThe protocol is initialized as follows, given a passphrase $P$, a 128-bit salt $S$, delta constant $D$, space parameter\n\n$N_S$, time parameter $N_T$, block size $N_B$, and MAC size $N_M$:\n\n\n\n```text\n\nINIT('veil.kdf.balloon', level=128)\n\nAD(LE_U32(D), meta=true)\n\nAD(LE_U32(N_B), meta=true)\n\nAD(LE_U32(N_M), meta=true)\n\nAD(LE_U32(N_T), meta=true)\n\nAD(LE_U32(N_S), meta=true)\n\nKEY(P)\n\nAD(S)\n\n```\n\n\n\nThen, for each iteration of the balloon hashing algorithm, given a counter $C$, a left block $L$, and a right block $R$:\n\n\n\n```text\n\nAD(LE_U64(C))\n\nAD(L)\n\nAD(R)\n\nPRF(N)\n\n```\n\n\n\nThe final block $B_N$ of the balloon hashing algorithm is then used to key 
the protocol:\n\n\n\n```text\n\nKEY(B_N)\n\n```\n\n\n\n## Encryption\n\n\n\nEncryption of a message $P$ is as follows:\n\n\n\n```text\n\nSEND_ENC(P) -> C\n\nSEND_MAC(N_M) -> M\n\n```\n\n\n\nThe returned ciphertext contains the following:\n\n\n\n```text\n\nLE_U32(N_T) || LE_U32(N_S) || S || C || M\n\n```\n\n\n\n## Decryption\n\n\n\nDecryption of a ciphertext parses $N_T$, $N_S$, $S$, $C$ and $M$, initializes the protocol, and performs the inverse of\n\nencryption:\n\n\n\n```text\n\nRECV_ENC(C) -> P\n\nRECV_MAC(M)\n\n```\n\n\n\nIf the `RECV_MAC` call is successful, the plaintext $P$ is returned.\n\n\n\nIt should be noted that there is no standard balloon hashing algorithm, so this protocol is in the very, very tall grass\n\nof cryptography and should never be used.\n\n\n\n\n", "file_path": "docs/src/design/pbenc.md", "rank": 60, "score": 6.314821475436089 }, { "content": "## Randomness Re-Use\n\n\n\nThe ephemeral key pair, $d_E$ and $Q_E$, are used multiple times: once for each `veil.akem`\n\nheader and finally once for the end signature. 
This improves the efficiency of the scheme without reducing its security,\n\nper [Bellare et al.'s treatment of Randomness Reusing Multi-Recipient Encryption Schemes][rr-mres].\n\n\n\n## Ephemeral Scalar Hedging\n\n\n\nIn deriving the DEK and ephemeral scalar from a cloned context, `veil.mres`\n\nuses [Aranha et al.'s \"hedged signature\" technique][hedge] to mitigate against both catastrophic randomness failures and\n\ndifferential fault attacks against purely deterministic encryption schemes.\n\n\n\n[hpke]: https://eprint.iacr.org/2020/1499.pdf\n\n\n\n[rr-mres]: http://cseweb.ucsd.edu/~Mihir/papers/bbs.pdf\n\n\n", "file_path": "docs/src/design/mres.md", "rank": 61, "score": 6.2629156760282765 }, { "content": " fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n self.schnorr.recv_clr(buf, true);\n\n Ok(buf.len())\n\n }\n\n\n\n fn flush(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\nconst SCALAR_LEN: usize = 32;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io;\n\n use std::io::Write;\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/schnorr.rs", "rank": 62, "score": 6.2518784133051355 }, { "content": "use std::convert::TryInto;\n\n\n\nuse strobe_rs::{SecParam, Strobe};\n\nuse unicode_normalization::UnicodeNormalization;\n\n\n\nuse crate::util::{self, StrobeExt, MAC_LEN, U32_LEN, U64_LEN};\n\n\n\n/// Encrypt the given plaintext using the given passphrase.\n\n#[must_use]\n", "file_path": "src/pbenc.rs", "rank": 63, "score": 6.163751242863739 }, { "content": "//! The Veil hybrid cryptosystem.\n\n//!\n\n//! Veil is an incredibly experimental hybrid cryptosystem for sending and receiving confidential,\n\n//! authentic multi-recipient messages which are indistinguishable from random noise by an attacker.\n\n//! Unlike e.g. GPG messages, Veil messages contain no metadata or format details which are not\n\n//! encrypted. As a result, a global passive adversary would be unable to gain any information from\n\n//! 
a Veil message beyond traffic analysis. Messages can be padded with random bytes to disguise\n\n//! their true length, and fake recipients can be added to disguise their true number from other\n\n//! recipients.\n\n//!\n\n//! You should not use this.\n\n//!\n\n//!\n\n//! ```\n\n//! use std::io;\n\n//! use std::io::Cursor;\n\n//! use veil::SecretKey;\n\n//!\n\n//! // Alice creates a secret key.\n\n//! let alice_sk = SecretKey::new();\n", "file_path": "src/lib.rs", "rank": 64, "score": 6.126102746560441 }, { "content": "use curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint};\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse curve25519_dalek::traits::IsIdentity;\n\nuse strobe_rs::{SecParam, Strobe};\n\n\n\nuse crate::util::{StrobeExt, MAC_LEN, POINT_LEN};\n\n\n\n/// The number of bytes encapsulation adds to a plaintext.\n\npub const OVERHEAD: usize = POINT_LEN + MAC_LEN;\n\n\n\n/// Given a sender's key pair, an ephemeral key pair, and the recipient's public key, encrypt the\n\n/// given plaintext.\n\n#[must_use]\n", "file_path": "src/akem.rs", "rank": 65, "score": 5.412378098919621 }, { "content": "# Digital Signatures\n\n\n\n## Signing A Message\n\n\n\nSigning is as follows, given a message in blocks $M_0...M_N$, a private scalar $d$, and a public point $Q$:\n\n\n\n```text\n\nINIT('veil.schnorr', level=128)\n\nSEND_CLR('', more=false)\n\nSEND_CLR(M_0, more=true)\n\nSEND_CLR(M_1, more=true)\n\n…\n\nSEND_CLR(M_N, more=true)\n\nAD(Q)\n\n```\n\n\n\n(The signer's public key is included after the message to allow `veil.mres` to search for a header without having to\n\nbuffer the results.)\n\n\n\nThe protocol's state is then cloned, the clone is keyed with both 64 bytes of random data and the signer's private key,\n\nan ephemeral scalar is derived from PRF output:\n\n\n\n```text\n\nKEY(rand(64))\n\nKEY(d)\n\nPRF(64) -> r\n\n```\n\n\n\nThe clone's state is discarded, and $r$ is returned to the parent along with $R = [r]G$:\n\n\n\n```text\n\nAD(R)\n\nPRF(64) -> 
c\n\n```\n\n\n\nThe resulting signature consists of the two scalars, $c$ and $s = dc + r$.\n\n\n\n## Verifying A Signature\n\n\n\nTo verify, `veil.schnorr` is run with a message in blocks $M_0...M_N$ and a public point $Q$:\n\n\n\n```text\n\nINIT('veil.schnorr', level=128)\n\nRECV_CLR('', more=false)\n\nRECV_CLR(M_0, more=true)\n\nRECV_CLR(M_1, more=true)\n\n…\n\nRECV_CLR(M_N, more=true)\n\nAD(Q)\n\n```\n\n\n\nThe public ephemeral is re-calculated as $R' = [{-c}]Q + [s]G$ and the challenge scalar is re-derived from PRF output:\n\n\n\n```\n\nAD(R')\n\nPRF(64) -> c'\n\n```\n\n\n\nFinally, the verifier compares $c' \\equiv c$. If the two scalars are equivalent, the signature is valid.\n\n\n\n## Security, Forgeability, and Malleability\n\n\n\nThis construction is equivalent to Construction 13.12 of Modern Cryptography 3e, and is the combination of the\n\nFiat-Shamir transform applied to the Schnorr identification scheme, and per Theorem 13.11, secure if the\n\ndiscrete-logarithm problem is hard relative to ristretto255.\n\n\n\nThe Schnorr signature scheme\n\nis [strongly unforgeable under chosen message attack (SUF-CMA) in the random oracle model][schnorr-cma]\n\nand [even with practical cryptographic hash functions][schnorr-hash]. 
As a consequence, the signatures are\n\nnon-malleable.\n\n\n", "file_path": "docs/src/design/schnorr.md", "rank": 66, "score": 5.410015316119486 }, { "content": " }\n\n}\n\n\n\nimpl<W> Write for Signer<W>\n\nwhere\n\n W: Write,\n\n{\n\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n self.schnorr.send_clr(buf, true);\n\n self.writer.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> Result<()> {\n\n self.writer.flush()\n\n }\n\n}\n\n\n\n/// A writer which accumulates message contents for verifying.\n\npub struct Verifier {\n\n schnorr: Strobe,\n", "file_path": "src/schnorr.rs", "rank": 67, "score": 5.301576364517134 }, { "content": "use std::convert::TryInto;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::io::{BufWriter, Read, Write};\n\nuse std::str::FromStr;\n\nuse std::{fmt, io, iter};\n\n\n\nuse curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint};\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse rand::prelude::SliceRandom;\n\nuse thiserror::Error;\n\nuse zeroize::Zeroize;\n\n\n\nuse crate::schnorr::{Signer, Verifier, SIGNATURE_LEN};\n\nuse crate::util::{G, POINT_LEN};\n\nuse crate::{mres, pbenc, scaldf, util};\n\n\n\n/// Error due to invalid public key format.\n\n#[derive(Error, Debug, Eq, PartialEq, Copy, Clone)]\n\n#[error(\"invalid public key\")]\n\npub struct PublicKeyError;\n", "file_path": "src/veil.rs", "rank": 68, "score": 5.240141040413157 }, { "content": " // Add the ephemeral public key as associated data.\n\n let r_g = G * &r;\n\n self.schnorr.ad_point(&r_g);\n\n\n\n // Derive a challenge scalar from PRF output.\n\n let c = self.schnorr.prf_scalar();\n\n\n\n // Calculate the signature scalar.\n\n let s = d * c + r;\n\n\n\n // Return the challenge and signature scalars.\n\n let mut sig = [0u8; SIGNATURE_LEN];\n\n sig[..SCALAR_LEN].copy_from_slice(c.as_bytes());\n\n sig[SCALAR_LEN..].copy_from_slice(s.as_bytes());\n\n sig\n\n }\n\n\n\n /// Unwrap the signer, returning the inner writer.\n\n pub fn into_inner(self) -> W {\n\n 
self.writer\n", "file_path": "src/schnorr.rs", "rank": 69, "score": 5.195621781176593 }, { "content": "\n\n #[test]\n\n pub fn sign_and_verify() -> Result<(), VerificationError> {\n\n let sk = SecretKey::new();\n\n let priv_a = sk.private_key(\"/one/two\");\n\n let pub_a = priv_a.public_key();\n\n\n\n let message = b\"this is a thingy\";\n\n let mut src = Cursor::new(message);\n\n\n\n let sig = priv_a.sign(&mut src)?;\n\n\n\n let mut src = Cursor::new(message);\n\n pub_a.verify(&mut src, &sig)\n\n }\n\n\n\n fn assert_failed_decryption(\n\n result: Result<u64, DecryptionError>,\n\n ) -> Result<(), DecryptionError> {\n\n match result {\n\n Ok(_) => panic!(\"decrypted but shouldn't have\"),\n\n Err(DecryptionError::InvalidCiphertext) => Ok(()),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n", "file_path": "src/veil.rs", "rank": 70, "score": 5.179043532083777 }, { "content": " where\n\n W: Write;\n\n}\n\n\n\nimpl StrobeExt for Strobe {\n\n fn meta_ad_u32(&mut self, n: u32) {\n\n self.meta_ad(&n.to_le_bytes(), false);\n\n }\n\n\n\n #[inline]\n\n fn ad_point(&mut self, q: &RistrettoPoint) {\n\n self.ad(q.compress().as_bytes(), false);\n\n }\n\n\n\n fn prf_scalar(&mut self) -> Scalar {\n\n Scalar::from_bytes_mod_order_wide(&self.prf_array())\n\n }\n\n\n\n fn prf_array<const N: usize>(&mut self) -> [u8; N] {\n\n let mut out = [0u8; N];\n", "file_path": "src/util.rs", "rank": 71, "score": 4.997878242351943 }, { "content": " W: Write,\n\n {\n\n let (verified, written) =\n\n mres::decrypt(reader, &mut BufWriter::new(writer), &self.d, &self.pk.q, &sender.q)?;\n\n\n\n if verified {\n\n Ok(written)\n\n } else {\n\n Err(DecryptionError::InvalidCiphertext)\n\n }\n\n }\n\n\n\n /// Read the contents of the reader and returns a digital signature.\n\n pub fn sign<R>(&self, reader: &mut R) -> io::Result<Signature>\n\n where\n\n R: Read,\n\n {\n\n let mut signer = Signer::new(io::sink());\n\n io::copy(reader, &mut signer)?;\n\n Ok(Signature { sig: signer.sign(&self.d, &self.pk.q) })\n", 
"file_path": "src/veil.rs", "rank": 72, "score": 4.943355273614865 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", bs58::encode(self.q.compress().as_bytes()).into_string())\n\n }\n\n}\n\n\n\nimpl FromStr for PublicKey {\n\n type Err = PublicKeyError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n bs58::decode(s)\n\n .into_vec()\n\n .ok()\n\n .filter(|b| b.len() == POINT_LEN)\n\n .map(|b| CompressedRistretto::from_slice(&b))\n\n .and_then(|p| p.decompress())\n\n .map(|q| PublicKey { q })\n\n .ok_or(PublicKeyError)\n\n }\n\n}\n\n\n", "file_path": "src/veil.rs", "rank": 73, "score": 4.886071538320075 }, { "content": "## Multi-Recipient Authenticity\n\n\n\nSimilarly, an attacker engaged in parallel CMA games with recipients has negligible advantage in forging messages.\n\nThe `veil.schnorr` signature covers the entirety of the ciphertext.\n\n\n\nThe standard KEM/DEM hybrid construction (i.e. Construction 12.20 from Modern Cryptography 3e)\n\nprovides strong confidentiality (per Theorem 12.14), but no authenticity. A compromised recipient can replace the DEM\n\ncomponent of the ciphertext with an arbitrary message encrypted with the same DEK. Even if the KEM provides strong\n\nauthenticity against insider attacks, the KEM/DEM construction does not. [Alwen et al.][hpke] detail this attack against\n\nthe proposed HPKE standard.\n\n\n\nIn the single-recipient setting, the practical advantages of this attack are limited: the attacker can forge messages\n\nwhich appear to be from a sender but are only decryptable by the attacker. In the multi-recipient setting, however, the\n\npractical advantage is much greater: the attacker can present forged messages which appear to be from a sender to other,\n\nhonest recipients.\n\n\n\n`veil.mres` eliminates this attack by using the ephemeral key pair to sign the entire ciphertext and including only the\n\npublic key in the KEM ciphertext. 
Re-using the KEM ciphertexts with a new message requires forging a new signature for a\n\nSUF-CMA-secure scheme. The use of an authenticated KEM serves to authenticate the ephemeral public key and thus the\n\nmessage: only the possessor of the sender's private key can calculate the static shared secret used to encrypt the\n\nephemeral public key, and the recipient can only forge KEM ciphertexts with themselves as the intended recipient.\n\n\n\n## Repudiability\n\n\n\nBecause the sender's private key is only used to calculate shared secrets, a `veil.mres` ciphertext is entirely\n\nrepudiable unless a recipient reveals their public key. The `veil.schnorr` keys are randomly generated for each message\n\nand all other forms of sender identity which are transmitted are only binding on public information.\n\n\n", "file_path": "docs/src/design/mres.md", "rank": 74, "score": 4.745570483812875 }, { "content": " unused_qualifications,\n\n missing_copy_implementations,\n\n missing_debug_implementations,\n\n clippy::cognitive_complexity,\n\n clippy::missing_const_for_fn,\n\n clippy::needless_borrow\n\n)]\n\n\n\npub use self::veil::*;\n\n\n\nmod akem;\n\nmod mres;\n\nmod pbenc;\n\nmod scaldf;\n\nmod schnorr;\n\nmod util;\n\nmod veil;\n", "file_path": "src/lib.rs", "rank": 75, "score": 4.533374548249561 }, { "content": " F: Fn(&mut Strobe) -> R;\n\n\n\n /// Create a writer which passes writes through `SEND_CLR` before passing them to the given\n\n /// writer.\n\n #[must_use]\n\n fn send_clr_writer<W>(self, w: W) -> SendClrWriter<W>\n\n where\n\n W: Write;\n\n\n\n /// Create a writer which passes writes through `SEND_ENC` before passing them to the given\n\n /// writer.\n\n #[must_use]\n\n fn send_enc_writer<W>(self, w: W) -> SendEncWriter<W>\n\n where\n\n W: Write;\n\n\n\n /// Create a writer which passes writes through `RECV_CLR` before passing them to the given\n\n /// writer.\n\n #[must_use]\n\n fn recv_clr_writer<W>(self, w: W) -> RecvClrWriter<W>\n", "file_path": 
"src/util.rs", "rank": 76, "score": 4.528232595755145 }, { "content": " }\n\n\n\n /// Derive a private key with the given key ID.\n\n ///\n\n /// `key_id` should be slash-separated string (e.g. `/one/two/three`) which define a path of\n\n /// derived keys (e.g. root -> `one` -> `two` -> `three`).\n\n #[must_use]\n\n pub fn derive(&self, key_id: &str) -> PrivateKey {\n\n let d = scaldf::derive_scalar(self.d, key_id);\n\n PrivateKey { d, pk: PublicKey { q: G * &d } }\n\n }\n\n}\n\n\n\nimpl PartialEq for PrivateKey {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.pk == other.pk\n\n }\n\n}\n\n\n\nimpl Debug for PrivateKey {\n", "file_path": "src/veil.rs", "rank": 77, "score": 4.520056827424102 }, { "content": "\n\n/// A derived private key, used to encrypt, decrypt, and sign messages.\n\n#[derive(Copy, Clone)]\n\npub struct PrivateKey {\n\n d: Scalar,\n\n pk: PublicKey,\n\n}\n\n\n\nimpl PrivateKey {\n\n /// Return the corresponding public key.\n\n #[must_use]\n\n pub const fn public_key(&self) -> PublicKey {\n\n self.pk\n\n }\n\n\n\n /// Encrypt the contents of the reader such that any of the recipients will be able to decrypt\n\n /// it with authenticity and writes the ciphertext to the writer.\n\n ///\n\n /// Optionally add a number of fake recipients to disguise the number of true recipients and/or\n\n /// random padding to disguise the message length.\n", "file_path": "src/veil.rs", "rank": 78, "score": 4.4611571728670665 }, { "content": "# veil-rs\n\n\n\n_Stupid crypto tricks._\n\n\n\nWARNING: You should, under no circumstances, use this.\n\n\n\nVeil is an incredibly experimental hybrid cryptosystem for sending and receiving confidential, authentic multi-recipient\n\nmessages which are indistinguishable from random noise by an attacker. Unlike e.g. GPG messages, Veil messages contain\n\nno metadata or format details which are not encrypted. 
As a result, a global passive adversary would be unable to gain\n\nany information from a Veil message beyond traffic analysis. Messages can be padded with random bytes to disguise their\n\ntrue length, and fake recipients can be added to disguise their true number from other recipients. Further, Veil\n\nsupports hierarchical key derivation, allowing for domain-separated and disposable keys.\n\n\n\nSee the `docs` directory for more.\n\n\n\n## License\n\n\n\nCopyright © 2021 Coda Hale\n\n\n\nDistributed under the Apache License 2.0 or MIT License.\n", "file_path": "README.md", "rank": 79, "score": 4.460147908960973 }, { "content": "# Creating A Secret Key\n\n\n\nTo create a secret key, use the `secret-key` command:\n\n\n\n```shell\n\nveil secret-key ./my-secret-key\n\n```\n\n\n\nYou'll be prompted for a passphrase, and `veil` will write the encrypted secret key to `./my-secret-key`.\n\n\n", "file_path": "docs/src/cli/secret-key.md", "rank": 80, "score": 4.440509960265601 }, { "content": " let sig = Signature { sig: [69u8; SIGNATURE_LEN] };\n\n\n\n assert_eq!(\"2PKwbVQ1YMFEexCmUDyxy8cuwb69VWcvoeodZCLegqof62ro8siurvh9QCnFzdsdTixDC94tCMzH7dMuqL5Gi2CC\", sig.to_string());\n\n\n\n let decoded = \"2PKwbVQ1YMFEexCmUDyxy8cuwb69VWcvoeodZCLegqof62ro8siurvh9QCnFzdsdTixDC94tCMzH7dMuqL5Gi2CC\".parse::<Signature>();\n\n assert_eq!(Ok(sig), decoded);\n\n\n\n assert_eq!(Err(SignatureError), \"woot woot\".parse::<Signature>());\n\n }\n\n\n\n #[test]\n\n pub fn round_trip() -> Result<(), DecryptionError> {\n\n let sk_a = SecretKey::new();\n\n let priv_a = sk_a.private_key(\"/one/two\");\n\n\n\n let sk_b = SecretKey::new();\n\n let priv_b = sk_b.private_key(\"/a/b\");\n\n\n\n let message = b\"this is a thingy\";\n\n let mut src = Cursor::new(message);\n", "file_path": "src/veil.rs", "rank": 81, "score": 4.387970908284553 }, { "content": "## Indistinguishability and Pseudorandomness\n\n\n\nPer [Fleischhacker et al.][ind-sig], this construction produces indistinguishable signatures 
(i.e., signatures which do\n\nnot reveal anything about the signing key or signed message). When encrypted with an unrelated key (i.e.,\n\nvia `veil.mres`), the construction is isomorphic to Fleischhacker et al.'s DRPC compiler for producing pseudorandom\n\nsignatures, which are indistinguishable from random.\n\n\n\n## Ephemeral Scalar Hedging\n\n\n\nIn deriving the ephemeral scalar from a cloned context, `veil.schnorr` uses [Aranha et al.'s\n\n\"hedged signature\" technique][hedge] to mitigate against both catastrophic randomness failures and differential fault\n\nattacks against purely deterministic signature schemes.\n\n\n\n\n\n[schnorr-cma]: https://www.di.ens.fr/david.pointcheval/Documents/Papers/2000_joc.pdf\n\n\n\n[schnorr-hash]: http://www.neven.org/papers/schnorr.pdf\n\n\n\n[ind-sig]: https://eprint.iacr.org/2011/673.pdf\n\n\n", "file_path": "docs/src/design/schnorr.md", "rank": 82, "score": 4.385843475690191 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn round_trip() {\n\n let passphrase = \"this is a secret\";\n\n let message = b\"this is too\";\n\n let ciphertext = encrypt(passphrase, 5, 3, message);\n\n let plaintext = decrypt(passphrase, &ciphertext);\n\n\n\n assert_eq!(Some(message.to_vec()), plaintext);\n\n }\n\n\n\n #[test]\n\n pub fn bad_time() {\n\n let passphrase = \"this is a secret\";\n\n let message = b\"this is too\";\n\n let mut ciphertext = encrypt(passphrase, 5, 3, message);\n", "file_path": "src/pbenc.rs", "rank": 83, "score": 4.3805587242101875 }, { "content": "# Multi-recipient Messages\n\n\n\n## Encryption\n\n\n\nEncrypting a message begins as follows, given the sender's key pair, $d_S$ and $Q_S$, a plaintext message in blocks\n\n$P_0...P_N$, a list of recipient public keys, $Q_{R^0}...Q_{R^M}$, and a DEK size $N_{DEK}$:\n\n\n\n```text\n\nINIT('veil.mres', level=128)\n\nAD(LE_32(N_DEK), meta=true)\n\nAD(Q_s)\n\n```\n\n\n\nThe protocol context is cloned and keyed with the sender's private 
key and a random nonce and used to derive a data\n\nencryption key, $K_{DEK}$, and an ephemeral private key, $d_E$:\n\n\n\n```text\n\nKEY(d_S)\n\nKEY(rand())\n\nPRF(32) -> K_DEK\n\nPRF(64) -> d_E\n\n```\n\n\n\nThe ephemeral public key is computed as $Q_E = [{d_E}]G$, and the cloned context is discarded:\n\n\n\nThe data encryption key and message offset are encoded into a fixed-length header and copies of it are encrypted\n\nwith `veil.akem` for each recipient using $d_E$ and $Q_E$. Optional random padding is added to the end, and the\n\nresulting blocks $H_0..H_N,H_{pad}$ is written:\n\n\n\n```text\n\nSEND_CLR('')\n\nSEND_CLR(H_0, more=true)\n\n…\n\nSEND_CLR(H_N, more=true)\n\nSEND_CLR(H_pad, more=true)\n\n```\n\n\n\nThe protocol is keyed with the DEK and the encrypted message is written:\n\n\n\n```text\n\nKEY(K_dek)\n\nSEND_ENC('')\n\nSEND_ENC(P_0, more=true)\n\n…\n\nSEND_ENC(P_N, more=true)\n\n```\n\n\n\nFinally, a Schnorr signature $S$ of the entire ciphertext (headers, padding, and DEM ciphertext) is created with $d_E$\n\nand encrypted:\n\n\n\n```text\n\nSEND_ENC(S)\n\n```\n\n\n\nThe resulting ciphertext then contains, in order: the `veil.akem`-encrypted headers, random padding, message ciphertext,\n\nand a Schnorr signature of the headers, padding, and ciphertext.\n\n\n", "file_path": "docs/src/design/mres.md", "rank": 84, "score": 4.2554533702181345 }, { "content": " hash_counter!(pbenc, ctr, passphrase, salt, buf[0]);\n\n for m in 1..space as usize {\n\n hash_counter!(pbenc, ctr, buf[m - 1], [], buf[m]);\n\n }\n\n\n\n // Step 2: Mix buffer contents.\n\n for t in 0..time as usize {\n\n for m in 0..space as usize {\n\n // Step 2a: Hash last and current blocks.\n\n let prev = (m as isize - 1).rem_euclid(space as isize) as usize; // wrap 0 to last block\n\n hash_counter!(pbenc, ctr, buf[prev], buf[m], buf[m]);\n\n\n\n // Step 2b: Hash in pseudo-randomly chosen blocks.\n\n for i in 0..DELTA {\n\n // Map indexes to a block and hash it and the salt.\n\n 
idx[..U64_LEN].copy_from_slice(&(t as u64).to_le_bytes());\n\n idx[U64_LEN..U64_LEN * 2].copy_from_slice(&(m as u64).to_le_bytes());\n\n idx[U64_LEN * 2..U64_LEN * 3].copy_from_slice(&(i as u64).to_le_bytes());\n\n hash_counter!(pbenc, ctr, salt, idx, idx);\n\n\n", "file_path": "src/pbenc.rs", "rank": 85, "score": 4.225192353476632 }, { "content": "## Insider Authenticity\n\n\n\nThis construction is not secure against insider attacks on authenticity, nor is it intended to be. A recipient can forge\n\nciphertexts which appear to be from a sender by re-using the ephemeral public key and encrypting an alternate plaintext,\n\nbut the forgeries will only be decryptable by the forger. Because this type of forgery is possible, `veil.akem`\n\nciphertexts are therefore repudiable.\n\n\n\n## Randomness Re-Use\n\n\n\nThe ephemeral key pair, $d_E$ and $Q_E$, are generated outside of this construction and can be used multiple times for\n\nmultiple recipients. This improves the efficiency of the scheme without reducing its security, per Bellare et al.'s\n\ntreatment of [Randomness Reusing Multi-Recipient Encryption Schemes][rr-mres].\n\n\n\n\n\n[ik-cca]: https://iacr.org/archive/asiacrypt2001/22480568.pdf\n\n\n", "file_path": "docs/src/design/akem.md", "rank": 86, "score": 4.055518585283377 }, { "content": "# Command Line Tool\n\n\n\nThe Veil cryptosystem is implemented as a command line tool `veil`.\n\n\n\n## Installation\n\n\n\nTo install it, check out this repository and build it yourself:\n\n\n\n```shell\n\ngit clone https://github.com/codahale/veil-rs\n\ncargo install\n\n```\n\n\n\nBecause this is a cryptosystem designed by one person with no formal training and has not been audited, it will never be\n\npackaged conveniently. Cryptographic software is primarily used in high-risk environments where strong assurances of\n\ncorrectness, confidentiality, integrity, etc. are required, and `veil` does not provide those assurances. 
It's more\n\nof an art installation than a practical tool.\n\n\n\n## Shell Completion\n\n\n\n`veil` can generate its own shell completion scripts for Bash, Elvish, Fish, Powershell, and Zsh:\n\n\n\n```shell\n\nveil complete zsh /usr/local/share/zsh/site-functions/\n", "file_path": "docs/src/cli.md", "rank": 87, "score": 3.8712182950676284 }, { "content": "use std::io::Write;\n\nuse std::{io, mem};\n\n\n\nuse curve25519_dalek::constants::RISTRETTO_BASEPOINT_TABLE;\n\nuse curve25519_dalek::ristretto::{RistrettoBasepointTable, RistrettoPoint};\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse rand::RngCore;\n\nuse strobe_rs::Strobe;\n\n\n\n/// Generate a random `u8` array.\n\n#[must_use]\n", "file_path": "src/util.rs", "rank": 88, "score": 3.833436683210757 }, { "content": "where\n\n W: Write,\n\n{\n\n /// Create a new signer which passes writes through to the given writer.\n\n pub fn new(writer: W) -> Signer<W> {\n\n let mut schnorr = Strobe::new(b\"veil.schnorr\", SecParam::B128);\n\n schnorr.send_clr(&[], false);\n\n Signer { schnorr, writer }\n\n }\n\n\n\n /// Create a signature of the previously-written message contents using the given key pair.\n\n #[allow(clippy::many_single_char_names)]\n\n pub fn sign(&mut self, d: &Scalar, q: &RistrettoPoint) -> [u8; SIGNATURE_LEN] {\n\n // Add the signer's public key as associated data.\n\n self.schnorr.ad_point(q);\n\n\n\n // Derive an ephemeral scalar from the protocol's current state, the signer's private key,\n\n // and a random nonce.\n\n let r = self.schnorr.hedge(d.as_bytes(), StrobeExt::prf_scalar);\n\n\n", "file_path": "src/schnorr.rs", "rank": 89, "score": 3.756400859876076 }, { "content": "use curve25519_dalek::ristretto::RistrettoPoint;\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse strobe_rs::{SecParam, Strobe};\n\n\n\nuse crate::util::{StrobeExt, G};\n\n\n\n/// Derive a scalar from the given secret key.\n\n#[must_use]\n", "file_path": "src/scaldf.rs", "rank": 90, "score": 3.717656864939413 }, { "content": " let 
mut mres_writer = mres.recv_clr_writer(verifier);\n\n\n\n // Find a header, decrypt it, and write the entirety of the headers and padding to the verifier.\n\n let (dek, q_e) = match decrypt_header(reader, &mut mres_writer, d_r, q_r, q_s)? {\n\n Some((dek, q_e)) => (dek, q_e),\n\n None => return Ok((false, 0)),\n\n };\n\n\n\n // Unwrap the received cleartext writer.\n\n let (mut mres, mut verifier) = mres_writer.into_inner();\n\n\n\n // Key the protocol with the recovered DEK.\n\n mres.key(&dek, false);\n\n\n\n // Decrypt the message and get the signature.\n\n let (written, sig) = decrypt_message(reader, writer, &mut verifier, &mut mres)?;\n\n\n\n // Return the signature's validity and the number of bytes of plaintext written.\n\n Ok((verifier.verify(&q_e, &sig), written))\n\n}\n\n\n\nconst DEK_LEN: usize = 32;\n\nconst HEADER_LEN: usize = DEK_LEN + U64_LEN;\n\nconst ENC_HEADER_LEN: usize = HEADER_LEN + akem::OVERHEAD;\n\n\n", "file_path": "src/mres.rs", "rank": 91, "score": 3.664251425484677 }, { "content": "# What is Veil?\n\n\n\nVeil is an incredibly experimental hybrid cryptosystem for sending and receiving confidential, authentic multi-recipient\n\nmessages which are indistinguishable from random noise by an attacker.\n\n\n\nUnlike e.g. GPG messages, Veil messages contain no metadata or format details which are not encrypted. As a result, a\n\nglobal passive adversary would be unable to gain any information from a Veil message beyond traffic analysis. 
Messages\n\ncan be padded with random bytes to disguise their true length, and fake recipients can be added to disguise their true\n\nnumber from other recipients.\n\n\n\nFurther, Veil supports hierarchical key derivation, allowing for domain-separated and disposable keys.\n", "file_path": "docs/src/about.md", "rank": 92, "score": 3.494132525368057 }, { "content": "## Design\n\n\n\nVeil is designed to be simple, understandable, and robust.\n\n\n\n### Cryptographic Minimalism\n\n\n\nVeil uses just two distinct primitives:\n\n\n\n* [STROBE][strobe] for confidentiality, authentication, and integrity.\n\n* [ristretto255][r255] for key agreement and signing.\n\n\n\n[ristretto255][r255-why] uses a safe curve, is a prime-order cyclic group, has non-malleable encodings, and has no\n\nco-factor concerns. STROBE is built on the Keccak 𝑓-\\[1600\\] permutation, the core of SHA-3, which has\n\nseen [significant scrutiny over the last decade][keccak].\n\n\n\nThe underlying philosophy is that expressed by [Adam Langley][agl]:\n\n\n\n> There's a lesson in all this: have one joint and keep it well oiled. 
… \\[O\\]ne needs to minimise\n\n> complexity, concentrate all extensibility in a single place and _actively defend it_.\n\n\n\nAs a result, the constructions in Veil depend primarily on two relatively stable cryptographic assumptions: the Gap\n\nDiffie-Hellman assumption for ristretto255 and that Keccak 𝑓-\\[1600\\] is suitably close to a random permutation.\n\n\n", "file_path": "docs/src/design.md", "rank": 93, "score": 3.4184233559200865 }, { "content": " // Read a block of ciphertext and copy it to the buffer.\n\n n = reader.read(&mut input)?;\n\n buf.extend_from_slice(&input[..n]);\n\n\n\n // Process the data if we have at least a signature's worth.\n\n if buf.len() > SIGNATURE_LEN {\n\n // Pop the first N-64 bytes off the buffer.\n\n let mut block: Vec<u8> = buf.drain(..buf.len() - SIGNATURE_LEN).collect();\n\n\n\n // Verify the ciphertext.\n\n verifier.write_all(&block)?;\n\n\n\n // Decrypt the ciphertext.\n\n mres.recv_enc(&mut block, true);\n\n\n\n // Write the plaintext.\n\n writer.write_all(&block)?;\n\n written += block.len() as u64;\n\n }\n\n }\n\n\n\n // Keep the last 64 bytes as the encrypted signature.\n\n let mut sig: [u8; SIGNATURE_LEN] = buf.try_into().expect(\"invalid sig len\");\n\n mres.recv_enc(&mut sig, false);\n\n\n\n // Return the bytes written and the decrypted signature.\n\n Ok((written, sig))\n\n}\n\n\n", "file_path": "src/mres.rs", "rank": 94, "score": 3.3950684583957753 }, { "content": "\n\n/// The error type for message verification.\n\n#[derive(Error, Debug)]\n\npub enum VerificationError {\n\n /// Error due to signature/message/public key mismatch.\n\n ///\n\n /// The message or signature may have been altered, or the message may not have been signed with\n\n /// the given key.\n\n #[error(\"invalid signature\")]\n\n InvalidSignature,\n\n\n\n /// The reader containing the message returned an IO error.\n\n #[error(\"error verifying: {0}\")]\n\n IoError(#[from] io::Error),\n\n}\n\n\n\n/// A 512-bit secret from which multiple private 
keys can be derived.\n\n#[derive(Zeroize)]\n\n#[zeroize(drop)]\n\npub struct SecretKey {\n", "file_path": "src/veil.rs", "rank": 95, "score": 3.3906734615682583 }, { "content": "# Deriving A Public Key\n\n\n\nIn the same way you can use a secret key to generate a public key with a key ID, you can also derive a public key from\n\nanother public key using a sub key ID.\n\n\n\nLet's say someone creates a public key with the ID `/one/two`.\n\n\n\n```shell\n\nveil public-key ./secret-key /one/two\n\n\n\n#=> TkUWybv8fAvsHPhauPj7edUTVdCHuCFHazA6RjnvwJa\n\n```\n\n\n\nBut they sign a message using a key ID of `/one/two/three`. We can compute the public key `/one/two/three` given the\n\npublic key `/one/two`:\n\n\n\n```shell\n\nveil derive-key TkUWybv8fAvsHPhauPj7edUTVdCHuCFHazA6RjnvwJa /more\n\n\n\n#=> BfksdzSKbmcS2Suav16dmYE2WxifqauPRL6FZpJt1476\n\n```\n\n\n\nThis produces the same public key as if the owner of the secret key had generated the public key `/one/two/more`:\n\n\n\n```shell\n\nveil public-key ./secret-key /one/two/more\n\n\n\n#=> BfksdzSKbmcS2Suav16dmYE2WxifqauPRL6FZpJt1476\n\n```\n", "file_path": "docs/src/cli/derive-key.md", "rank": 96, "score": 3.280133427482589 }, { "content": "# Summary\n\n\n\n- [What is Veil?](./about.md)\n\n- [Design](./design.md)\n\n - [Key Derivation](./design/hkd.md)\n\n - [Key Encapsulation](./design/akem.md)\n\n - [Digital Signatures](./design/schnorr.md)\n\n - [Multi-recipient Messages](./design/mres.md)\n\n - [Passphrase-based Encryption](./design/pbenc.md)\n\n- [Command Line Tool](./cli.md)\n\n - [Creating A Secret Key](./cli/secret-key.md)\n\n - [Generating A Public Key](./cli/public-key.md)\n\n - [Deriving A Public Key](./cli/derive-key.md)\n\n - [Encrypting A Message](./cli/encrypt.md)\n\n - [Decrypting A Message](./cli/decrypt.md)\n\n - [Signing A Message](./cli/sign.md)\n\n - [Verifying A Message](./cli/verify.md)\n", "file_path": "docs/src/SUMMARY.md", "rank": 97, "score": 3.1802195758305984 }, { "content": "\n\n/// Error 
due to invalid signature format.\n\n#[derive(Error, Debug, Eq, PartialEq, Copy, Clone)]\n\n#[error(\"invalid signature\")]\n\npub struct SignatureError;\n\n\n\n/// The error type for message decryption.\n\n#[derive(Error, Debug)]\n\npub enum DecryptionError {\n\n /// Error due to message/private key/public key mismatch.\n\n ///\n\n /// The ciphertext may have been altered, the message may not have been encrypted by the given\n\n /// sender, or the message may not have been encrypted for the given recipient.\n\n #[error(\"invalid ciphertext\")]\n\n InvalidCiphertext,\n\n\n\n /// An error returned when there was an underlying IO error during decryption.\n\n #[error(\"error decrypting: {0}\")]\n\n IoError(#[from] io::Error),\n\n}\n", "file_path": "src/veil.rs", "rank": 98, "score": 2.9686443152321074 }, { "content": "# Verifying A Message\n\n\n\nTo verify a signature of a message, you'll need the signer's public key, the message, and the signature:\n\n\n\n```shell\n\nveil verify TkUWybv8fAvsHPhauPj7edUTVdCHuCFHazA6RjnvwJa announcement.txt \\\n\n 3yjygj91feSFzp3HJ7x1SuhBYxD3kdJEQGUCLASaiNxnPSgtCu5vjyDgHNrbAA2Qn94KHtwUesL4mv4MPYXo4kYZ \n\n```\n\n\n\nIf the signature is from the given public key and the message hasn't been altered, `veil` will exit with a status\n", "file_path": "docs/src/cli/verify.md", "rank": 99, "score": 2.8894513566322804 } ]
Rust
src/lib.rs
phaylon/resorb
4b36e3da17b66c97e28b86ee23f7db9d77d8a38d
use std::rc; use std::sync; use std::error; use std::fmt; macro_rules! assert_parse_ok { ($parser:expr, $input:expr, $expected:expr $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Ok($expected) ); drop(input); }} } macro_rules! assert_parse_no_match { ($parser:expr, $input:expr $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::NoMatch) ); drop(input); }} } macro_rules! assert_parse_fail { ($parser:expr, $input:expr, $value:expr, ($($l:tt)+) $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::Fail(::Error::new(::Location::new($($l)+), $value))) ); drop(input); }} } macro_rules! assert_parse_partial { ($parser:expr, $input:expr, $value:expr, $rest:expr, ($($l:tt)+) $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::Unparsed($value, ::Location::new($($l)+), $rest)) ); drop(input); }} } pub mod util; pub mod parse; pub fn apply<'src, P>(input: &'src str, options: Options, parser: P) -> Result<P::Output, ApplyError<'src, P::Output, P::Error>> where P: Parser<'src> { let depth_limit = options.depth_limit; let ctx = Context { options: options, depth: 1, }; let input = Input { rest: input, location: Location { line: 1, column: 1, offset: 0, }, }; match parser.parse(input, ctx) { Ok((value, input)) => match input.unpack_if_nonempty() { None => Ok(value), Some((location, rest)) => Err(ApplyError::Unparsed(value, location, rest)), }, Err(Fail::NoMatch) => Err(ApplyError::NoMatch), Err(Fail::DepthLimit(location)) => Err(ApplyError::DepthLimit(location, depth_limit.unwrap())), Err(Fail::Error(error)) => Err(ApplyError::Fail(error)), Err(Fail::ZeroLengthRepeat(location)) => Err(ApplyError::ZeroLengthRepeat(location)), } } #[derive( Debug, Clone )] pub struct Context { options: Options, 
depth: usize, } impl Context { pub fn descend<'src, E>(&self, input: &Input<'src>) -> Result<Context, Fail<E>> { let new_depth = self.depth + 1; if let Some(limit) = self.options.depth_limit { if new_depth > limit { return Err(Fail::DepthLimit(input.location)); } } Ok(Context { options: self.options.clone(), depth: new_depth, }) } } #[derive( Debug, Clone )] pub struct Options { pub depth_limit: Option<usize>, } impl Default for Options { fn default() -> Options { Options { depth_limit: Some(64), } } } #[derive( Debug, Clone, PartialEq, Eq )] pub enum ApplyError<'src, T, E> { NoMatch, Fail(Error<E>), DepthLimit(Location, usize), Unparsed(T, Location, &'src str), ZeroLengthRepeat(Location), } pub type Outcome<'src, O, E> = Result<(O, Input<'src>), Fail<E>>; pub trait Parser<'src> { type Output; type Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error>; } impl<'src, P> Parser<'src> for sync::Arc<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, P> Parser<'src> for rc::Rc<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, P> Parser<'src> for Box<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, 'p, P> Parser<'src> for &'p P where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } #[derive( Debug, Clone, PartialEq, Eq )] pub enum Fail<E> { NoMatch, Error(Error<E>), DepthLimit(Location), 
ZeroLengthRepeat(Location), } #[derive( Debug, Clone, Copy, PartialEq, Eq )] pub struct Location { line: usize, column: usize, offset: usize, } impl Location { pub fn new(line: usize, column: usize, offset: usize) -> Location { Location { line: line, column: column, offset: offset, } } pub fn line(&self) -> usize { self.line } pub fn column(&self) -> usize { self.column } pub fn offset(&self) -> usize { self.offset } fn advance(&self, parsed: &str) -> Location { let nl_count = parsed .chars() .filter(|c| *c == '\n') .count(); let last_line = parsed .rfind('\n') .map(|nl_pos| &parsed[(nl_pos+1)..]) .unwrap_or(&parsed); let last_line_chars = last_line .chars() .count(); Location { offset: self.offset + parsed.len(), line: self.line + nl_count, column: last_line_chars + if nl_count > 0 { 1 } else { self.column }, } } } #[derive( Debug, Clone )] pub struct Input<'src> { rest: &'src str, location: Location, } impl<'src> Input<'src> { pub fn location(&self) -> Location { self.location } pub fn consume_len_via<F>(self, find_len: F) -> Option<(&'src str, Input<'src>)> where F: FnOnce(&'src str) -> Option<usize> { let len = match find_len(self.rest) { None => return None, Some(len) => len, }; let consumed = &self.rest[..len]; Some((consumed, self.advance(len))) } pub fn consume_char(self) -> Option<(char, Input<'src>)> { let chr = match self.rest.chars().nth(0) { None => return None, Some(chr) => chr, }; Some((chr, self.advance(chr.len_utf8()))) } fn advance(self, len: usize) -> Input<'src> { let consumed = &self.rest[..len]; let new_rest = &self.rest[len..]; let new_location = self.location.advance(consumed); Input { rest: new_rest, location: new_location, } } fn unpack_if_nonempty(self) -> Option<(Location, &'src str)> { if self.rest.len() > 0 { Some((self.location, self.rest)) } else { None } } } #[derive( Debug, Clone, PartialEq, Eq )] pub struct Error<E> { location: Location, value: E, cause: Option<Box<Error<E>>>, } impl<E> Error<E> { pub fn new(location: Location, 
value: E) -> Error<E> { Error { location: location, value: value, cause: None, } } pub fn new_with_cause(location: Location, value: E, cause: Error<E>) -> Error<E> { Error { location: location, value: value, cause: Some(Box::new(cause)), } } pub fn location(&self) -> Location { self.location } pub fn value(&self) -> &E { &self.value } pub fn cause(&self) -> Option<&Error<E>> { match self.cause { Some(ref error) => Some(&*error), None => None, } } } impl<E> fmt::Display for Error<E> where E: fmt::Display { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.value, fmt) } } impl<E> error::Error for Error<E> where E: error::Error { fn description(&self) -> &str { self.value.description() } fn cause(&self) -> Option<&error::Error> { match self.cause { Some(ref error) => Some(&*error), None => None, } } } #[cfg(test)] mod tests { use parse; #[test] fn apply() { #[derive( Debug, Clone, PartialEq, Eq )] struct ErrMarker; let parser = parse::pair( parse::exact_str("foo"), parse::require(parse::exact_str("bar"), || ErrMarker), ); assert_parse_ok!(&parser, "foobar", ("foo", "bar")); assert_parse_no_match!(&parser, "qux"); assert_parse_fail!(&parser, "fooqux", ErrMarker, (1, 4, 3)); } #[test] fn pointers() { use std::rc::Rc; use std::sync::Arc; let plain = parse::no_custom_error(parse::exact_str("foo")); assert_eq!(::apply("foo", ::Options::default(), &plain), Ok("foo")); let boxed = Box::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), boxed), Ok("foo")); let rced = Rc::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), rced), Ok("foo")); let arced = Arc::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), arced), Ok("foo")); } #[test] fn location() { use super::Location as L; let loc = L::new(1, 1, 0).advance("foo"); assert_eq!(loc, L::new(1, 4, 3)); let loc = loc.advance(" \t"); 
assert_eq!(loc, L::new(1, 6, 5)); let loc = loc.advance("\n"); assert_eq!(loc, L::new(2, 1, 6)); let loc = loc.advance("a\nb\nc"); assert_eq!(loc, L::new(4, 2, 11)); } }
use std::rc; use std::sync; use std::error; use std::fmt; macro_rules! assert_parse_ok { ($parser:expr, $input:expr, $expected:expr $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Ok($expected) ); drop(input); }} } macro_rules! assert_parse_no_match { ($parser:expr, $input:expr $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::NoMatch) ); drop(input); }} } macro_rules! assert_parse_fail { ($parser:expr, $input:expr, $value:expr, ($($l:tt)+) $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::Fail(::Error::new(::Location::new($($l)+), $value))) ); drop(input); }} } macro_rules! assert_parse_partial { ($parser:expr, $input:expr, $value:expr, $rest:expr, ($($l:tt)+) $(,)*) => {{ let input: String = ($input).into(); assert_eq!( ::apply(&input, ::Options::default(), $parser), Err(::ApplyError::Unparsed($value, ::Location::new($($l)+), $rest)) ); drop(input); }} } pub mod util; pub mod parse; pub fn apply<'src, P>(input: &'src str, options: Options, parser: P) -> Result<P::Output, ApplyError<'src, P::Output, P::Error>> where P: Parser<'src> { let depth_limit = options.depth_limit; let ctx = Context { options: options, depth: 1, }; let input = Input { rest: input, location: Location { line: 1, column: 1, offset: 0, }, }; match parser.parse(input, ctx) { Ok((value, input)) => match input.unpack_if_nonempty() { None => Ok(value), Some((location, rest)) => Err(ApplyError::Unparsed(value, location, rest)), }, Err(Fail::NoMatch) => Err(ApplyError::NoMatch), Err(Fail::DepthLimit(location)) => Err(ApplyError::DepthLimit(location, depth_limit.unwrap())), Err(Fail::Error(error)) => Err(ApplyError::Fail(error)), Err(Fail::ZeroLengthRepeat(location)) => Err(ApplyError::ZeroLengthRepeat(location)), } } #[derive( Debug, Clone )] pub struct Context { options: Options, 
depth: usize, } impl Context { pub fn descend<'src, E>(&self, input: &Input<'src>) -> Result<Context, Fail<E>> { let new_depth = self.depth + 1; if let Some(limit) = self.options.depth_limit { if new_depth > limit { return Err(Fail::DepthLimit(input.location)); } } Ok(Context { options: self.options.clone(), depth: new_depth, }) } } #[derive( Debug, Clone )] pub struct Options { pub depth_limit: Option<usize>, } impl Default for Options { fn default() -> Options { Options { depth_limit: Some(64), } } } #[derive( Debug, Clone, PartialEq, Eq )] pub enum ApplyError<'src, T, E> { NoMatch, Fail(Error<E>), DepthLimit(Location, usize), Unparsed(T, Location, &'src str), ZeroLengthRepeat(Location), } pub type Outcome<'src, O, E> = Result<(O, Input<'src>), Fail<E>>; pub trait Parser<'src> { type Output; type Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error>; } impl<'src, P> Parser<'src> for sync::Arc<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, P> Parser<'src> for rc::Rc<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, P> Parser<'src> for Box<P> where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } impl<'src, 'p, P> Parser<'src> for &'p P where P: Parser<'src> { type Output = P::Output; type Error = P::Error; fn parse(&self, input: Input<'src>, ctx: Context) -> Outcome<'src, Self::Output, Self::Error> { (**self).parse(input, ctx) } } #[derive( Debug, Clone, PartialEq, Eq )] pub enum Fail<E> { NoMatch, Error(Error<E>), DepthLimit(Location), 
ZeroLengthRepeat(Location), } #[derive( Debug, Clone, Copy, PartialEq, Eq )] pub struct Location { line: usize, column: usize, offset: usize, } impl Location {
pub fn line(&self) -> usize { self.line } pub fn column(&self) -> usize { self.column } pub fn offset(&self) -> usize { self.offset } fn advance(&self, parsed: &str) -> Location { let nl_count = parsed .chars() .filter(|c| *c == '\n') .count(); let last_line = parsed .rfind('\n') .map(|nl_pos| &parsed[(nl_pos+1)..]) .unwrap_or(&parsed); let last_line_chars = last_line .chars() .count(); Location { offset: self.offset + parsed.len(), line: self.line + nl_count, column: last_line_chars + if nl_count > 0 { 1 } else { self.column }, } } } #[derive( Debug, Clone )] pub struct Input<'src> { rest: &'src str, location: Location, } impl<'src> Input<'src> { pub fn location(&self) -> Location { self.location } pub fn consume_len_via<F>(self, find_len: F) -> Option<(&'src str, Input<'src>)> where F: FnOnce(&'src str) -> Option<usize> { let len = match find_len(self.rest) { None => return None, Some(len) => len, }; let consumed = &self.rest[..len]; Some((consumed, self.advance(len))) } pub fn consume_char(self) -> Option<(char, Input<'src>)> { let chr = match self.rest.chars().nth(0) { None => return None, Some(chr) => chr, }; Some((chr, self.advance(chr.len_utf8()))) } fn advance(self, len: usize) -> Input<'src> { let consumed = &self.rest[..len]; let new_rest = &self.rest[len..]; let new_location = self.location.advance(consumed); Input { rest: new_rest, location: new_location, } } fn unpack_if_nonempty(self) -> Option<(Location, &'src str)> { if self.rest.len() > 0 { Some((self.location, self.rest)) } else { None } } } #[derive( Debug, Clone, PartialEq, Eq )] pub struct Error<E> { location: Location, value: E, cause: Option<Box<Error<E>>>, } impl<E> Error<E> { pub fn new(location: Location, value: E) -> Error<E> { Error { location: location, value: value, cause: None, } } pub fn new_with_cause(location: Location, value: E, cause: Error<E>) -> Error<E> { Error { location: location, value: value, cause: Some(Box::new(cause)), } } pub fn location(&self) -> Location { 
self.location } pub fn value(&self) -> &E { &self.value } pub fn cause(&self) -> Option<&Error<E>> { match self.cause { Some(ref error) => Some(&*error), None => None, } } } impl<E> fmt::Display for Error<E> where E: fmt::Display { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.value, fmt) } } impl<E> error::Error for Error<E> where E: error::Error { fn description(&self) -> &str { self.value.description() } fn cause(&self) -> Option<&error::Error> { match self.cause { Some(ref error) => Some(&*error), None => None, } } } #[cfg(test)] mod tests { use parse; #[test] fn apply() { #[derive( Debug, Clone, PartialEq, Eq )] struct ErrMarker; let parser = parse::pair( parse::exact_str("foo"), parse::require(parse::exact_str("bar"), || ErrMarker), ); assert_parse_ok!(&parser, "foobar", ("foo", "bar")); assert_parse_no_match!(&parser, "qux"); assert_parse_fail!(&parser, "fooqux", ErrMarker, (1, 4, 3)); } #[test] fn pointers() { use std::rc::Rc; use std::sync::Arc; let plain = parse::no_custom_error(parse::exact_str("foo")); assert_eq!(::apply("foo", ::Options::default(), &plain), Ok("foo")); let boxed = Box::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), boxed), Ok("foo")); let rced = Rc::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), rced), Ok("foo")); let arced = Arc::new(parse::no_custom_error(parse::exact_str("foo"))); assert_eq!(::apply("foo", ::Options::default(), arced), Ok("foo")); } #[test] fn location() { use super::Location as L; let loc = L::new(1, 1, 0).advance("foo"); assert_eq!(loc, L::new(1, 4, 3)); let loc = loc.advance(" \t"); assert_eq!(loc, L::new(1, 6, 5)); let loc = loc.advance("\n"); assert_eq!(loc, L::new(2, 1, 6)); let loc = loc.advance("a\nb\nc"); assert_eq!(loc, L::new(4, 2, 11)); } }
pub fn new(line: usize, column: usize, offset: usize) -> Location { Location { line: line, column: column, offset: offset, } }
function_block-full_function
[ { "content": "pub fn fail<E>(location: ::Location, error: E) -> ::Fail<E> {\n\n ::Fail::Error(::Error::new(location, error))\n\n}\n", "file_path": "src/util.rs", "rank": 0, "score": 211455.97158554505 }, { "content": "pub fn no_custom_error<'src, P>(parser: P) -> P where P: ::Parser<'src, Error=()> {\n\n parser\n\n}\n\n\n", "file_path": "src/parse/error.rs", "rank": 2, "score": 173734.91698894335 }, { "content": "pub fn depth_check<'src, P>(parser: P) -> DepthCheck<P>\n\nwhere P: ::Parser<'src> {\n\n DepthCheck { parser: parser }\n\n}\n\n\n\nimpl<'src, P> ::Parser<'src> for DepthCheck<P>\n\nwhere P: ::Parser<'src> {\n\n\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let deeper_ctx = ctx.descend(&input)?;\n\n self.parser.parse(input, deeper_ctx)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/parse/limit.rs", "rank": 3, "score": 165910.7922216698 }, { "content": "pub fn error_context<'src, P, F>(parser: P, error_gen: F) -> ErrorContext<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n F: Fn() -> P::Error,\n\n{\n\n ErrorContext {\n\n parser: parser,\n\n error_gen: error_gen,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct ErrorContext<P, F> {\n\n parser: P,\n\n error_gen: F,\n\n}\n\n\n\nimpl<'src, P, F> ::Parser<'src> for ErrorContext<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n", "file_path": "src/parse/error.rs", "rank": 4, "score": 157956.70929129847 }, { "content": "pub fn optional<'src, P>(parser: P) -> Optional<P>\n\nwhere P: ::Parser<'src> {\n\n Optional { parser: parser }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Optional<P> {\n\n parser: P,\n\n}\n\n\n\nimpl<'src, P> ::Parser<'src> for Optional<P>\n\nwhere P: ::Parser<'src> {\n\n \n\n type Output = Option<P::Output>;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> 
{\n\n match self.parser.parse(input.clone(), ctx) {\n\n Ok((value, input)) => Ok((Some(value), input)),\n\n Err(::Fail::NoMatch) => Ok((None, input)),\n\n Err(other) => Err(other),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/structure.rs", "rank": 5, "score": 156185.34438101645 }, { "content": "pub fn require<'src, P, F>(parser: P, error_gen: F) -> Require<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n F: Fn() -> P::Error,\n\n{\n\n Require {\n\n parser: parser,\n\n error_gen: error_gen,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Require<P, F> {\n\n parser: P,\n\n error_gen: F,\n\n}\n\n\n\nimpl<'src, P, F> ::Parser<'src> for Require<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n", "file_path": "src/parse/error.rs", "rank": 6, "score": 135149.8522369547 }, { "content": "pub fn either<'src, P>(parser: P) -> Either<P>\n\nwhere P: ::Parser<'src> {\n\n Either { parser: parser }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Either<P> {\n\n parser: P,\n\n}\n\n\n\nimpl<'src, P> Either<P> where P: ::Parser<'src> {\n\n\n\n pub fn or<PO>(self, other: PO) -> EitherOr<Self, PO> {\n\n EitherOr {\n\n parser1: self,\n\n parser2: other,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/structure.rs", "rank": 7, "score": 132224.4917941175 }, { "content": "pub fn restrict<'src, P, F>(parser: P, test: F) -> Restrict<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n F: Fn(&P::Output) -> bool,\n\n{\n\n Restrict {\n\n parser: parser,\n\n test: test,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Restrict<P, F> {\n\n parser: P,\n\n test: F,\n\n}\n\n\n\nimpl<'src, P, F> ::Parser<'src> for Restrict<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n", "file_path": "src/parse/error.rs", "rank": 8, "score": 130678.9659205661 }, { "content": "pub fn verify<'src, P, F>(parser: P, test: F) -> Verify<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n F: Fn(&P::Output) -> Option<P::Error>,\n\n{\n\n Verify {\n\n parser: parser,\n\n test: test,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct 
Verify<P, F> {\n\n parser: P,\n\n test: F,\n\n}\n\n\n\nimpl<'src, P, F> ::Parser<'src> for Verify<P, F>\n\nwhere\n\n P: ::Parser<'src>,\n", "file_path": "src/parse/error.rs", "rank": 9, "score": 130678.96592056612 }, { "content": "pub fn repeat1<'src, P>(parser: P) -> RepeatMin<P>\n\nwhere P: ::Parser<'src> {\n\n RepeatMin {\n\n parser: parser,\n\n min: 1,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct RepeatMin<P> {\n\n parser: P,\n\n min: usize,\n\n}\n\n\n\nimpl<'src, P> ::Parser<'src> for RepeatMin<P>\n\nwhere P: ::Parser<'src> {\n\n\n\n type Output = Vec<P::Output>;\n\n type Error = P::Error;\n\n\n", "file_path": "src/parse/repeat.rs", "rank": 10, "score": 129822.73678228773 }, { "content": "pub fn repeat0<'src, P>(parser: P) -> RepeatMin<P>\n\nwhere P: ::Parser<'src> {\n\n RepeatMin {\n\n parser: parser,\n\n min: 0,\n\n }\n\n}\n\n\n", "file_path": "src/parse/repeat.rs", "rank": 11, "score": 129822.73678228773 }, { "content": "pub fn option_to_outcome<'src, T, U, E, F>(opt: Option<T>, mapper: F)\n\n-> ::Outcome<'src, U, E>\n\nwhere F: FnOnce(T) -> (U, ::Input<'src>) {\n\n match opt {\n\n None => Err(::Fail::NoMatch),\n\n Some(value) => Ok(mapper(value)),\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 12, "score": 126685.94721514374 }, { "content": "pub fn exact_str<'cmp, E>(content: &'cmp str) -> ExactStr<'cmp, E> {\n\n ExactStr {\n\n content: content,\n\n _error: marker::PhantomData,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct ExactStr<'cmp, E> {\n\n content: &'cmp str,\n\n _error: marker::PhantomData<E>,\n\n}\n\n\n\nimpl<'cmp, 'src, E> ::Parser<'src> for ExactStr<'cmp, E> {\n\n\n\n type Output = &'src str;\n\n type Error = E;\n\n\n\n fn parse(&self, input: ::Input<'src>, _ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n", "file_path": "src/parse/content.rs", "rank": 14, "score": 121759.32878349132 }, { "content": "pub fn current_location<E>() -> CurrentLocation<E> {\n\n CurrentLocation {\n\n _error: 
marker::PhantomData,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct CurrentLocation<E> {\n\n _error: marker::PhantomData<E>,\n\n}\n\n\n\nimpl<'src, E> ::Parser<'src> for CurrentLocation<E> {\n\n \n\n type Output = ::Location;\n\n type Error = E;\n\n\n\n fn parse(&self, input: ::Input<'src>, _ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let location = input.location();\n\n Ok((location, input))\n", "file_path": "src/parse/structure.rs", "rank": 15, "score": 121743.42292112517 }, { "content": "pub fn fold0<'src, T, P, I, F>(parser: P, init: I, fold: F) -> FoldMin<P, I, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n I: Fn() -> T,\n\n F: Fn(T, P::Output) -> T,\n\n{\n\n FoldMin {\n\n parser: parser,\n\n init: init,\n\n fold: fold,\n\n min: Minimum::Zero,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct FoldMin<P, I, F> {\n\n parser: P,\n\n init: I,\n\n fold: F,\n\n min: Minimum,\n", "file_path": "src/parse/repeat.rs", "rank": 16, "score": 105334.53422942755 }, { "content": "pub fn any_char<E>() -> AnyChar<E> {\n\n AnyChar {\n\n _error: marker::PhantomData,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct AnyChar<E> {\n\n _error: marker::PhantomData<E>,\n\n}\n\n\n\nimpl<'src, E> ::Parser<'src> for AnyChar<E> {\n\n\n\n type Output = char;\n\n type Error = E;\n\n\n\n fn parse(&self, input: ::Input<'src>, _ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n util::option_to_outcome(\n\n input.consume_char(),\n", "file_path": "src/parse/content.rs", "rank": 17, "score": 101514.70627953325 }, { "content": "pub fn pair<'src, PL, PR>(parser_l: PL, parser_r: PR)\n\n-> Pair<PL, PR>\n\nwhere\n\n PL: ::Parser<'src>,\n\n PR: ::Parser<'src, Error=PL::Error>,\n\n{\n\n Pair {\n\n parser_l: parser_l,\n\n parser_r: parser_r,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Pair<PL, PR> {\n\n parser_l: PL,\n\n parser_r: PR,\n\n}\n\n\n\nimpl<'src, PL, PR> ::Parser<'src> for Pair<PL, PR> \n\nwhere\n", "file_path": 
"src/parse/structure.rs", "rank": 18, "score": 79515.01207990637 }, { "content": "pub fn delimited<'src, PL, PC, PR>(parser_l: PL, parser_c: PC, parser_r: PR)\n\n-> Delimited<PL, PC, PR>\n\nwhere\n\n PL: ::Parser<'src>,\n\n PC: ::Parser<'src, Error=PL::Error>,\n\n PR: ::Parser<'src, Error=PL::Error>,\n\n{\n\n Delimited {\n\n parser_l: parser_l,\n\n parser_c: parser_c,\n\n parser_r: parser_r,\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct Delimited<PL, PC, PR> {\n\n parser_l: PL,\n\n parser_c: PC,\n\n parser_r: PR,\n\n}\n", "file_path": "src/parse/structure.rs", "rank": 19, "score": 76163.50714616248 }, { "content": "#[derive( Debug, Clone, Copy )]\n\nenum Minimum {\n\n Zero,\n\n One,\n\n}\n\n\n", "file_path": "src/parse/repeat.rs", "rank": 20, "score": 47864.813575085456 }, { "content": "\n", "file_path": "src/util.rs", "rank": 21, "score": 29418.29560563471 }, { "content": "\n\nmod content;\n\nmod error;\n\nmod structure;\n\nmod limit;\n\nmod repeat;\n\n\n\npub use self::content::*;\n\npub use self::error::*;\n\npub use self::structure::*;\n\npub use self::limit::*;\n\npub use self::repeat::*;\n", "file_path": "src/parse/mod.rs", "rank": 22, "score": 27990.73943486318 }, { "content": " use parse;\n\n\n\n #[test]\n\n fn depth_check() {\n\n let parser = parse::no_custom_error(\n\n parse::depth_check(parse::depth_check(parse::depth_check(\n\n parse::exact_str(\"foo\"),\n\n ))),\n\n );\n\n assert_eq!(\n\n ::apply(\"foo\", ::Options { depth_limit: None }, &parser),\n\n Ok(\"foo\")\n\n );\n\n assert_eq!(\n\n ::apply(\"foo\", ::Options { depth_limit: Some(10) }, &parser),\n\n Ok(\"foo\")\n\n );\n\n assert_eq!(\n\n ::apply(\"foo\", ::Options { depth_limit: Some(2) }, &parser),\n\n Err(::ApplyError::DepthLimit(::Location::new(1, 1, 0), 2))\n\n );\n\n assert_eq!(\n\n ::apply(\"bar\", ::Options { depth_limit: Some(2) }, &parser),\n\n Err(::ApplyError::DepthLimit(::Location::new(1, 1, 0), 2))\n\n );\n\n }\n\n}\n", "file_path": "src/parse/limit.rs", "rank": 23, 
"score": 27827.8979334456 }, { "content": "\n\n#[derive( Debug, Clone )]\n\npub struct DepthCheck<P> {\n\n parser: P,\n\n}\n\n\n", "file_path": "src/parse/limit.rs", "rank": 24, "score": 27826.410101950984 }, { "content": " F: Fn() -> P::Error,\n\n{\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let location = input.location();\n\n match self.parser.parse(input, ctx) {\n\n Err(::Fail::NoMatch) => Err(util::fail(location, (self.error_gen)())),\n\n other => other,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/error.rs", "rank": 25, "score": 26808.173938166525 }, { "content": " F: Fn() -> P::Error,\n\n{\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let location = input.location();\n\n match self.parser.parse(input, ctx) {\n\n Err(::Fail::Error(error)) => Err(::Fail::Error(::Error::new_with_cause(\n\n location,\n\n (self.error_gen)(),\n\n error,\n\n ))),\n\n other => other,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/parse/error.rs", "rank": 26, "score": 26805.107064816235 }, { "content": " F: Fn(&P::Output) -> Option<P::Error>,\n\n{\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let location = input.location();\n\n let (res, input) = self.parser.parse(input, ctx)?;\n\n if let Some(error) = (self.test)(&res) {\n\n Err(::Fail::Error(::Error::new(location, error)))\n\n }\n\n else {\n\n Ok((res, input))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/error.rs", "rank": 27, "score": 26805.0914188104 }, { "content": " F: Fn(&P::Output) -> bool,\n\n{\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> 
::Outcome<'src, Self::Output, Self::Error> {\n\n let (res, input) = self.parser.parse(input, ctx)?;\n\n if (self.test)(&res) {\n\n Ok((res, input))\n\n }\n\n else {\n\n Err(::Fail::NoMatch)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/error.rs", "rank": 28, "score": 26803.745139401 }, { "content": "mod tests {\n\n use parse;\n\n\n\n #[derive( Debug, Clone, PartialEq, Eq )]\n\n struct ErrMarker;\n\n\n\n #[test]\n\n fn error_context() {\n\n\n\n #[derive( Debug, Clone, PartialEq, Eq )]\n\n enum ErrStack {\n\n Original,\n\n Context,\n\n }\n\n\n\n let parser = parse::error_context(\n\n parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || ErrStack::Original),\n\n ),\n", "file_path": "src/parse/error.rs", "rank": 29, "score": 26800.691243522007 }, { "content": " || ErrStack::Context,\n\n );\n\n assert_parse_ok!(&parser, \"foobar\", (\"foo\", \"bar\"));\n\n assert_parse_no_match!(&parser, \"\");\n\n\n\n assert_eq!(\n\n ::apply(\"fooqux\", ::Options::default(), &parser),\n\n Err(::ApplyError::Fail(::Error::new_with_cause(\n\n ::Location::new(1, 1, 0),\n\n ErrStack::Context,\n\n ::Error::new(\n\n ::Location::new(1, 4, 3),\n\n ErrStack::Original,\n\n ),\n\n )))\n\n );\n\n }\n\n\n\n #[test]\n\n fn verify() {\n", "file_path": "src/parse/error.rs", "rank": 30, "score": 26795.66086470975 }, { "content": " let parser = parse::verify(\n\n parse::any_char(),\n\n |&c| if c == 'X' { Some(ErrMarker) } else { None },\n\n );\n\n assert_parse_ok!(&parser, \"Y\", 'Y');\n\n assert_parse_no_match!(&parser, \"\");\n\n assert_parse_fail!(&parser, \"X\", ErrMarker, (1, 1, 0));\n\n }\n\n\n\n #[test]\n\n fn restrict() {\n\n let parser = parse::no_custom_error(parse::restrict(\n\n parse::any_char(),\n\n |&c| c != 'X',\n\n ));\n\n assert_parse_ok!(&parser, \"Y\", 'Y');\n\n assert_parse_no_match!(&parser, \"X\");\n\n assert_parse_no_match!(&parser, \"\");\n\n }\n\n\n", "file_path": "src/parse/error.rs", "rank": 31, "score": 26790.650159258556 }, { 
"content": " #[test]\n\n fn require() {\n\n let parser = parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || ErrMarker),\n\n );\n\n assert_parse_ok!(&parser, \"foobar\", (\"foo\", \"bar\"));\n\n assert_parse_no_match!(&parser, \"foXbar\");\n\n assert_parse_fail!(&parser, \"foobaX\", ErrMarker, (1, 4, 3));\n\n assert_parse_fail!(&parser, \"foo\", ErrMarker, (1, 4, 3));\n\n }\n\n}\n", "file_path": "src/parse/error.rs", "rank": 32, "score": 26789.98398561586 }, { "content": "\n\nuse util;\n\n\n", "file_path": "src/parse/error.rs", "rank": 33, "score": 26788.46457264017 }, { "content": "}\n\n\n\nimpl<'src, T, P, I, F> ::Parser<'src> for FoldMin<P, I, F>\n\nwhere\n\n P: ::Parser<'src>,\n\n I: Fn() -> T,\n\n F: Fn(T, P::Output) -> T,\n\n{\n\n type Output = T;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, mut input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let mut value = None;\n\n loop {\n\n let (item, next_input) = match self.parser.parse(input.clone(), ctx.clone()) {\n\n Ok(res) => res,\n\n Err(::Fail::NoMatch) => break,\n\n Err(other) => return Err(other),\n\n };\n", "file_path": "src/parse/repeat.rs", "rank": 52, "score": 3918.1087712277504 }, { "content": "impl<'src, P> ::Parser<'src> for Either<P>\n\nwhere P: ::Parser<'src> {\n\n\n\n type Output = P::Output;\n\n type Error = P::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n self.parser.parse(input, ctx)\n\n }\n\n}\n\n\n\n#[derive( Debug, Clone )]\n\npub struct EitherOr<P1, P2> {\n\n parser1: P1,\n\n parser2: P2,\n\n}\n\n\n\nimpl<'src, P1, P2> EitherOr<P1, P2>\n\nwhere\n", "file_path": "src/parse/structure.rs", "rank": 53, "score": 3916.817847390434 }, { "content": " P1: ::Parser<'src>,\n\n P2: ::Parser<'src, Output=P1::Output, Error=P1::Error>,\n\n{\n\n pub fn or<PO>(self, other: PO) -> EitherOr<Self, PO> {\n\n EitherOr {\n\n parser1: self,\n\n 
parser2: other,\n\n }\n\n }\n\n}\n\n\n\nimpl<'src, P1, P2> ::Parser<'src> for EitherOr<P1, P2>\n\nwhere\n\n P1: ::Parser<'src>,\n\n P2: ::Parser<'src, Output=P1::Output, Error=P1::Error>,\n\n{\n\n type Output = P1::Output;\n\n type Error = P1::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n match self.parser1.parse(input.clone(), ctx.clone()) {\n\n Err(::Fail::NoMatch) => (),\n\n other => return other,\n\n }\n\n self.parser2.parse(input, ctx)\n\n }\n\n}\n\n\n", "file_path": "src/parse/structure.rs", "rank": 54, "score": 3913.65623820155 }, { "content": " fn parse(&self, mut input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let mut items = Vec::new();\n\n loop {\n\n let (item, next_input) = match self.parser.parse(input.clone(), ctx.clone()) {\n\n Ok(res) => res,\n\n Err(::Fail::NoMatch) => break,\n\n Err(other) => return Err(other),\n\n };\n\n if input.location().offset() == next_input.location().offset() {\n\n return Err(::Fail::ZeroLengthRepeat(input.location()));\n\n }\n\n items.push(item);\n\n input = next_input;\n\n }\n\n if items.len() >= self.min {\n\n Ok((items, input))\n\n }\n\n else {\n\n Err(::Fail::NoMatch)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/repeat.rs", "rank": 55, "score": 3912.2732376596978 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use parse;\n\n\n\n #[derive( Debug, Clone, PartialEq, Eq )]\n\n pub struct ErrMarker;\n\n\n\n #[derive( Debug, Clone, PartialEq, Eq )]\n\n pub enum MultiErrMarker {\n\n Mark1,\n\n Mark2,\n\n }\n\n\n\n #[test]\n\n fn current_location() {\n\n let parser = parse::no_custom_error(parse::pair(\n\n parse::exact_str(\"foo\"),\n", "file_path": "src/parse/structure.rs", "rank": 56, "score": 3911.794256215983 }, { "content": "\n\nimpl<'src, PL, PC, PR> ::Parser<'src> for Delimited<PL, PC, PR> \n\nwhere\n\n PL: ::Parser<'src>,\n\n PC: ::Parser<'src, Error=PL::Error>,\n\n PR: 
::Parser<'src, Error=PL::Error>,\n\n{\n\n type Output = PC::Output;\n\n type Error = PC::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let (_, input) = self.parser_l.parse(input, ctx.clone())?;\n\n let (res, input) = self.parser_c.parse(input, ctx.clone())?;\n\n let (_, input) = self.parser_r.parse(input, ctx)?;\n\n Ok((res, input))\n\n }\n\n}\n\n\n", "file_path": "src/parse/structure.rs", "rank": 57, "score": 3911.4011532316126 }, { "content": " if input.location().offset() == next_input.location().offset() {\n\n return Err(::Fail::ZeroLengthRepeat(input.location()));\n\n }\n\n value = Some((self.fold)(value.unwrap_or_else(&self.init), item));\n\n input = next_input;\n\n }\n\n match self.min {\n\n Minimum::Zero => Ok((value.unwrap_or_else(&self.init), input)),\n\n Minimum::One => match value {\n\n Some(value) => Ok((value, input)),\n\n None => Err(::Fail::NoMatch),\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use parse;\n\n\n", "file_path": "src/parse/repeat.rs", "rank": 58, "score": 3910.497252292677 }, { "content": " PL: ::Parser<'src>,\n\n PR: ::Parser<'src, Error=PL::Error>,\n\n{\n\n type Output = (PL::Output, PR::Output);\n\n type Error = PL::Error;\n\n\n\n fn parse(&self, input: ::Input<'src>, ctx: ::Context)\n\n -> ::Outcome<'src, Self::Output, Self::Error> {\n\n let (res_l, input) = self.parser_l.parse(input, ctx.clone())?;\n\n let (res_r, input) = self.parser_r.parse(input, ctx)?;\n\n Ok(((res_l, res_r), input))\n\n }\n\n}\n\n\n", "file_path": "src/parse/structure.rs", "rank": 59, "score": 3910.3861993701234 }, { "content": " #[derive( Debug, Clone, PartialEq, Eq )]\n\n pub struct ErrMarker;\n\n\n\n #[test]\n\n fn fold0() {\n\n let parser = parse::no_custom_error(parse::fold0(\n\n parse::exact_str(\"3\"),\n\n || (),\n\n |_, _| (),\n\n ));\n\n let content: String = \"335\".into();\n\n let _ = ::apply(&content, ::Options::default(), parser);\n\n\n\n\n\n /*let 
parser = parse::no_custom_error(parse::fold0(\n\n parse::either(parse::exact_str(\"3\")).or(parse::exact_str(\"5\")),\n\n move || 1000_i32,\n\n move |value, new| value + new.parse::<i32>().unwrap(),\n\n ));*/\n\n// assert_eq!(\n", "file_path": "src/parse/repeat.rs", "rank": 60, "score": 3909.9648923058553 }, { "content": " parse::current_location(),\n\n ));\n\n assert_parse_ok!(&parser, \"foo\", (\"foo\", ::Location::new(1, 4, 3)));\n\n assert_parse_no_match!(&parser, \"\");\n\n }\n\n\n\n #[test]\n\n fn optional() {\n\n\n\n let parser = parse::no_custom_error(parse::optional(parse::exact_str(\"foo\")));\n\n assert_parse_ok!(&parser, \"foo\", Some(\"foo\"));\n\n assert_parse_ok!(&parser, \"\", None);\n\n\n\n let parser = parse::pair(\n\n parse::optional(parse::exact_str(\"foo\")),\n\n parse::require(parse::exact_str(\"bar\"), || ErrMarker),\n\n );\n\n assert_parse_ok!(&parser, \"foobar\", (Some(\"foo\"), \"bar\"));\n\n assert_parse_ok!(&parser, \"bar\", (None, \"bar\"));\n\n assert_parse_fail!(&parser, \"qux\", ErrMarker, (1, 1, 0));\n", "file_path": "src/parse/structure.rs", "rank": 61, "score": 3904.9026855604334 }, { "content": " util::option_to_outcome(\n\n input.consume_len_via(|rest| {\n\n if rest.starts_with(self.content) {\n\n Some(self.content.len())\n\n }\n\n else {\n\n None\n\n }\n\n }),\n\n |res| res,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/parse/content.rs", "rank": 62, "score": 3904.240855346952 }, { "content": "\n\n#[derive( Debug, Clone, Copy )]\n", "file_path": "src/parse/repeat.rs", "rank": 63, "score": 3903.82655806584 }, { "content": " |res| res,\n\n )\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use parse;\n\n\n\n #[test]\n\n fn any_char() {\n\n let parser = parse::no_custom_error(parse::any_char());\n\n assert_parse_ok!(&parser, \"x\", 'x');\n\n assert_parse_no_match!(&parser, \"\");\n\n }\n\n\n\n #[test]\n\n fn exact_str() {\n\n let parser = parse::no_custom_error(parse::exact_str(\"foo\"));\n\n assert_parse_ok!(&parser, \"foo\", 
\"foo\");\n\n assert_parse_no_match!(&parser, \"foX\");\n\n }\n\n}\n\n\n", "file_path": "src/parse/content.rs", "rank": 64, "score": 3903.4922503329 }, { "content": " (\"foo\", \"bar\"),\n\n ]);\n\n assert_parse_ok!(&parser, \"foobar\", vec![(\"foo\", \"bar\")]);\n\n assert_parse_ok!(&parser, \"\", vec![]);\n\n assert_parse_fail!(&parser, \"fooqux\", ErrMarker, (1, 4, 3));\n\n assert_parse_partial!(&parser, \"qux\", vec![], \"qux\", (1, 1, 0));\n\n\n\n let parser = parse::no_custom_error(parse::repeat0(parse::exact_str(\"\")));\n\n assert_eq!(\n\n ::apply(\"foo\", ::Options::default(), &parser),\n\n Err(::ApplyError::ZeroLengthRepeat(::Location::new(1, 1, 0)))\n\n );\n\n }\n\n}\n", "file_path": "src/parse/repeat.rs", "rank": 65, "score": 3902.894653272403 }, { "content": " assert_parse_fail!(&parser, \"fooqux\", ErrMarker, (1, 4, 3));\n\n assert_parse_partial!(&parser, \"foobarqux\", vec![(\"foo\", \"bar\")], \"qux\", (1, 7, 6));\n\n\n\n let parser = parse::no_custom_error(parse::repeat1(parse::exact_str(\"\")));\n\n assert_eq!(\n\n ::apply(\"foo\", ::Options::default(), &parser),\n\n Err(::ApplyError::ZeroLengthRepeat(::Location::new(1, 1, 0)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn repeat0() {\n\n\n\n let parser = parse::repeat0(parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || ErrMarker),\n\n ));\n\n assert_parse_ok!(&parser, \"foobarfoobarfoobar\", vec![\n\n (\"foo\", \"bar\"),\n\n (\"foo\", \"bar\"),\n", "file_path": "src/parse/repeat.rs", "rank": 66, "score": 3902.6409346651812 }, { "content": " #[test]\n\n fn delimited() {\n\n let parser = parse::no_custom_error(parse::delimited(\n\n parse::exact_str(\"(\"),\n\n parse::exact_str(\"foo\"),\n\n parse::exact_str(\")\"),\n\n ));\n\n assert_parse_ok!(&parser, \"(foo)\", \"foo\");\n\n assert_parse_no_match!(&parser, \"X\");\n\n assert_parse_no_match!(&parser, \"(\");\n\n assert_parse_no_match!(&parser, \"(X\");\n\n assert_parse_no_match!(&parser, \"(foo\");\n\n 
assert_parse_no_match!(&parser, \"(fooX\");\n\n }\n\n}\n", "file_path": "src/parse/structure.rs", "rank": 67, "score": 3901.3954031118033 }, { "content": "\n\nuse std::marker;\n\nuse std::str;\n\n\n\nuse util;\n\n\n", "file_path": "src/parse/content.rs", "rank": 68, "score": 3901.330140015465 }, { "content": " parse::require(parse::exact_str(\"qux\"), || MultiErrMarker::Mark2),\n\n ));\n\n assert_parse_ok!(&parser, \"foobar\", (\"foo\", \"bar\"));\n\n assert_parse_ok!(&parser, \"bazqux\", (\"baz\", \"qux\"));\n\n assert_parse_no_match!(&parser, \"fnord\");\n\n assert_parse_fail!(&parser, \"fooqux\", MultiErrMarker::Mark1, (1, 4, 3));\n\n assert_parse_fail!(&parser, \"bazbar\", MultiErrMarker::Mark2, (1, 4, 3));\n\n }\n\n\n\n #[test]\n\n fn pair() {\n\n let parser = parse::no_custom_error(parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::exact_str(\"bar\"),\n\n ));\n\n assert_parse_ok!(&parser, \"foobar\", (\"foo\", \"bar\"));\n\n assert_parse_no_match!(&parser, \"foXbar\");\n\n assert_parse_no_match!(&parser, \"foobaX\");\n\n }\n\n\n", "file_path": "src/parse/structure.rs", "rank": 69, "score": 3901.2683947752 }, { "content": " }\n\n\n\n #[test]\n\n fn either() {\n\n\n\n let parser = parse::either(parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || ErrMarker),\n\n ));\n\n assert_parse_ok!(&parser, \"foobar\", (\"foo\", \"bar\"));\n\n assert_parse_no_match!(&parser, \"qux\");\n\n assert_parse_fail!(&parser, \"fooqux\", ErrMarker, (1, 4, 3));\n\n\n\n let parser =\n\n parse::either(parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || MultiErrMarker::Mark1),\n\n ))\n\n .or(parse::pair(\n\n parse::exact_str(\"baz\"),\n", "file_path": "src/parse/structure.rs", "rank": 70, "score": 3899.912507622143 }, { "content": "// result,\n\n // Ok(1011)\n\n // );\n\n// assert_parse_ok!(&parser, \"335\", ());\n\n }\n\n\n\n #[test]\n\n fn repeat1() {\n\n\n\n let parser = 
parse::repeat1(parse::pair(\n\n parse::exact_str(\"foo\"),\n\n parse::require(parse::exact_str(\"bar\"), || ErrMarker),\n\n ));\n\n assert_parse_ok!(&parser, \"foobarfoobarfoobar\", vec![\n\n (\"foo\", \"bar\"),\n\n (\"foo\", \"bar\"),\n\n (\"foo\", \"bar\"),\n\n ]);\n\n assert_parse_ok!(&parser, \"foobar\", vec![(\"foo\", \"bar\")]);\n\n assert_parse_no_match!(&parser, \"\");\n", "file_path": "src/parse/repeat.rs", "rank": 71, "score": 3898.6057206340806 }, { "content": "\n\nuse std::marker;\n\n\n", "file_path": "src/parse/structure.rs", "rank": 72, "score": 3896.158133495826 } ]
Rust
src/config.rs
drogue-iot/drogue-event-source
6f96f14d95de0bd914f523c6cb2b284bd24cf57b
use anyhow::Result; use reqwest::Method; use serde::Deserialize; use serde_with::serde_as; use std::collections::HashMap; use std::time::Duration; pub trait ConfigFromEnv<'de>: Sized + Deserialize<'de> { fn from_env() -> Result<Self, config::ConfigError> { Self::from(config::Environment::default()) } fn from_env_source(source: HashMap<String, String>) -> Result<Self, config::ConfigError> { Self::from(config::Environment::default().source(Some(source))) } fn from_env_prefix<S: AsRef<str>>(prefix: S) -> Result<Self, config::ConfigError> { Self::from(config::Environment::with_prefix(prefix.as_ref())) } fn from(env: config::Environment) -> Result<Self, config::ConfigError>; } impl<'de, T: Deserialize<'de> + Sized> ConfigFromEnv<'de> for T { fn from(env: config::Environment) -> Result<T, config::ConfigError> { let cfg = config::Config::builder() .add_source(env.separator("__")) .build()?; cfg.try_deserialize() } } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] #[serde(tag = "mode")] #[serde(rename_all = "lowercase")] pub enum Mode { Kafka(KafkaConfig), #[serde(alias = "ws")] Websocket(WebsocketConfig), } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct KafkaConfig { pub topic: String, pub bootstrap_servers: String, #[serde(default)] pub properties: HashMap<String, String>, } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct WebsocketConfig { pub drogue_endpoint: String, pub drogue_app: String, pub drogue_user: String, pub drogue_token: String, } #[derive(Clone, Debug, Deserialize)] pub struct Config { #[serde(default)] pub endpoint: EndpointConfig, pub k_sink: String, #[serde(flatten)] pub mode: Mode, } #[serde_as] #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct EndpointConfig { #[serde(default = "default_method")] #[serde_as(as = "serde_with::DisplayFromStr")] pub method: Method, #[serde(default)] pub username: Option<String>, #[serde(default)] pub password: Option<String>, #[serde(default)] pub token: Option<String>, 
#[serde(default)] pub tls_insecure: bool, #[serde(default)] pub tls_certificate: Option<String>, #[serde(default)] pub headers: HashMap<String, String>, #[serde(default, with = "humantime_serde")] pub timeout: Option<Duration>, #[serde(default = "default_error_delay", with = "humantime_serde")] pub error_delay: Duration, #[serde(default = "default_retries")] pub retries: usize, } impl Default for EndpointConfig { fn default() -> Self { Self { method: default_method(), username: None, password: None, token: None, tls_insecure: false, tls_certificate: None, headers: Default::default(), timeout: None, error_delay: default_error_delay(), retries: default_retries(), } } } const fn default_error_delay() -> Duration { Duration::from_secs(1) } const fn default_retries() -> usize { 5 } const fn default_method() -> Method { Method::POST } #[cfg(test)] mod test { use super::*; use maplit::*; #[test] fn test_cfg_kafka() { let env = convert_args!(hashmap!( "MODE" => "kafka", "BOOTSTRAP_SERVERS" => "bootstrap:9091", "TOPIC" => "topic", "PROPERTIES__FOO_BAR" => "baz", "K_SINK" => "http://localhost", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.mode, Mode::Kafka(KafkaConfig { topic: "topic".to_string(), bootstrap_servers: "bootstrap:9091".into(), properties: convert_args!(hashmap!( "foo_bar" => "baz", )) }) ); } #[test] fn test_cfg_ws() { let env = convert_args!(hashmap!( "MODE" => "ws", "DROGUE_APP" => "app", "DROGUE_ENDPOINT" => "endpoint", "DROGUE_USER" => "user", "DROGUE_TOKEN" => "token", "K_SINK" => "http://localhost", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.mode, Mode::Websocket(WebsocketConfig { drogue_app: "app".into(), drogue_endpoint: "endpoint".into(), drogue_user: "user".into(), drogue_token: "token".into(), }) ); } #[test] fn test_cfg_endpoint() { let env = convert_args!(hashmap!( "MODE" => "ws", "DROGUE_APP" => "app", "DROGUE_ENDPOINT" => "endpoint", "DROGUE_USER" => "user", "DROGUE_TOKEN" => "token", "K_SINK" => 
"http://localhost", "ENDPOINT__METHOD" => "GET", "ENDPOINT__HEADERS__foo" => "bar", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.endpoint, EndpointConfig { method: Method::GET, username: None, password: None, token: None, tls_insecure: false, tls_certificate: None, headers: convert_args!(hashmap!( "foo" => "bar" )), timeout: None, error_delay: default_error_delay(), retries: 5 } ); } }
use anyhow::Result; use reqwest::Method; use serde::Deserialize; use serde_with::serde_as; use std::collections::HashMap; use std::time::Duration; pub trait ConfigFromEnv<'de>: Sized + Deserialize<'de> { fn from_env() -> Result<Self, config::ConfigError> { Self::from(config::Environment::default()) } fn from_env_source(source: HashMap<String, String>) -> Result<Self, config::ConfigError> { Self::from(config::Environment::default().source(Some(source))) } fn from_env_prefix<S: AsRef<str>>(prefix: S) -> Result<Self, config::ConfigError> { Self::from(config::Environment::with_prefix(prefix.as_ref())) } fn from(env: config::Environment) -> Result<Self, config::ConfigError>; } impl<'de, T: Deserialize<'de> + Sized> ConfigFromEnv<'de> for T { fn from(env: config::Environment) -> Result<T, config::ConfigError> {
} #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] #[serde(tag = "mode")] #[serde(rename_all = "lowercase")] pub enum Mode { Kafka(KafkaConfig), #[serde(alias = "ws")] Websocket(WebsocketConfig), } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct KafkaConfig { pub topic: String, pub bootstrap_servers: String, #[serde(default)] pub properties: HashMap<String, String>, } #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct WebsocketConfig { pub drogue_endpoint: String, pub drogue_app: String, pub drogue_user: String, pub drogue_token: String, } #[derive(Clone, Debug, Deserialize)] pub struct Config { #[serde(default)] pub endpoint: EndpointConfig, pub k_sink: String, #[serde(flatten)] pub mode: Mode, } #[serde_as] #[derive(Clone, Debug, Deserialize, Eq, PartialEq)] pub struct EndpointConfig { #[serde(default = "default_method")] #[serde_as(as = "serde_with::DisplayFromStr")] pub method: Method, #[serde(default)] pub username: Option<String>, #[serde(default)] pub password: Option<String>, #[serde(default)] pub token: Option<String>, #[serde(default)] pub tls_insecure: bool, #[serde(default)] pub tls_certificate: Option<String>, #[serde(default)] pub headers: HashMap<String, String>, #[serde(default, with = "humantime_serde")] pub timeout: Option<Duration>, #[serde(default = "default_error_delay", with = "humantime_serde")] pub error_delay: Duration, #[serde(default = "default_retries")] pub retries: usize, } impl Default for EndpointConfig { fn default() -> Self { Self { method: default_method(), username: None, password: None, token: None, tls_insecure: false, tls_certificate: None, headers: Default::default(), timeout: None, error_delay: default_error_delay(), retries: default_retries(), } } } const fn default_error_delay() -> Duration { Duration::from_secs(1) } const fn default_retries() -> usize { 5 } const fn default_method() -> Method { Method::POST } #[cfg(test)] mod test { use super::*; use maplit::*; #[test] fn test_cfg_kafka() { 
let env = convert_args!(hashmap!( "MODE" => "kafka", "BOOTSTRAP_SERVERS" => "bootstrap:9091", "TOPIC" => "topic", "PROPERTIES__FOO_BAR" => "baz", "K_SINK" => "http://localhost", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.mode, Mode::Kafka(KafkaConfig { topic: "topic".to_string(), bootstrap_servers: "bootstrap:9091".into(), properties: convert_args!(hashmap!( "foo_bar" => "baz", )) }) ); } #[test] fn test_cfg_ws() { let env = convert_args!(hashmap!( "MODE" => "ws", "DROGUE_APP" => "app", "DROGUE_ENDPOINT" => "endpoint", "DROGUE_USER" => "user", "DROGUE_TOKEN" => "token", "K_SINK" => "http://localhost", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.mode, Mode::Websocket(WebsocketConfig { drogue_app: "app".into(), drogue_endpoint: "endpoint".into(), drogue_user: "user".into(), drogue_token: "token".into(), }) ); } #[test] fn test_cfg_endpoint() { let env = convert_args!(hashmap!( "MODE" => "ws", "DROGUE_APP" => "app", "DROGUE_ENDPOINT" => "endpoint", "DROGUE_USER" => "user", "DROGUE_TOKEN" => "token", "K_SINK" => "http://localhost", "ENDPOINT__METHOD" => "GET", "ENDPOINT__HEADERS__foo" => "bar", )); let cfg = Config::from_env_source(env).unwrap(); assert_eq!( cfg.endpoint, EndpointConfig { method: Method::GET, username: None, password: None, token: None, tls_insecure: false, tls_certificate: None, headers: convert_args!(hashmap!( "foo" => "bar" )), timeout: None, error_delay: default_error_delay(), retries: 5 } ); } }
let cfg = config::Config::builder() .add_source(env.separator("__")) .build()?; cfg.try_deserialize() }
function_block-function_prefix_line
[ { "content": "use crate::EndpointConfig;\n\nuse anyhow::bail;\n\nuse cloudevents::binding::reqwest::RequestBuilderExt;\n\nuse reqwest::{Certificate, Url};\n\nuse std::time::Duration;\n\nuse thiserror::Error;\n\n\n\npub struct Sender {\n\n config: EndpointConfig,\n\n url: Url,\n\n client: reqwest::Client,\n\n}\n\n\n\n#[derive(Clone, Debug, Error)]\n\npub enum SendError {\n\n #[error(\"Temporary publish error: {0}\")]\n\n Temporary(String),\n\n #[error(\"Permanent publish error: {0}\")]\n\n Permanent(String),\n\n}\n", "file_path": "src/sender.rs", "rank": 4, "score": 6.0677265518018215 }, { "content": "mod config;\n\nmod sender;\n\n\n\nuse crate::{config::*, sender::Sender};\n\nuse anyhow::{bail, Context as AnyhowContext, Result};\n\nuse cloudevents::binding::rdkafka::MessageExt;\n\nuse futures_util::stream::StreamExt;\n\nuse rdkafka::{\n\n config::FromClientConfig,\n\n consumer::{Consumer, StreamConsumer},\n\n util::DefaultRuntime,\n\n};\n\nuse thiserror::Error;\n\nuse tokio_tungstenite::tungstenite::{\n\n connect,\n\n http::{header, Request},\n\n Message,\n\n};\n\n\n\n#[tokio::main]\n", "file_path": "src/main.rs", "rank": 7, "score": 2.6053584690188214 }, { "content": "\n\nimpl Sender {\n\n pub fn new(url: String, config: EndpointConfig) -> anyhow::Result<Self> {\n\n let mut client = reqwest::Client::builder();\n\n\n\n if config.tls_insecure {\n\n client = client\n\n .danger_accept_invalid_certs(true)\n\n .danger_accept_invalid_hostnames(true);\n\n }\n\n if let Some(cert) = &config.tls_certificate {\n\n client = client\n\n .tls_built_in_root_certs(false)\n\n .add_root_certificate(Certificate::from_pem(cert.as_bytes())?);\n\n }\n\n\n\n client = client.timeout(\n\n config\n\n .timeout\n\n .clone()\n", "file_path": "src/sender.rs", "rank": 8, "score": 2.4203375689561657 }, { "content": "# Drogue Cloud Event Source\n\n\n\nConnects to [Drogue Cloud](https://github.com/drogue-iot/drogue-cloud) integration endpoint, consumes cloud events sent by devices and forwards 
them to the next service.\n\n\n\nCurrently, it only connects to the Websocket Endpoint.\n\n\n\nIt can be used in combination with https://github.com/drogue-iot/drogue-postgresql-pusher\n\n\n\n## Configuration\n\n\n\n| Name | Description | Example |\n\n|---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------|\n\n| MODE | The source mode | `kafka` of `ws` |\n\n| K_SINK | The URL of the service to forward events to | http://timescaledb-pusher |\n\n| ENDPOINT__METHOD | The HTTP method to use, defaults to `POST` | `POST` |\n\n| ENDPOINT__USERNAME | Enables basic auth support using the provided username | `foo` |\n\n| ENDPOINT__PASSWORD | Use as password when basic auth is enabled | `bar` |\n\n| ENDPOINT__TOKEN | Enables bearer auth using the provided token | `e234c376f48e` |\n\n| ENDPOINT__HEADERS__* | Additional HTTP headers, prefixed with `ENDPOINT__HEADERS__` | `ENDPOINT_HEADERS_AUTHORIZATION` |\n\n| ENDPOINT__TLS_INSECURE | Disable TLS validations | `false` |\n\n| ENDPOINT__TLS_CERTIFICATE | The certificate of the (only) trust anchor to use for TLS. By default it will use the system's trust anchors. The certificate must in the PEM PKCS#1 format. | |\n\n| ENDPOINT__TIMEOUT | The timeout of the send operation | `15s` |\n\n| ENDPOINT__ERROR_DELAY | The delay before re-trying a failed operation | `1s` |\n\n| ENDPOINT__RETRIES | The number of re-tries before giving up | 5 |\n\n\n", "file_path": "README.md", "rank": 9, "score": 2.0779276806029507 }, { "content": "### Websocket\n\n\n\nThe following options are available for the `ws` mode. 
\n\n\n\n| Name | Description | Example |\n\n|-----------------|---------------------------------------|---------------------------------------------|\n\n| DROGUE_ENDPOINT | The URL of the endpoint to connect to | `wss://ws-integration.sandbox.drogue.cloud` |\n\n| DROGUE_APP | Drogue application to use | `drogue-public-temperature` |\n\n| DROGUE_USER | Drogue cloud user | |\n\n| DROGUE_TOKEN | Access token for Drogue cloud | Use `drg admin tokens create` to create one |\n\n\n\n## Building locally\n\n\n\nYou can build the image locally using:\n\n\n\n```shell\n\npodman build . -t drogue-event-source\n\n```\n", "file_path": "README.md", "rank": 10, "score": 2.047067368514788 }, { "content": "async fn main() -> Result<()> {\n\n env_logger::init();\n\n dotenv::dotenv().ok();\n\n log::info!(\"Starting Drogue Event Source!\");\n\n\n\n let config = Config::from_env()?;\n\n\n\n if config.endpoint.username.is_some() & config.endpoint.token.is_some() {\n\n bail!(\"You must not provide both basic auth and bearer auth\");\n\n }\n\n\n\n let sender = Sender::new(config.k_sink, config.endpoint)?;\n\n\n\n match config.mode {\n\n Mode::Websocket(config) => {\n\n log::info!(\"Using WebSocket mode\");\n\n websocket(config, sender).await\n\n }\n\n Mode::Kafka(config) => {\n\n log::info!(\"Using Kafka mode\");\n", "file_path": "src/main.rs", "rank": 12, "score": 1.7051860141855146 }, { "content": " .unwrap_or_else(|| Duration::from_secs(5)),\n\n );\n\n\n\n let client = client.build()?;\n\n\n\n let url = Url::parse(&url)?;\n\n\n\n Ok(Sender {\n\n config,\n\n url,\n\n client,\n\n })\n\n }\n\n\n\n pub async fn send_once(&self, event: cloudevents::Event) -> Result<(), SendError> {\n\n let mut request = self\n\n .client\n\n .request(self.config.method.clone(), self.url.clone());\n\n\n\n if let Some(username) = &self.config.username {\n", "file_path": "src/sender.rs", "rank": 13, "score": 1.4362362582080024 }, { "content": " kafka(config, sender).await\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, 
Error)]\n\npub enum KafkaError {\n\n #[error(\"Failed to receive: {0}\")]\n\n Receive(#[from] rdkafka::error::KafkaError),\n\n #[error(\"Failed to parse event: {0}\")]\n\n Event(#[from] cloudevents::message::Error),\n\n}\n\n\n\nasync fn kafka(config: KafkaConfig, sender: Sender) -> Result<()> {\n\n let mut kafka_config = rdkafka::ClientConfig::new();\n\n\n\n kafka_config.set(\"bootstrap.servers\", config.bootstrap_servers);\n\n kafka_config.extend(\n\n config\n\n .properties\n", "file_path": "src/main.rs", "rank": 15, "score": 1.2149430583028418 }, { "content": " ))),\n\n Err(err) => Err(SendError::Temporary(format!(\n\n \"Failed to perform request: {err}\"\n\n ))),\n\n }\n\n }\n\n\n\n pub async fn send(&self, event: cloudevents::Event) -> anyhow::Result<()> {\n\n let mut attempts = 1;\n\n loop {\n\n match self.send_once(event.clone()).await {\n\n Ok(_) => break Ok(()),\n\n Err(SendError::Temporary(reason)) => {\n\n log::info!(\n\n \"Received temporary error, retrying: {reason}, attempt: {0}\",\n\n attempts\n\n );\n\n }\n\n Err(SendError::Permanent(reason)) => {\n\n log::info!(\"Received permanent error, skipping event: {reason}\");\n", "file_path": "src/sender.rs", "rank": 16, "score": 1.1827828008771784 } ]
Rust
src/lib.rs
DomWilliams0/panik-rs
493ac556522b245c54e070bc44119fc1c99a488c
use backtrace::Backtrace; use std::borrow::Cow; use std::fmt::Debug; use std::panic::{PanicInfo, UnwindSafe}; use std::thread::ThreadId; use std::cmp::Ordering; use std::ops::DerefMut; #[cfg(feature = "use-parking-lot")] use parking_lot::Mutex; #[cfg(not(feature = "use-parking-lot"))] use std::sync::Mutex; const DEFAULT_BACKTRACE_RESOLUTION_LIMIT: usize = 8; lazy_static::lazy_static! { static ref STATE: Mutex<State> = Mutex::new(State::default()); } macro_rules! log_warn { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::warn!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::warn!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } macro_rules! log_error { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::error!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::error!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } macro_rules! log_crit { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::crit!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::error!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } struct State { panics: Vec<Panic>, backtrace_resolution_limit: usize, is_running: bool, #[cfg(feature = "use-slog")] slogger: slog::Logger, } #[derive(Debug, Clone)] pub struct Panic { message: String, thread_id: ThreadId, thread: String, backtrace: Backtrace, backtrace_resolved: bool, } #[derive(Clone)] pub struct Builder { #[cfg(feature = "use-slog")] slogger: Option<slog::Logger>, backtrace_resolution_limit: usize, } struct GlobalStateGuard; impl Builder { pub fn new() -> Self { Builder { #[cfg(feature = "use-slog")] slogger: None, backtrace_resolution_limit: DEFAULT_BACKTRACE_RESOLUTION_LIMIT, } } #[cfg(feature = "use-slog")] pub fn slogger(mut self, slogger: impl Into<slog::Logger>) -> Self { self.slogger = Some(slogger.into()); self } pub fn backtrace_resolution_limit(mut self, n: usize) -> Self { self.backtrace_resolution_limit = 
n; self } fn apply_settings(&mut self) { let mut state = state_mutex(); #[cfg(feature = "use-slog")] { state.slogger = self.slogger.take().unwrap_or_else(default_slogger); } state.backtrace_resolution_limit = self.backtrace_resolution_limit; } pub fn run_and_handle_panics<R: Debug>( mut self, do_me: impl FnOnce() -> R + UnwindSafe, ) -> Option<R> { self.apply_settings(); run_and_handle_panics(do_me) } pub fn run_and_handle_panics_no_debug<R>( mut self, do_me: impl FnOnce() -> R + UnwindSafe, ) -> Option<R> { self.apply_settings(); run_and_handle_panics_no_debug(do_me) } } impl Default for Builder { fn default() -> Self { Self::new() } } fn register_panic(panic: &PanicInfo) { let (thread, tid) = { let t = std::thread::current(); let name = t.name().unwrap_or("<unnamed>"); (format!("{:?} ({})", t.id(), name), t.id()) }; let message = panic .payload() .downcast_ref::<&str>() .map(|s| Cow::Borrowed(*s)) .unwrap_or_else(|| Cow::from(format!("{}", panic))); let backtrace = Backtrace::new_unresolved(); let mut state = state_mutex(); log_error!(&state, "handling panic on thread {}: '{}'", thread, message); state.panics.push(Panic { message: message.into_owned(), thread_id: tid, thread, backtrace, backtrace_resolved: false, }); } fn state_mutex() -> impl DerefMut<Target = State> { #[cfg(feature = "use-parking-lot")] return STATE.lock(); #[cfg(not(feature = "use-parking-lot"))] STATE.lock().unwrap() } pub fn run_and_handle_panics_no_debug<R>(do_me: impl FnOnce() -> R + UnwindSafe) -> Option<R> { run_and_handle_panics_with_maybe_debug(do_me, |_| Cow::Borrowed("<unprintable>")) } pub fn run_and_handle_panics<R: Debug>(do_me: impl FnOnce() -> R + UnwindSafe) -> Option<R> { run_and_handle_panics_with_maybe_debug(do_me, |res| Cow::Owned(format!("{:?}", res))) } fn run_and_handle_panics_with_maybe_debug<R>( do_me: impl FnOnce() -> R + UnwindSafe, format_swallowed: impl FnOnce(R) -> Cow<'static, str>, ) -> Option<R> { let _guard = GlobalStateGuard::init(); let result = 
std::panic::catch_unwind(|| do_me()); let mut state = state_mutex(); match (result, state.panics.is_empty()) { (Ok(res), true) => { return Some(res); } (Ok(res), false) => { let swallowed = format_swallowed(res); log_warn!( &state, "panic occurred in another thread, swallowing unpanicked result: {}", swallowed ); } (Err(_), false) => {} (Err(_), true) => unreachable!(), }; log_error!( &state, "{count} threads panicked", count = state.panics.len() ); let backtrace_resolution_limit = state.backtrace_resolution_limit; let mut panics = std::mem::take(&mut state.panics); debug_assert!(!panics.is_empty(), "panics vec should not be empty"); for ( i, Panic { message, thread, ref mut backtrace, backtrace_resolved, .. }, ) in panics.iter_mut().enumerate() { match i.cmp(&backtrace_resolution_limit) { Ordering::Less => { backtrace.resolve(); *backtrace_resolved = true; } Ordering::Equal => { #[cfg(feature = "use-log")] log::warn!( "handling more than {limit} panics, no longer resolving backtraces", limit = backtrace_resolution_limit ); #[cfg(feature = "use-stderr")] eprintln!( "handling more than {limit} panics, no longer resolving backtraces", limit = backtrace_resolution_limit ); } _ => {} }; if *backtrace_resolved { log_crit!( &state, "panic on thread {:?}: {:?}\n{:?}", thread, message, backtrace ); } else { log_crit!(&state, "panic on thread {:?}: {:?}", thread, message,); } } let empty = std::mem::replace(&mut state.panics, panics); debug_assert!(empty.is_empty()); std::mem::forget(empty); None } pub fn panics() -> Vec<Panic> { let state = state_mutex(); state.panics.clone() } pub fn has_panicked() -> bool { !state_mutex().panics.is_empty() } impl Panic { pub fn is_backtrace_resolved(&self) -> bool { self.backtrace_resolved } pub fn message(&self) -> &str { &self.message } pub fn thread_id(&self) -> ThreadId { self.thread_id } pub fn thread_name(&self) -> &str { &self.thread } pub fn backtrace(&self) -> &Backtrace { &self.backtrace } } impl GlobalStateGuard { fn init() -> 
Self { let mut state = state_mutex(); if state.is_running { drop(state); panic!("nested calls to panik::run_and_handle_panics are not supported") } state.panics.clear(); state.is_running = true; std::panic::set_hook(Box::new(|panic| { register_panic(panic); })); Self } } impl Drop for GlobalStateGuard { fn drop(&mut self) { let _ = std::panic::take_hook(); let mut state = state_mutex(); state.backtrace_resolution_limit = DEFAULT_BACKTRACE_RESOLUTION_LIMIT; state.is_running = false; #[cfg(feature = "use-slog")] { state.slogger = default_slogger(); } } } impl Default for State { fn default() -> Self { State { panics: Vec::new(), backtrace_resolution_limit: DEFAULT_BACKTRACE_RESOLUTION_LIMIT, is_running: false, #[cfg(feature = "use-slog")] slogger: default_slogger(), } } } #[cfg(feature = "use-slog")] fn default_slogger() -> slog::Logger { use slog::Drain; slog::Logger::root(slog_stdlog::StdLog.fuse(), slog::o!()) }
use backtrace::Backtrace; use std::borrow::Cow; use std::fmt::Debug; use std::panic::{PanicInfo, UnwindSafe}; use std::thread::ThreadId; use std::cmp::Ordering; use std::ops::DerefMut; #[cfg(feature = "use-parking-lot")] use parking_lot::Mutex; #[cfg(not(feature = "use-parking-lot"))] use std::sync::Mutex; const DEFAULT_BACKTRACE_RESOLUTION_LIMIT: usize = 8; lazy_static::lazy_static! { static ref STATE: Mutex<State> = Mutex::new(State::default()); } macro_rules! log_warn { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::warn!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::warn!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } macro_rules! log_error { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::error!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::error!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } macro_rules! log_crit { ($state:expr, $($arg:tt)+) => { #[cfg(feature = "use-slog")] slog::crit!(&$state.slogger, $($arg)+); #[cfg(feature = "use-log")] log::error!($($arg)+); #[cfg(feature = "use-stderr")] eprintln!($($arg)+); } } struct State { panics: Vec<Panic>, backtrace_resolution_limit: usize, is_running: bool, #[cfg(feature = "use-slog")] slogger: slog::Logger, } #[derive(Debug, Clone)] pub struct Panic { message: String, thread_id: ThreadId, thread: String, backtrace: Backtrace, backtrace_resolved: bool, } #[derive(Clone)] pub struct Builder { #[cfg(feature = "use-slog")] slogger: Option<slog::Logger>, backtrace_resolution_limit: usize, } struct GlobalStateGuard; impl Builder { pub fn new() -> Self { Builder { #[cfg(feature = "use-slog")] slogger: None, backtrace_resolution_limit: DEFAULT_BACKTRACE_RESOLUTION_LIMIT, } } #[cfg(feature = "use-slog")] pub fn slogger(mut self, slogger: impl Into<slog::Logger>) -> Self { self.slogger = Some(slogger.into()); self } pub fn backtrace_resolution_limit(mut self, n: usize) -> Self { self.backtrace_resolution_limit = 
n; self } fn apply_settings(&mut self) { let mut state = state_mutex(); #[cfg(feature = "use-slog")] { state.slogger = self.slogger.take().unwrap_or_else(default_slogger); } state.backtrace_resolution_limit = self.backtrace_resolution_limit; } pub fn run_and_handle_panics<R: Debug>( mut self, do_me: impl FnOnce() -> R + UnwindSafe, ) -> Option<R> { self.apply_settings(); run_and_handle_panics(do_me) } pub fn run_and_handle_panics_no_debug<R>( mut self, do_me: impl FnOnce() -> R + UnwindSafe, ) -> Option<R> { self.apply_settings(); run_and_handle_panics_no_debug(do_me) } } impl Default for Builder { fn default() -> Self { Self::new() } } fn register_panic(panic: &PanicInfo) { let (thread, tid) = { let t = std::thread::current(); let name = t.name().unwrap_or("<unnamed>"); (format!("{:?} ({})", t.id(), name), t.id()) }; let message = panic .payload() .downcast_ref::<&str>() .map(|s| Cow::Borrowed(*s)) .unwrap_or_else(|| Cow::from(format!("{}", panic))); let backtrace = Backtrace::new_unresolved(); let mut state = state_mutex(); log_error!(&state, "handling panic on thread {}: '{}'", thread, message); state.panics.push(Panic { message: message.into_owned(), thread_id: tid, thread, backtrace, backtrace_resolved: false, }); } fn state_mutex() -> impl DerefMut<Target = State> { #[cfg(feature = "use-parking-lot")] return STATE.lock(); #[cfg(not(feature = "use-parking-lot"))] STATE.lock().unwrap() } pub fn run_and_handle_panics_no_debug<R>(do_me: impl FnOnce() -> R + UnwindSafe) -> Option<R> { run_and_handle_panics_with_maybe_debug(do_me, |_| Cow::Borrowed("<unprintable>")) } pub fn run_and_handle_panics<R: Debug>(do_me: impl FnOnce() -> R + UnwindSafe) -> Option<R> { run_and_handle_panics_with_maybe_debug(do_me, |res| Cow::Owned(format!("{:?}", res))) } fn run_and_handle_panics_with_maybe_debug<R>( do_me: impl FnOnce() -> R + UnwindSafe, format_swallowed: impl FnOnce(R) -> Cow<'static, str>, ) -> Option<R> { let _guard = GlobalStateGuard::init(); let result = 
std::panic::catch_unwind(|| do_me()); let mut state = state_mutex(); match (result, state.panics.is_empty()) { (Ok(res), true) => { return Some(res); } (Ok(res), false) => { let swallowed = format_swallowed(res); log_warn!( &state, "panic occurred in another thread, swallowing unpanicked result: {}", swallowed ); } (Err(_), false) => {} (Err(_), true) => unreachable!(), }; log_error!( &state, "{count} threads panicked", count = state.panics.len() ); let backtrace_resolution_limit = state.backtrace_resolution_limit; let mut panics = std::mem::take(&mut state.panics); debug_assert!(!panics.is_empty(), "panics vec should not be empty"); for ( i, Panic { message, thread, ref mut backtrace, backtrace_resolved, .. }, ) in panics.iter_mut().enumerate() {
; if *backtrace_resolved { log_crit!( &state, "panic on thread {:?}: {:?}\n{:?}", thread, message, backtrace ); } else { log_crit!(&state, "panic on thread {:?}: {:?}", thread, message,); } } let empty = std::mem::replace(&mut state.panics, panics); debug_assert!(empty.is_empty()); std::mem::forget(empty); None } pub fn panics() -> Vec<Panic> { let state = state_mutex(); state.panics.clone() } pub fn has_panicked() -> bool { !state_mutex().panics.is_empty() } impl Panic { pub fn is_backtrace_resolved(&self) -> bool { self.backtrace_resolved } pub fn message(&self) -> &str { &self.message } pub fn thread_id(&self) -> ThreadId { self.thread_id } pub fn thread_name(&self) -> &str { &self.thread } pub fn backtrace(&self) -> &Backtrace { &self.backtrace } } impl GlobalStateGuard { fn init() -> Self { let mut state = state_mutex(); if state.is_running { drop(state); panic!("nested calls to panik::run_and_handle_panics are not supported") } state.panics.clear(); state.is_running = true; std::panic::set_hook(Box::new(|panic| { register_panic(panic); })); Self } } impl Drop for GlobalStateGuard { fn drop(&mut self) { let _ = std::panic::take_hook(); let mut state = state_mutex(); state.backtrace_resolution_limit = DEFAULT_BACKTRACE_RESOLUTION_LIMIT; state.is_running = false; #[cfg(feature = "use-slog")] { state.slogger = default_slogger(); } } } impl Default for State { fn default() -> Self { State { panics: Vec::new(), backtrace_resolution_limit: DEFAULT_BACKTRACE_RESOLUTION_LIMIT, is_running: false, #[cfg(feature = "use-slog")] slogger: default_slogger(), } } } #[cfg(feature = "use-slog")] fn default_slogger() -> slog::Logger { use slog::Drain; slog::Logger::root(slog_stdlog::StdLog.fuse(), slog::o!()) }
match i.cmp(&backtrace_resolution_limit) { Ordering::Less => { backtrace.resolve(); *backtrace_resolved = true; } Ordering::Equal => { #[cfg(feature = "use-log")] log::warn!( "handling more than {limit} panics, no longer resolving backtraces", limit = backtrace_resolution_limit ); #[cfg(feature = "use-stderr")] eprintln!( "handling more than {limit} panics, no longer resolving backtraces", limit = backtrace_resolution_limit ); } _ => {} }
if_condition
[ { "content": "pub fn panik_builder() -> panik::Builder {\n\n #[cfg(feature = \"use-log\")]\n\n env_logger::builder()\n\n .is_test(true)\n\n .filter_level(log::LevelFilter::Debug)\n\n .init();\n\n\n\n #[cfg(feature = \"use-slog\")]\n\n {\n\n use slog::{slog_o, Drain};\n\n\n\n let plain = slog_term::PlainSyncDecorator::new(std::io::stderr());\n\n let log = slog::Logger::root(slog_term::FullFormat::new(plain).build().fuse(), slog_o!());\n\n let guard = slog_scope::set_global_logger(log);\n\n\n\n let builder = panik::Builder::default().slogger(slog_scope::logger());\n\n std::mem::forget(guard);\n\n\n\n return builder;\n\n }\n\n\n\n panik::Builder::default()\n\n}\n", "file_path": "tests/setup.rs", "rank": 6, "score": 83475.81382737654 }, { "content": "#[test]\n\nfn non_debug_swallowed() {\n\n struct MyOpaque(i32);\n\n\n\n let result = setup::panik_builder().run_and_handle_panics_no_debug(|| {\n\n let _ = std::thread::spawn(|| panic!(\"oh no\")).join();\n\n MyOpaque(100)\n\n });\n\n\n\n assert!(result.is_none());\n\n assert!(panik::has_panicked());\n\n}\n", "file_path": "tests/non_debug_swallowed.rs", "rank": 9, "score": 73356.10222286965 }, { "content": "#[test]\n\nfn no_panic() {\n\n let result = setup::panik_builder().run_and_handle_panics(|| \"nice\");\n\n\n\n assert_eq!(result.to_owned(), Some(\"nice\"));\n\n assert!(!panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert!(panics.is_empty());\n\n}\n", "file_path": "tests/no_panic.rs", "rank": 12, "score": 58323.11903784306 }, { "content": "#[test]\n\nfn other_thread() {\n\n let tid = Arc::new(Mutex::new(None));\n\n let tid_2 = tid.clone();\n\n\n\n let result = setup::panik_builder().run_and_handle_panics(move || {\n\n let thread = std::thread::spawn(move || {\n\n let mut tid = tid_2.lock().unwrap();\n\n *tid = Some(std::thread::current().id());\n\n drop(tid); // avoid poison\n\n panic!(\"teehee\")\n\n });\n\n\n\n let _ = thread.join();\n\n\n\n 5\n\n });\n\n\n\n assert!(result.is_none());\n\n 
assert!(panik::has_panicked());\n\n\n", "file_path": "tests/other_thread.rs", "rank": 13, "score": 58169.315821929864 }, { "content": "#[test]\n\nfn max_backtraces() {\n\n let result = setup::panik_builder()\n\n .backtrace_resolution_limit(3)\n\n .run_and_handle_panics(move || {\n\n for _ in 0..5 {\n\n let thread = std::thread::spawn(|| panic!(\"uh oh\"));\n\n let _ = thread.join();\n\n }\n\n\n\n \"epic\"\n\n });\n\n\n\n assert!(result.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert_eq!(panics.len(), 5);\n\n\n\n let resolved_count = panics.iter().filter(|p| p.is_backtrace_resolved()).count();\n\n let unresolved_count = panics.iter().filter(|p| !p.is_backtrace_resolved()).count();\n\n\n\n assert_eq!(resolved_count, 3);\n\n assert_eq!(unresolved_count, 2);\n\n}\n", "file_path": "tests/max_backtraces.rs", "rank": 14, "score": 54800.666870882604 }, { "content": "#[test]\n\nfn non_debug() {\n\n struct MyOpaque(i32);\n\n\n\n let result = setup::panik_builder().run_and_handle_panics_no_debug(|| MyOpaque(100));\n\n\n\n assert!(result.is_some());\n\n assert!(!panik::has_panicked());\n\n\n\n let opaque = result.unwrap();\n\n assert_eq!(opaque.0, 100);\n\n}\n", "file_path": "tests/non_debug.rs", "rank": 15, "score": 54125.93584390644 }, { "content": "#[test]\n\nfn main_thread() {\n\n let result = setup::panik_builder().run_and_handle_panics(|| panic!(\"oh no\"));\n\n assert!(result.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert_eq!(panics.len(), 1);\n\n\n\n let panic = &panics[0];\n\n assert_eq!(panic.thread_id(), std::thread::current().id());\n\n assert_eq!(panic.message(), \"oh no\");\n\n assert!(panic.is_backtrace_resolved());\n\n}\n", "file_path": "tests/main_thread.rs", "rank": 16, "score": 53984.466982165366 }, { "content": "#[test]\n\nfn multiple_threads() {\n\n let result = setup::panik_builder().run_and_handle_panics(move || {\n\n for _ in 0..4 {\n\n let thread = 
std::thread::spawn(|| panic!(\"uh oh\"));\n\n let _ = thread.join();\n\n }\n\n\n\n panic!(\"me too\")\n\n });\n\n\n\n assert!(result.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let mut panics = panik::panics();\n\n assert_eq!(panics.len(), 5);\n\n\n\n let main_idx = panics\n\n .iter()\n\n .enumerate()\n\n .find_map(|(idx, p)| {\n", "file_path": "tests/multiple_threads.rs", "rank": 17, "score": 53984.466982165366 }, { "content": "mod setup;\n\n\n\n#[test]\n", "file_path": "tests/non_debug_swallowed.rs", "rank": 18, "score": 37327.86890910583 }, { "content": "#[test]\n\nfn nested() {\n\n let outer = setup::panik_builder().run_and_handle_panics(|| {\n\n // naughty\n\n panik::run_and_handle_panics(|| 5);\n\n\n\n 0\n\n });\n\n assert!(outer.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert_eq!(panics.len(), 1);\n\n assert_eq!(\n\n panics[0].message(),\n\n \"nested calls to panik::run_and_handle_panics are not supported\"\n\n );\n\n}\n", "file_path": "tests/nested.rs", "rank": 19, "score": 32426.73478048168 }, { "content": "#[test]\n\nfn multiple_usage() {\n\n let builder = setup::panik_builder();\n\n\n\n let a = builder\n\n .clone()\n\n .run_and_handle_panics(|| panic!(\"numero one\"));\n\n assert!(a.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert_eq!(panics.len(), 1);\n\n assert_eq!(panics[0].message(), \"numero one\");\n\n\n\n let b = builder.clone().run_and_handle_panics(|| 1);\n\n assert_eq!(b, Some(1));\n\n assert!(!panik::has_panicked());\n\n assert!(panik::panics().is_empty());\n\n\n\n let c = builder.run_and_handle_panics(|| panic!(\"numero two\"));\n\n assert!(c.is_none());\n\n assert!(panik::has_panicked());\n\n\n\n let panics = panik::panics();\n\n assert_eq!(panics.len(), 1);\n\n assert_eq!(panics[0].message(), \"numero two\");\n\n}\n", "file_path": "tests/multiple_usage.rs", "rank": 20, "score": 30306.329847072506 }, { "content": "mod 
setup;\n\n\n\n#[test]\n", "file_path": "tests/no_panic.rs", "rank": 21, "score": 21006.396752369645 }, { "content": " let panics = panik::panics();\n\n assert_eq!(panics.len(), 1);\n\n\n\n let panic_tid = {\n\n let tid = tid.lock().unwrap();\n\n tid.expect(\"tid not set\")\n\n };\n\n\n\n let panic = &panics[0];\n\n assert_eq!(panic.thread_id(), panic_tid);\n\n assert_ne!(panic.thread_id(), std::thread::current().id());\n\n assert_eq!(panic.message(), \"teehee\");\n\n assert!(panic.is_backtrace_resolved());\n\n}\n", "file_path": "tests/other_thread.rs", "rank": 22, "score": 20892.30901473231 }, { "content": "mod setup;\n\n\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[test]\n", "file_path": "tests/other_thread.rs", "rank": 23, "score": 20883.89680360695 }, { "content": "mod setup;\n\n\n\n#[test]\n", "file_path": "tests/max_backtraces.rs", "rank": 24, "score": 20174.606124014546 }, { "content": "mod setup;\n\n\n\n#[test]\n", "file_path": "tests/non_debug.rs", "rank": 25, "score": 19618.868171374157 }, { "content": " if p.thread_id() == std::thread::current().id() {\n\n Some(idx)\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap();\n\n\n\n let main = panics.remove(main_idx);\n\n assert_eq!(main.message(), \"me too\");\n\n assert!(panics.iter().all(|p| p.message() == \"uh oh\"));\n\n}\n", "file_path": "tests/multiple_threads.rs", "rank": 26, "score": 19510.92441280826 }, { "content": "mod setup;\n\n\n\n#[test]\n", "file_path": "tests/multiple_threads.rs", "rank": 27, "score": 19502.34823610648 }, { "content": "mod setup;\n\n\n\n#[test]\n", "file_path": "tests/main_thread.rs", "rank": 28, "score": 19502.34823610648 }, { "content": "```\n\n\n\nThe same example detecting and handling panics and exiting gracefully:\n\n```rust\n\nlet application_result = panik::run_and_handle_panics(|| {\n\n let (tx, rx) = std::sync::mpsc::channel();\n\n let worker = std::thread::spawn(move || {\n\n // do some work...\n\n // tx.send(5).unwrap();\n\n\n\n // ...or panic and hold up the main thread 
forever\n\n todo!()\n\n });\n\n\n\n // periodically check if a panic has occurred\n\n let poll_freq = Duration::from_secs(5);\n\n while !panik::has_panicked() {\n\n if let Ok(res) = rx.recv_timeout(poll_freq) {\n\n return res;\n\n }\n\n }\n\n\n\n // return value is irrelevant here, the panic on the worker\n\n // thread will clobber this when `run_and_handle_panics`\n\n // returns None\n\n 0\n\n});\n\n\n\nmatch application_result {\n\n None => {\n\n eprintln!(\"something went wrong: {:?}\", panik::panics());\n\n std::process::exit(1);\n\n },\n\n Some(result) => {\n\n println!(\"result: {}\", result);\n\n std::process::exit(0);\n\n }\n\n}\n\n```\n\n\n\nThis looks pretty heavyweight, but this intentional - this library is meant for large\n\nand heavyweight applications!\n\n\n", "file_path": "README.md", "rank": 38, "score": 14.966614866470287 }, { "content": "# panik-rs\n\n\n\n![Build Status](https://img.shields.io/github/workflow/status/DomWilliams0/panik-rs/Build%20and%20test)\n\n[![Documentation](https://docs.rs/panik/badge.svg)](https://docs.rs/panik)\n\n[![Version](https://img.shields.io/crates/v/panik)](https://crates.io/crates/panik)\n\n[![License](https://img.shields.io/crates/l/panik)](https://github.com/DomWilliams0/panik-rs/blob/master/LICENSE)\n\n\n\n<p align=\"center\"> <img src=\"panik.jpg\" width=256/> </p>\n\n\n\nThis crate enables **application-wide panic handling**, whereby panics occurring in any thread\n\nare captured and stored, and can later be queried to trigger an early application exit.\n\n\n\nThis goes against the standard panic behaviour where a panic is isolated to the thread that\n\ncaused it. This library introduces the condition that *any panic in any thread is an error*\n\nand the application cannot continue or recover.\n\n\n\n# Use case\n\n\n\nThe main use case for this crate is when a thread spawns some threads to do work, and blocks on\n\ntheir completion. 
If a worker thread panics before the result is posted, the waiting thread might get stuck in\n\na blocking call to `recv`, unless it specifically plans and checks for this error case (e.g. poisoned\n\nmutex, disconnected mpsc sender).\n\n\n\nIn a large application with thread pools and lots of types of work being posted to it all over\n\nthe place (like a game engine), it can be hard to handle every panic case properly. Using\n\nthis library allows the main thread to poll for panics in its core game loop and exit\n\ngracefully, rather than soldiering on without its audio/rendering/AI/worker threads.\n\n\n\n\n\nAn example that doesn't use panic detection and hangs forever:\n\n```rust\n\nlet (tx, rx) = std::sync::mpsc::channel();\n\nlet worker = std::thread::spawn(move || {\n\n // hopefully do some work...\n\n // tx.send(5).unwrap();\n\n\n\n // ...or panic and hold up the main thread forever\n\n todo!()\n\n});\n\n\n\nlet result: i32 = rx.recv().expect(\"recv failed\"); // blocks forever\n\nprintln!(\"result: {}\", result);\n", "file_path": "README.md", "rank": 46, "score": 8.572070448690571 }, { "content": "# Features\n\n* `use-stderr`: log panics to stderr\n\n* `use-log`: log panics with the `log` crate\n\n* `use-slog`: log panics with the `slog` crate (see `Builder::slogger`)\n\n* `use-parking-lot`: use `parking_lot::Mutex` instead of `std::sync::Mutex`\n", "file_path": "README.md", "rank": 47, "score": 8.179407572373426 } ]
Rust
2021/src/day19.rs
shrugalic/advent_of_code
8d18a3dbdcf847a667ab553f5441676003b9362a
use std::collections::{HashMap, HashSet, VecDeque}; const INPUT: &str = include_str!("../input/day19.txt"); pub(crate) fn day19_part1() -> usize { System::from(INPUT).beacon_count() } pub(crate) fn day19_part2() -> usize { System::from(INPUT).max_manhattan_distance() } type Coordinate = isize; type Vector = [Coordinate; 3]; #[derive(Debug)] struct System { scanners: Vec<Scanner>, } impl From<&str> for System { fn from(input: &str) -> Self { let scanners = input.trim().split("\n\n").map(Scanner::from).collect(); System { scanners } } } impl System { fn beacon_count(self) -> usize { self.align_beacons().0 } fn max_manhattan_distance(self) -> usize { self.align_beacons().1 } fn align_beacons(mut self) -> (usize, usize) { let mut distances_to_ref = vec![[0, 0, 0]; self.scanners.len()]; let reference = self.scanners.remove(0); let mut unique_beacons: HashSet<Vector> = reference.beacons.iter().cloned().collect(); let mut aligned = VecDeque::new(); aligned.push_back(reference); let mut unaligned: Vec<Scanner> = self.scanners.drain(..).collect(); while let Some(reference) = aligned.pop_front() { let mut still_unaligned = vec![]; while let Some(mut scanner) = unaligned.pop() { if let Some(offset) = scanner.align_with(&reference.beacons) { unique_beacons.extend(scanner.beacons.clone()); distances_to_ref[scanner.id] = offset; aligned.push_back(scanner); } else { still_unaligned.push(scanner); } } unaligned.append(&mut still_unaligned); aligned.push_back(reference); if unaligned.is_empty() { break; } } let distances_between_scanners = Scanner::offsets_between(&distances_to_ref) .into_iter() .map(|(d, _)| d); let max_manhattan_distance = distances_between_scanners .map(|[a, b, c]| (a.abs() + b.abs() + c.abs()) as usize) .max() .unwrap(); (unique_beacons.len(), max_manhattan_distance) } } #[derive(Debug)] struct Scanner { id: usize, beacons: Vec<Vector>, } impl From<&str> for Scanner { fn from(lines: &str) -> Self { let to_position = |line: &str| { let pos: Vec<Coordinate> = 
line.split(',').map(|n| n.parse().unwrap()).collect(); [pos[0], pos[1], pos[2]] }; let mut lines = lines.trim().lines(); let header = lines.next().unwrap(); let id = header .trim_start_matches("--- scanner ") .trim_end_matches(" ---") .parse() .unwrap(); let beacons: Vec<_> = lines.map(to_position).collect(); Scanner { id, beacons } } } impl Scanner { fn align_with(&mut self, ref_beacons: &[Vector]) -> Option<Vector> { for orientation in Orientation::all() { let mut offset_frequencies: HashMap<Vector, usize> = HashMap::new(); let aligned_beacons = self.aligned_beacons(&orientation); for own_beacon in &aligned_beacons { for ref_beacon in ref_beacons { let offset = Scanner::offset_between(ref_beacon, own_beacon); *offset_frequencies.entry(offset).or_default() += 1; } } if let Some((offset, _)) = offset_frequencies .into_iter() .find(|(_, count)| *count >= 12) { self.beacons = Scanner::translate_beacons(&aligned_beacons, offset); return Some(offset); } } None } fn aligned_beacons(&self, orientation: &Orientation) -> Vec<Vector> { self.beacons .iter() .map(|pos| orientation.align(*pos)) .collect() } fn translate_beacons(beacons: &[Vector], offset: Vector) -> Vec<Vector> { beacons .iter() .map(|pos| [pos[0] + offset[0], pos[1] + offset[1], pos[2] + offset[2]]) .collect() } fn offset_between(a: &Vector, b: &Vector) -> Vector { [a[0] - b[0], a[1] - b[1], a[2] - b[2]] } fn offsets_between(beacons: &[Vector]) -> HashMap<Vector, Vec<Vector>> { let mut distances: HashMap<Vector, Vec<Vector>> = HashMap::new(); for (i, a) in beacons.iter().enumerate().take(beacons.len() - 1) { for b in beacons.iter().skip(i + 1) { let diff = Scanner::offset_between(a, b); let positions = distances.entry(diff).or_default(); positions.push(*a); positions.push(*b); } } assert_eq!(distances.len(), beacons.len() * (beacons.len() - 1) / 2); distances } } #[derive(Debug, Copy, Clone)] struct Orientation { index: [usize; 3], multi: [isize; 3], } impl Orientation { fn new(index: [usize; 3], multi: 
[isize; 3]) -> Self { Orientation { index, multi } } fn all() -> Vec<Orientation> { vec![ Orientation::new([0, 1, 2], [1, 1, 1]), Orientation::new([0, 2, 1], [1, 1, -1]), Orientation::new([0, 1, 2], [1, -1, -1]), Orientation::new([0, 2, 1], [1, -1, 1]), Orientation::new([0, 2, 1], [-1, 1, 1]), Orientation::new([0, 1, 2], [-1, 1, -1]), Orientation::new([0, 2, 1], [-1, -1, -1]), Orientation::new([0, 1, 2], [-1, -1, 1]), Orientation::new([1, 2, 0], [1, 1, 1]), Orientation::new([1, 0, 2], [1, 1, -1]), Orientation::new([1, 2, 0], [1, -1, -1]), Orientation::new([1, 0, 2], [1, -1, 1]), Orientation::new([1, 0, 2], [-1, 1, 1]), Orientation::new([1, 2, 0], [-1, 1, -1]), Orientation::new([1, 0, 2], [-1, -1, -1]), Orientation::new([1, 2, 0], [-1, -1, 1]), Orientation::new([2, 0, 1], [1, 1, 1]), Orientation::new([2, 1, 0], [1, 1, -1]), Orientation::new([2, 0, 1], [1, -1, -1]), Orientation::new([2, 1, 0], [1, -1, 1]), Orientation::new([2, 1, 0], [-1, 1, 1]), Orientation::new([2, 0, 1], [-1, 1, -1]), Orientation::new([2, 1, 0], [-1, -1, -1]), Orientation::new([2, 0, 1], [-1, -1, 1]), ] } fn align(&self, mut pos: Vector) -> Vector { pos = [pos[self.index[0]], pos[self.index[1]], pos[self.index[2]]]; (0..3).for_each(|i| pos[i] *= self.multi[i]); pos } } #[cfg(test)] mod tests { use super::*; #[test] fn test_orientations() { let pos = [5, 6, -4]; let mut orientations: Vec<_> = Orientation::all() .into_iter() .map(|o| o.align(pos)) .collect(); orientations.sort_unstable(); assert_eq!( 24, orientations .iter() .cloned() .collect::<HashSet<Vector>>() .len() ); assert!(orientations.contains(&[5, 6, -4])); assert!(orientations.contains(&[-5, 4, -6])); assert!(orientations.contains(&[4, 6, 5])); assert!(orientations.contains(&[-4, -6, 5])); assert!(orientations.contains(&[-6, -4, -5])); let expected = vec![ [-6, -5, 4], [-6, -4, -5], [-6, 4, 5], [-6, 5, -4], [-5, -6, -4], [-5, -4, 6], [-5, 4, -6], [-5, 6, 4], [-4, -6, 5], [-4, -5, -6], [-4, 5, 6], [-4, 6, -5], [4, -6, -5], [4, -5, 6], [4, 
5, -6], [4, 6, 5], [5, -6, 4], [5, -4, -6], [5, 4, 6], [5, 6, -4], [6, -5, -4], [6, -4, 5], [6, 4, -5], [6, 5, 4], ]; assert_eq!(expected, orientations); } #[test] fn part1_example() { let system = System::from(EXAMPLE); assert_eq!(79, system.beacon_count()); } #[test] fn part1() { assert_eq!(398, day19_part1()); } #[test] fn part2_example() { assert_eq!(3621, System::from(EXAMPLE).max_manhattan_distance()); } #[test] fn part2() { assert_eq!(10965, day19_part2()); } const EXAMPLE: &str = "\ --- scanner 0 --- 404,-588,-901 528,-643,409 -838,591,734 390,-675,-793 -537,-823,-458 -485,-357,347 -345,-311,381 -661,-816,-575 -876,649,763 -618,-824,-621 553,345,-567 474,580,667 -447,-329,318 -584,868,-557 544,-627,-890 564,392,-477 455,729,728 -892,524,684 -689,845,-530 423,-701,434 7,-33,-71 630,319,-379 443,580,662 -789,900,-551 459,-707,401 --- scanner 1 --- 686,422,578 605,423,415 515,917,-361 -336,658,858 95,138,22 -476,619,847 -340,-569,-846 567,-361,727 -460,603,-452 669,-402,600 729,430,532 -500,-761,534 -322,571,750 -466,-666,-811 -429,-592,574 -355,545,-477 703,-491,-529 -328,-685,520 413,935,-424 -391,539,-444 586,-435,557 -364,-763,-893 807,-499,-711 755,-354,-619 553,889,-390 --- scanner 2 --- 649,640,665 682,-795,504 -784,533,-524 -644,584,-595 -588,-843,648 -30,6,44 -674,560,763 500,723,-460 609,671,-379 -555,-800,653 -675,-892,-343 697,-426,-610 578,704,681 493,664,-388 -671,-858,530 -667,343,800 571,-461,-707 -138,-166,112 -889,563,-600 646,-828,498 640,759,510 -630,509,768 -681,-892,-333 673,-379,-804 -742,-814,-386 577,-820,562 --- scanner 3 --- -589,542,597 605,-692,669 -500,565,-823 -660,373,557 -458,-679,-417 -488,449,543 -626,468,-788 338,-750,-386 528,-832,-391 562,-778,733 -938,-730,414 543,643,-506 -524,371,-870 407,773,750 -104,29,83 378,-903,-323 -778,-728,485 426,699,580 -438,-605,-362 -469,-447,-387 509,732,623 647,635,-688 -868,-804,481 614,-800,639 595,780,-596 --- scanner 4 --- 727,592,562 -293,-554,779 441,611,-461 -714,465,-776 
-743,427,-804 -660,-479,-426 832,-632,460 927,-485,-438 408,393,-506 466,436,-512 110,16,151 -258,-428,682 -393,719,612 -211,-452,876 808,-476,-593 -575,615,604 -485,667,467 -680,325,-822 -627,-443,-432 872,-547,-609 833,512,582 807,604,487 839,-516,451 891,-625,532 -652,-548,-490 30,-46,-14 "; }
use std::collections::{HashMap, HashSet, VecDeque}; const INPUT: &str = include_str!("../input/day19.txt"); pub(crate) fn day19_part1() -> usize { System::from(INPUT).beacon_count() } pub(crate) fn day19_part2() -> usize { System::from(INPUT).max_manhattan_distance() } type Coordinate = isize; type Vector = [Coordinate; 3]; #[derive(Debug)] struct System { scanners: Vec<Scanner>, } impl From<&str> for System { fn from(input: &str) -> Self { let scanners = input.trim().split("\n\n").map(Scanner::from).collect(); System { scanners } } } impl System { fn beacon_count(self) -> usize { self.align_beacons().0 } fn max_manhattan_distance(self) -> usize { self.align_beacons().1 } fn align_beacons(mut self) -> (usize, usize) { let mut distances_to_ref = vec![[0, 0, 0]; self.scanners.len()]; let reference = self.scanners.remove(0); let mut unique_beacons: HashSet<Vector> = reference.beacons.iter().cloned().collect(); let mut aligned = VecDeque::new(); aligned.push_back(reference); let mut unaligned: Vec<Scanner> = self.scanners.drain(..).collect(); while let Some(reference) = aligned.pop_front() { let mut still_unaligned = vec![]; while let Some(mut scanner) = unaligned.pop() { if let Some(offset) = scanner.align_with(&reference.beacons) { unique_beacons.extend(scanner.beacons.clone()); distances_to_ref[scanner.id] = offset; aligned.push_back(scanner); } else { still_unaligned.push(scanner); } } unaligned.append(&mut still_unaligned); aligned.push_back(reference); if unaligned.is_empty() { break; } } let distances_between_scanners = Scanner::offsets_between(&distances_to_ref) .into_iter() .map(|(d, _)| d); let max_manhattan_distance = distances_between_scanners .map(|[a, b, c]| (a.abs() + b.abs() + c.abs()) as usize) .max() .unwrap(); (unique_beacons.len(), max_manhattan_distance) } } #[derive(Debug)] struct Scanner { id: usize, beacons: Vec<Vector>, } impl From<&str> for Scanner {
} impl Scanner { fn align_with(&mut self, ref_beacons: &[Vector]) -> Option<Vector> { for orientation in Orientation::all() { let mut offset_frequencies: HashMap<Vector, usize> = HashMap::new(); let aligned_beacons = self.aligned_beacons(&orientation); for own_beacon in &aligned_beacons { for ref_beacon in ref_beacons { let offset = Scanner::offset_between(ref_beacon, own_beacon); *offset_frequencies.entry(offset).or_default() += 1; } } if let Some((offset, _)) = offset_frequencies .into_iter() .find(|(_, count)| *count >= 12) { self.beacons = Scanner::translate_beacons(&aligned_beacons, offset); return Some(offset); } } None } fn aligned_beacons(&self, orientation: &Orientation) -> Vec<Vector> { self.beacons .iter() .map(|pos| orientation.align(*pos)) .collect() } fn translate_beacons(beacons: &[Vector], offset: Vector) -> Vec<Vector> { beacons .iter() .map(|pos| [pos[0] + offset[0], pos[1] + offset[1], pos[2] + offset[2]]) .collect() } fn offset_between(a: &Vector, b: &Vector) -> Vector { [a[0] - b[0], a[1] - b[1], a[2] - b[2]] } fn offsets_between(beacons: &[Vector]) -> HashMap<Vector, Vec<Vector>> { let mut distances: HashMap<Vector, Vec<Vector>> = HashMap::new(); for (i, a) in beacons.iter().enumerate().take(beacons.len() - 1) { for b in beacons.iter().skip(i + 1) { let diff = Scanner::offset_between(a, b); let positions = distances.entry(diff).or_default(); positions.push(*a); positions.push(*b); } } assert_eq!(distances.len(), beacons.len() * (beacons.len() - 1) / 2); distances } } #[derive(Debug, Copy, Clone)] struct Orientation { index: [usize; 3], multi: [isize; 3], } impl Orientation { fn new(index: [usize; 3], multi: [isize; 3]) -> Self { Orientation { index, multi } } fn all() -> Vec<Orientation> { vec![ Orientation::new([0, 1, 2], [1, 1, 1]), Orientation::new([0, 2, 1], [1, 1, -1]), Orientation::new([0, 1, 2], [1, -1, -1]), Orientation::new([0, 2, 1], [1, -1, 1]), Orientation::new([0, 2, 1], [-1, 1, 1]), Orientation::new([0, 1, 2], [-1, 1, -1]), 
Orientation::new([0, 2, 1], [-1, -1, -1]), Orientation::new([0, 1, 2], [-1, -1, 1]), Orientation::new([1, 2, 0], [1, 1, 1]), Orientation::new([1, 0, 2], [1, 1, -1]), Orientation::new([1, 2, 0], [1, -1, -1]), Orientation::new([1, 0, 2], [1, -1, 1]), Orientation::new([1, 0, 2], [-1, 1, 1]), Orientation::new([1, 2, 0], [-1, 1, -1]), Orientation::new([1, 0, 2], [-1, -1, -1]), Orientation::new([1, 2, 0], [-1, -1, 1]), Orientation::new([2, 0, 1], [1, 1, 1]), Orientation::new([2, 1, 0], [1, 1, -1]), Orientation::new([2, 0, 1], [1, -1, -1]), Orientation::new([2, 1, 0], [1, -1, 1]), Orientation::new([2, 1, 0], [-1, 1, 1]), Orientation::new([2, 0, 1], [-1, 1, -1]), Orientation::new([2, 1, 0], [-1, -1, -1]), Orientation::new([2, 0, 1], [-1, -1, 1]), ] } fn align(&self, mut pos: Vector) -> Vector { pos = [pos[self.index[0]], pos[self.index[1]], pos[self.index[2]]]; (0..3).for_each(|i| pos[i] *= self.multi[i]); pos } } #[cfg(test)] mod tests { use super::*; #[test] fn test_orientations() { let pos = [5, 6, -4]; let mut orientations: Vec<_> = Orientation::all() .into_iter() .map(|o| o.align(pos)) .collect(); orientations.sort_unstable(); assert_eq!( 24, orientations .iter() .cloned() .collect::<HashSet<Vector>>() .len() ); assert!(orientations.contains(&[5, 6, -4])); assert!(orientations.contains(&[-5, 4, -6])); assert!(orientations.contains(&[4, 6, 5])); assert!(orientations.contains(&[-4, -6, 5])); assert!(orientations.contains(&[-6, -4, -5])); let expected = vec![ [-6, -5, 4], [-6, -4, -5], [-6, 4, 5], [-6, 5, -4], [-5, -6, -4], [-5, -4, 6], [-5, 4, -6], [-5, 6, 4], [-4, -6, 5], [-4, -5, -6], [-4, 5, 6], [-4, 6, -5], [4, -6, -5], [4, -5, 6], [4, 5, -6], [4, 6, 5], [5, -6, 4], [5, -4, -6], [5, 4, 6], [5, 6, -4], [6, -5, -4], [6, -4, 5], [6, 4, -5], [6, 5, 4], ]; assert_eq!(expected, orientations); } #[test] fn part1_example() { let system = System::from(EXAMPLE); assert_eq!(79, system.beacon_count()); } #[test] fn part1() { assert_eq!(398, day19_part1()); } #[test] fn 
part2_example() { assert_eq!(3621, System::from(EXAMPLE).max_manhattan_distance()); } #[test] fn part2() { assert_eq!(10965, day19_part2()); } const EXAMPLE: &str = "\ --- scanner 0 --- 404,-588,-901 528,-643,409 -838,591,734 390,-675,-793 -537,-823,-458 -485,-357,347 -345,-311,381 -661,-816,-575 -876,649,763 -618,-824,-621 553,345,-567 474,580,667 -447,-329,318 -584,868,-557 544,-627,-890 564,392,-477 455,729,728 -892,524,684 -689,845,-530 423,-701,434 7,-33,-71 630,319,-379 443,580,662 -789,900,-551 459,-707,401 --- scanner 1 --- 686,422,578 605,423,415 515,917,-361 -336,658,858 95,138,22 -476,619,847 -340,-569,-846 567,-361,727 -460,603,-452 669,-402,600 729,430,532 -500,-761,534 -322,571,750 -466,-666,-811 -429,-592,574 -355,545,-477 703,-491,-529 -328,-685,520 413,935,-424 -391,539,-444 586,-435,557 -364,-763,-893 807,-499,-711 755,-354,-619 553,889,-390 --- scanner 2 --- 649,640,665 682,-795,504 -784,533,-524 -644,584,-595 -588,-843,648 -30,6,44 -674,560,763 500,723,-460 609,671,-379 -555,-800,653 -675,-892,-343 697,-426,-610 578,704,681 493,664,-388 -671,-858,530 -667,343,800 571,-461,-707 -138,-166,112 -889,563,-600 646,-828,498 640,759,510 -630,509,768 -681,-892,-333 673,-379,-804 -742,-814,-386 577,-820,562 --- scanner 3 --- -589,542,597 605,-692,669 -500,565,-823 -660,373,557 -458,-679,-417 -488,449,543 -626,468,-788 338,-750,-386 528,-832,-391 562,-778,733 -938,-730,414 543,643,-506 -524,371,-870 407,773,750 -104,29,83 378,-903,-323 -778,-728,485 426,699,580 -438,-605,-362 -469,-447,-387 509,732,623 647,635,-688 -868,-804,481 614,-800,639 595,780,-596 --- scanner 4 --- 727,592,562 -293,-554,779 441,611,-461 -714,465,-776 -743,427,-804 -660,-479,-426 832,-632,460 927,-485,-438 408,393,-506 466,436,-512 110,16,151 -258,-428,682 -393,719,612 -211,-452,876 808,-476,-593 -575,615,604 -485,667,467 -680,325,-822 -627,-443,-432 872,-547,-609 833,512,582 807,604,487 839,-516,451 891,-625,532 -652,-548,-490 30,-46,-14 "; }
fn from(lines: &str) -> Self { let to_position = |line: &str| { let pos: Vec<Coordinate> = line.split(',').map(|n| n.parse().unwrap()).collect(); [pos[0], pos[1], pos[2]] }; let mut lines = lines.trim().lines(); let header = lines.next().unwrap(); let id = header .trim_start_matches("--- scanner ") .trim_end_matches(" ---") .parse() .unwrap(); let beacons: Vec<_> = lines.map(to_position).collect(); Scanner { id, beacons } }
function_block-full_function
[ { "content": "fn max_distance_to_origin(input: Vec<&str>) -> usize {\n\n curr_and_max_distances_to_origin(input).1\n\n}\n\n\n", "file_path": "2017/src/day11.rs", "rank": 0, "score": 388208.4410849393 }, { "content": "fn sum_of_valid_sector_ids(input: Vec<&str>) -> usize {\n\n input\n\n .into_iter()\n\n .filter_map(extract_valid_room)\n\n .map(|(_, id)| id)\n\n .sum()\n\n}\n\n\n", "file_path": "2016/src/day04.rs", "rank": 1, "score": 380696.6948905783 }, { "content": "fn max_points_after(duration: usize, input: Vec<&str>) -> usize {\n\n let reindeer = parse_reindeer(input);\n\n let mut time = 0;\n\n let mut distances = vec![0; reindeer.len()];\n\n let mut points = vec![0; reindeer.len()];\n\n while time < duration {\n\n for (idx, reindeer) in reindeer.iter().enumerate() {\n\n if time % reindeer.cycle_duration() < reindeer.fly_duration {\n\n distances[idx] += reindeer.speed;\n\n }\n\n }\n\n let max_distance = distances.iter().max().unwrap();\n\n distances\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, dist)| dist == &max_distance)\n\n .for_each(|(idx, _)| points[idx] += 1);\n\n time += 1;\n\n }\n\n *points.iter().max().unwrap()\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 2, "score": 380031.8951598094 }, { "content": "fn max_distance_after(duration: usize, input: Vec<&str>) -> usize {\n\n parse_reindeer(input)\n\n .into_iter()\n\n .map(|r| r.distance_after(duration))\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 3, "score": 380031.8951598094 }, { "content": "fn curr_and_max_distances_to_origin(input: Vec<&str>) -> (usize, usize) {\n\n let steps: Vec<Dir> = input[0].split(',').map(Dir::from).collect();\n\n let mut max_dist = 0;\n\n\n\n let mut pos = Hex::default();\n\n for dir in steps {\n\n pos.move_in(dir);\n\n max_dist = max_dist.max(pos.distance_to_origin());\n\n }\n\n\n\n (pos.distance_to_origin(), max_dist)\n\n}\n\n\n\npub(crate) fn day11_part2() -> usize {\n\n max_distance_to_origin(parse(INPUT))\n\n}\n\n\n", 
"file_path": "2017/src/day11.rs", "rank": 4, "score": 379851.5530205777 }, { "content": "fn parse_family(input: Vec<&str>) -> Vec<Vec<isize>> {\n\n // Vector of family members. This is only needed to get a unique index for each location\n\n let mut family: Vec<_> = vec![];\n\n // Happiness from each family member to all other family members (by index)\n\n let mut happiness: Vec<Vec<isize>> = vec![];\n\n\n\n for line in input {\n\n let parts: Vec<_> = line.split(|c| c == ' ' || c == '.').collect();\n\n\n\n // Example: Alice would gain 54 happiness units by sitting next to Bob.\n\n let center = parts[0].to_string();\n\n let sign = parts[3].parse::<isize>().unwrap() * if parts[2] == \"gain\" { 1 } else { -1 };\n\n let neighbor = parts[10].to_string();\n\n\n\n let mut get_family_mumber_id = |member| {\n\n if let Some(idx) = family.iter().position(|m| m == &member) {\n\n idx\n\n } else {\n\n family.push(member);\n\n happiness.iter_mut().for_each(|h| h.push(0));\n", "file_path": "2015/src/day13.rs", "rank": 5, "score": 366106.2547303121 }, { "content": "fn parse_distances_from(input: Vec<&str>) -> Vec<Vec<usize>> {\n\n // Vector of location names. 
This is only needed to get a unique index for each location\n\n let mut locations: Vec<String> = vec![];\n\n // Distances from each location to all other locations (by index)\n\n let mut distances: Vec<Vec<usize>> = vec![];\n\n\n\n input.iter().for_each(|line| {\n\n let mut get_index_for_location = |loc| {\n\n if let Some(idx) = locations.iter().position(|l| l == &loc) {\n\n idx\n\n } else {\n\n locations.push(loc);\n\n distances.iter_mut().for_each(|d| d.push(usize::MAX));\n\n distances.push(vec![0; locations.len()]);\n\n locations.len() - 1\n\n }\n\n };\n\n // Example: London to Dublin = 464\n\n let split: Vec<_> = line.split_ascii_whitespace().collect();\n\n let (loc1, loc2, distance) = (split[0], split[2], split[4].parse().unwrap());\n", "file_path": "2015/src/day09.rs", "rank": 6, "score": 365656.9485482758 }, { "content": "fn parse_containers(input: Vec<&str>) -> Vec<usize> {\n\n input\n\n .into_iter()\n\n .map(|line| line.parse().unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \"\\\n\n20\n\n15\n\n10\n\n5\n\n5\";\n\n\n\n #[test]\n", "file_path": "2015/src/day17.rs", "rank": 7, "score": 365152.6729214514 }, { "content": "fn route_lengths(input: Vec<&str>) -> Vec<usize> {\n\n let distances = parse_distances_from(input);\n\n generate_permutations_of_n_indices(distances.len())\n\n .into_iter()\n\n .map(|order| {\n\n order\n\n .windows(2)\n\n .into_iter()\n\n .map(|d| distances[d[0]][d[1]])\n\n .sum()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2015/src/day09.rs", "rank": 8, "score": 365152.6729214514 }, { "content": "fn parse_supplies(input: &str) -> Vec<Vec<usize>> {\n\n input\n\n .trim()\n\n .split(\"\\n\\n\")\n\n .filter_map(|s| s.lines().map(|s| s.parse().ok()).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2022/src/day01.rs", "rank": 9, "score": 365152.6729214514 }, { "content": "fn parse(input: &str) -> Vec<isize> {\n\n input\n\n .trim()\n\n .split(',')\n\n 
.filter_map(|n| n.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"16,1,2,0,4,2,7,1,2,14\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n let positions = parse(EXAMPLE);\n\n assert_eq!(37, minimal_fuel_to_align(positions, false));\n\n }\n\n\n", "file_path": "2021/src/day07.rs", "rank": 10, "score": 361347.9582044218 }, { "content": "fn parse(input: &str) -> Vec<usize> {\n\n input\n\n .trim()\n\n .lines()\n\n .filter_map(|s| s.parse().ok())\n\n .collect()\n\n}\n\n\n", "file_path": "2021/src/day01.rs", "rank": 11, "score": 360852.8508521026 }, { "content": "fn parse(input: &str) -> Vec<usize> {\n\n input\n\n .trim()\n\n .split(',')\n\n .filter_map(|n| n.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"3,4,3,1,2\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n assert_eq!(26, multiply(parse(EXAMPLE), 18));\n\n assert_eq!(5934, multiply(parse(EXAMPLE), 80));\n\n }\n\n\n", "file_path": "2021/src/day06.rs", "rank": 12, "score": 360852.8508521026 }, { "content": "fn determine_signals(input: Vec<&str>) -> HashMap<Id, Num> {\n\n let instructions: Vec<_> = input.into_iter().map(Instruction::from).collect();\n\n signals_from(instructions)\n\n}\n\n\n", "file_path": "2015/src/day07.rs", "rank": 13, "score": 355408.72151307005 }, { "content": "fn sum_of_numbers(input: Vec<&str>) -> isize {\n\n input\n\n .iter()\n\n .map(|line| {\n\n line.split(|c| [',', '[', ']', '{', '}', ':'].contains(&c))\n\n .filter_map(|s| s.parse::<isize>().ok())\n\n .sum::<isize>()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "2015/src/day12.rs", "rank": 14, "score": 354225.20558658463 }, { "content": "fn diagnostic_checksum(input: Vec<&str>) -> usize {\n\n let mut touring_machine = TouringMachine::from(input);\n\n touring_machine.check_sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \"\\\n\nBegin in 
state A.\n\nPerform a diagnostic checksum after 6 steps.\n\n\n\nIn state A:\n\n If the current value is 0:\n\n - Write the value 1.\n\n - Move one slot to the right.\n\n - Continue with state B.\n\n If the current value is 1:\n", "file_path": "2017/src/day25.rs", "rank": 15, "score": 353746.36871956114 }, { "content": "fn distance_to_origin(input: Vec<&str>) -> usize {\n\n curr_and_max_distances_to_origin(input).0\n\n}\n\n\n", "file_path": "2017/src/day11.rs", "rank": 16, "score": 353746.36871956114 }, { "content": "fn number_of_constellations(input: Vec<&str>) -> usize {\n\n let mut points: Vec<Point> = input.into_iter().map(Point::from).collect();\n\n\n\n let mut cons: Vec<Constellation> = vec![];\n\n while let Some(point) = points.pop() {\n\n let close_cons = point.remove_close_constellations(&mut cons);\n\n let mut new_con = Constellation::from(point);\n\n new_con.add_cons(close_cons);\n\n cons.push(new_con);\n\n }\n\n cons.len()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n #[test]\n\n fn part1_example1() {\n", "file_path": "2018/src/day25.rs", "rank": 17, "score": 353746.36871956114 }, { "content": "fn trip_severity(input: Vec<&str>) -> usize {\n\n let mut firewalls = parse_firewalls(input);\n\n\n\n let mut severity = 0;\n\n for depth in 0..firewalls.len() {\n\n // Move packet along the top layer, and sum up severity if it's scanned\n\n // by the scanner of the optional firewall at the current depth\n\n if let Some(firewall) = firewalls.get(depth).unwrap() {\n\n if firewall.is_scanning_top() {\n\n severity += depth * firewall.range;\n\n }\n\n }\n\n firewalls\n\n .iter_mut()\n\n .filter_map(|f| f.as_mut())\n\n .for_each(|firewall| {\n\n if firewall.is_scanning_down {\n\n firewall.scanner += 1;\n\n if firewall.scanner == firewall.range - 1 {\n\n firewall.is_scanning_down = false;\n", "file_path": "2017/src/day13.rs", "rank": 18, "score": 353746.36871956114 }, { "content": "fn part1_hash_checksum(max_idx: u8, input: 
Vec<&str>) -> usize {\n\n let lengths = input[0].split(',').map(|i| i.parse().unwrap()).collect();\n\n let mut hasher = Hasher::new(max_idx, lengths);\n\n hasher.do_hash_cycle();\n\n hasher.rotate_to_start_idx();\n\n hasher.ring[0] as usize * hasher.ring[1] as usize\n\n}\n\n\n", "file_path": "2017/src/day10.rs", "rank": 19, "score": 352669.7279940273 }, { "content": "fn parse_connections(input: Vec<&str>) -> HashMap<usize, HashSet<usize>> {\n\n let mut connections = HashMap::new();\n\n for line in input {\n\n let (source, destinations) = line.split_once(\" <-> \").unwrap();\n\n let source: usize = source.parse().unwrap();\n\n let destinations: HashSet<usize> = destinations\n\n .split(\", \")\n\n .map(|n| n.parse().unwrap())\n\n .collect();\n\n connections.insert(source, destinations);\n\n }\n\n connections\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE1: &str = \"\\\n", "file_path": "2017/src/day12.rs", "rank": 20, "score": 350227.89598060306 }, { "content": "fn shortest_route_length(input: Vec<&str>) -> usize {\n\n route_lengths(input).into_iter().min().unwrap()\n\n}\n", "file_path": "2015/src/day09.rs", "rank": 21, "score": 347092.09982168314 }, { "content": "fn count_unescaping_overhead(input: Vec<&str>) -> usize {\n\n let (orig_escaped_count, unescaped_count) = input\n\n .iter()\n\n .map(|s| get_unescaped_counts(s))\n\n .reduce(|a, b| (a.0 + b.0, a.1 + b.1))\n\n .unwrap();\n\n orig_escaped_count - unescaped_count\n\n}\n\n\n", "file_path": "2015/src/day08.rs", "rank": 22, "score": 347092.0998216831 }, { "content": "fn longest_route_length(input: Vec<&str>) -> usize {\n\n route_lengths(input).into_iter().max().unwrap()\n\n}\n", "file_path": "2015/src/day09.rs", "rank": 23, "score": 347092.09982168314 }, { "content": "fn total_ribbon_needed(input: Vec<&str>) -> usize {\n\n input.iter().map(|s| Box::from(s).ribbon_needed()).sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
part1_examples() {\n\n assert_eq!(58, Box::from(\"2x3x4\").wrapping_paper_needed());\n\n assert_eq!(43, Box::from(\"1x1x10\").wrapping_paper_needed());\n\n }\n\n\n\n #[test]\n\n fn part1() {\n\n assert_eq!(1588178, day02_part1());\n\n }\n\n\n\n #[test]\n", "file_path": "2015/src/day02.rs", "rank": 24, "score": 347092.0998216831 }, { "content": "fn count_mul_instructions(input: Vec<&str>) -> usize {\n\n let instr = input.into_iter().map(Instr::from).collect::<Vec<_>>();\n\n let mut program = Program::new(0, &instr);\n\n let mut mul_count = 0;\n\n loop {\n\n let instr = program.instruction();\n\n if matches!(instr, Some(Instr::Mul(_, _))) {\n\n mul_count += 1;\n\n }\n\n match program.step() {\n\n Running => {}\n\n SentOutput(_) => (),\n\n AwaitingInput | Terminated => {\n\n return mul_count;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2017/src/day23.rs", "rank": 25, "score": 347092.0998216831 }, { "content": "fn count_escaping_overhead(input: Vec<&str>) -> usize {\n\n let (orig_unescaped_count, escaped_count) = input\n\n .iter()\n\n .map(|s| get_escaped_counts(s))\n\n .reduce(|a, b| (a.0 + b.0, a.1 + b.1))\n\n .unwrap();\n\n escaped_count - orig_unescaped_count\n\n}\n\n\n", "file_path": "2015/src/day08.rs", "rank": 26, "score": 347092.0998216831 }, { "content": "fn count_total_groups(input: Vec<&str>) -> usize {\n\n count_group0_members_or_total_groups(input, false)\n\n}\n\n\n", "file_path": "2017/src/day12.rs", "rank": 27, "score": 347092.0998216831 }, { "content": "fn delay_to_get_through_safely(input: Vec<&str>) -> usize {\n\n // We only care about firewalls when they're scanning the top layer, where packets travel\n\n // These are the periods where their scanner is at the top\n\n let firewall_periods: Vec<Option<usize>> = parse_firewalls(input)\n\n .into_iter()\n\n .map(|fw| fw.map(|firewall| (firewall.range - 1) * 2))\n\n .collect();\n\n\n\n let mut packet_depths: VecDeque<Option<usize>> = VecDeque::new();\n\n for starting_delay in 0..usize::MAX {\n\n // Add a 
new packet, which also moves the traveling packets along\n\n packet_depths.push_front(Some(starting_delay));\n\n\n\n // Check if a packet made it to the other side\n\n if let Some(Some(winners_delay)) = packet_depths.get(firewall_periods.len()) {\n\n return *winners_delay;\n\n }\n\n\n\n // Remove any packets scanned by a firewall\n\n for (depth, delay) in packet_depths\n", "file_path": "2017/src/day13.rs", "rank": 28, "score": 347092.0998216831 }, { "content": "fn count_programs_in_group0(input: Vec<&str>) -> usize {\n\n count_group0_members_or_total_groups(input, true)\n\n}\n\n\n", "file_path": "2017/src/day12.rs", "rank": 29, "score": 347092.0998216831 }, { "content": "fn pixels_after_n_iterations(input: Vec<&str>, n: usize) -> usize {\n\n let rules: Vec<Rule> = input.iter().map(Rule::from).collect();\n\n let pattern = Pattern::from(STARTING_PATTERN);\n\n let mut cache: HashMap<(Pattern, usize), usize> = HashMap::new();\n\n iterate(n, &rules, pattern, &mut cache)\n\n}\n\n\n", "file_path": "2017/src/day21.rs", "rank": 30, "score": 346299.85733393684 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.trim().lines().collect()\n\n}\n\n\n\nmod day01;\n\nuse crate::day01::{day01_part1, day01_part2};\n", "file_path": "2016/src/main.rs", "rank": 31, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.lines().collect()\n\n}\n\n\n\nuse crate::day01::{day1_part1, day1_part2};\n\nmod day01;\n", "file_path": "2018/src/main.rs", "rank": 32, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.lines().collect()\n\n}\n\n\n\nmod day01;\n\nuse crate::day01::{day1_part1, day1_part2};\n", "file_path": "2017/src/main.rs", "rank": 33, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.lines().collect()\n\n}\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\nuse crate::day01::day1_part2;\n\nmod day01;\n", "file_path": "2019/src/main.rs", "rank": 34, 
"score": 342479.9608698135 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.trim().lines().collect()\n\n}\n\n\n\nmod day01;\n\nmod permutation;\n\nuse crate::day01::{day01_part1, day01_part2};\n", "file_path": "2015/src/main.rs", "rank": 35, "score": 342479.96086981345 }, { "content": "fn parse_module_types(input: &'static str) -> Vec<ModuleType> {\n\n let mut modules: Vec<_> = input.trim().lines().map(ModuleType::from).collect();\n\n // Initialize conjunction inputs\n\n let mut inputs_by_conjunction_name: HashMap<ModuleName, Vec<ModuleName>> = HashMap::new();\n\n modules.iter().for_each(|module| {\n\n module.outputs().iter().for_each(|output| {\n\n inputs_by_conjunction_name\n\n .entry(output)\n\n .or_default()\n\n .push(module.name());\n\n });\n\n });\n\n modules.iter_mut().for_each(|module| {\n\n if let Conjunction(name, inputs, ..) = module {\n\n *inputs = inputs_by_conjunction_name\n\n .remove(name)\n\n .unwrap_or_else(|| panic!(\"every conjunction has inputs, missing {name}\"))\n\n .into_iter()\n\n .map(|input| (input, Low))\n\n .collect();\n", "file_path": "2023/src/day20.rs", "rank": 36, "score": 341481.9489620256 }, { "content": "fn sum_of_numbers_without_red(input: Vec<&str>) -> isize {\n\n input.iter().map(|s| sum_without_red(s)).sum()\n\n}\n\n\n", "file_path": "2015/src/day12.rs", "rank": 37, "score": 341297.5172001405 }, { "content": "fn size_of_largest_finite_area(input: Vec<&str>) -> usize {\n\n let coords: Vec<_> = input.iter().map(Loc::from).collect();\n\n let (min, max) = (Loc::min(&coords), Loc::max(&coords));\n\n // println!(\"min = {}, max = {}\", min, max);\n\n\n\n // Calculate the manhattan distances from all locations within the min/max rectangle\n\n // to the closest coordinate. 
It will be None if it's equally close to multiple locations.\n\n let closest_coord_idx_by_loc = indices_of_closest_coordinate(&coords, &min, &max);\n\n // Remove coordinate indices of infinite areas\n\n let finite_area_coord_indices =\n\n remove_infinite_areas(coords, min, max, &closest_coord_idx_by_loc);\n\n // println!(\"Finite area coord indices = {:?}\", finite_area_coord_indices);\n\n\n\n let mut count_by_index: HashMap<Index, usize> = HashMap::new();\n\n closest_coord_idx_by_loc.iter().for_each(|(_, idx)| {\n\n if let Some(idx) = idx {\n\n if finite_area_coord_indices.contains(idx) {\n\n *count_by_index.entry(*idx).or_insert(0) += 1;\n\n }\n\n }\n\n });\n\n // println!(\"{:?}\", count_by_index);\n\n *count_by_index\n\n .iter()\n\n .max_by_key(|(_idx, count)| *count)\n\n .unwrap()\n\n .1\n\n}\n\n\n", "file_path": "2018/src/day06.rs", "rank": 38, "score": 340848.21101810416 }, { "content": "fn find_matching_memory_part2(input: Vec<&str>) -> usize {\n\n let analysis_result = analysis_result();\n\n for line in input {\n\n let (sue_number, memories) = extract_memories(line);\n\n if analysis_result.part2_matches(memories) {\n\n return sue_number;\n\n }\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "2015/src/day16.rs", "rank": 39, "score": 340848.2110181042 }, { "content": "fn count_possible_triangle_columns(input: Vec<&str>) -> usize {\n\n let input: Vec<Vec<usize>> = input\n\n .iter()\n\n .map(|line| {\n\n line.split_ascii_whitespace()\n\n .map(|n| n.parse().unwrap())\n\n .collect()\n\n })\n\n .collect();\n\n input\n\n .windows(3)\n\n .step_by(3)\n\n .map(|n| {\n\n // println!(\"{:?}\", n);\n\n (0..3)\n\n .into_iter()\n\n .filter(move |col| is_triangle(n[0][*col], n[1][*col], n[2][*col]))\n\n .count()\n\n })\n\n .sum()\n", "file_path": "2016/src/day03.rs", "rank": 40, "score": 340848.2110181042 }, { "content": "fn count_possible_triangle_rows(input: Vec<&str>) -> usize {\n\n input\n\n .iter()\n\n .map(|line| {\n\n line.split_ascii_whitespace()\n\n .map(|n| 
n.parse().unwrap())\n\n .collect::<Vec<_>>()\n\n })\n\n .filter(|s| is_triangle(s[0], s[1], s[2]))\n\n .count()\n\n}\n\n\n", "file_path": "2016/src/day03.rs", "rank": 41, "score": 340848.21101810416 }, { "content": "fn total_wrapping_paper_needed(input: Vec<&str>) -> usize {\n\n input\n\n .iter()\n\n .map(|s| Box::from(s).wrapping_paper_needed())\n\n .sum()\n\n}\n\n\n", "file_path": "2015/src/day02.rs", "rank": 42, "score": 340848.21101810416 }, { "content": "fn find_matching_memory_part1(input: Vec<&str>) -> usize {\n\n let analysis_result = analysis_result();\n\n for line in input {\n\n let (sue_number, memories) = extract_memories(line);\n\n if analysis_result.part1_matches(memories) {\n\n return sue_number;\n\n }\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "2015/src/day16.rs", "rank": 43, "score": 340848.2110181042 }, { "content": "fn number_of_particles_remaining_after_collisions(input: Vec<&str>) -> usize {\n\n let mut particles = parse_input(input);\n\n while particles.iter().any(Particle::is_decelerating)\n\n // This second condition is only for the part 2 example ;)\n\n || particles.len() == 4\n\n {\n\n let mut count_by_position = HashMap::new();\n\n particles.iter_mut().for_each(|particle| {\n\n particle.tick();\n\n *count_by_position.entry(particle.pos).or_insert(0) += 1;\n\n });\n\n for particle_pos in count_by_position\n\n .iter()\n\n .filter(|(_, count)| **count > 1)\n\n .map(|(pos, _)| pos)\n\n {\n\n while let Some(idx) = particles\n\n .iter()\n\n .position(|particle| particle.pos == *particle_pos)\n\n {\n\n particles.swap_remove(idx);\n\n }\n\n }\n\n }\n\n particles.len()\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 44, "score": 340848.2110181042 }, { "content": "fn size_of_area_with_max_total_distance_to_all_coords(input: Vec<&str>, total: Distance) -> usize {\n\n let coords: Vec<_> = input.iter().map(Loc::from).collect();\n\n let (min, max) = (Loc::min(&coords), Loc::max(&coords));\n\n 
count_locations_with_sum_of_distances_to_all_cords_within_total(&coords, &min, &max, total)\n\n}\n\n\n", "file_path": "2018/src/day06.rs", "rank": 45, "score": 340461.13939591957 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Vec<char>> {\n\n input.iter().map(|line| line.chars().collect()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \" | \\\n\n \\n | +--+ \\\n\n \\n A | C \\\n\n \\n F---|----E|--+ \\\n\n \\n | | | D \\\n\n \\n +B-+ +--+ \\\n\n \\n \";\n\n\n\n #[test]\n\n fn part1_example() {\n\n assert_eq!(\n", "file_path": "2017/src/day19.rs", "rank": 46, "score": 340065.54181322234 }, { "content": "fn parse_and_settle(input: &str) -> (Vec<Brick>, usize) {\n\n let bricks = parse(input);\n\n settle(bricks)\n\n}\n\n\n", "file_path": "2023/src/day22.rs", "rank": 47, "score": 339563.242047036 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Particle> {\n\n input\n\n .iter()\n\n .map(|line| Particle::parse(line).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 48, "score": 338182.38698442484 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Component> {\n\n input\n\n .iter()\n\n .map(|s| {\n\n let (l, r) = s.split_once('/').unwrap();\n\n (l.parse().unwrap(), r.parse().unwrap())\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2017/src/day24.rs", "rank": 49, "score": 338182.3869844249 }, { "content": "fn find_high_score_ignore_calories(input: Vec<&str>) -> isize {\n\n find_high_score(input, false)\n\n}\n\n\n", "file_path": "2015/src/day15.rs", "rank": 50, "score": 335413.74054813595 }, { "content": "fn find_high_score_fix_calories(input: Vec<&str>) -> isize {\n\n find_high_score(input, true)\n\n}\n\n\n\nconst TOTAL_AMOUNT: isize = 100;\n", "file_path": "2015/src/day15.rs", "rank": 51, "score": 335413.74054813595 }, { "content": "fn process_int_code_with_input(v: &mut Vec<isize>, input: isize) -> Option<isize> {\n\n let mut idx = 0;\n\n let 
mut output = None;\n\n while idx < v.len() {\n\n let s = to_5_digit_string_padded_with_leading_zeroes(v[idx]);\n\n let code = to_num(&s[(s.len() - 2)..s.len()]);\n\n let op = Op::from_code(code);\n\n let modes = extract_modes(&s);\n\n // let pre = format!(\"{:?}: {:?}\", s, op);\n\n match op {\n\n Add | Multiply | LessThan | Equals => {\n\n let p1 = param_value(v, idx + 1, &modes[0]);\n\n let p2 = param_value(v, idx + 2, &modes[1]);\n\n let res = match op {\n\n Add => p1 + p2,\n\n Multiply => p1 * p2,\n\n LessThan => eval(p1 < p2),\n\n Equals => eval(p1 == p2),\n\n _ => unreachable!(),\n\n };\n", "file_path": "2019/src/day05.rs", "rank": 52, "score": 335252.1460130362 }, { "content": "fn index_of_particle_staying_closest_to_origin(input: Vec<&str>) -> usize {\n\n let particles = parse_input(input);\n\n // The particle that will stay closest to the origin is the one with the smallest acceleration\n\n particles\n\n .iter()\n\n .enumerate()\n\n .min_by_key(|(_, p)| p.acc.abs())\n\n .unwrap()\n\n .0\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 53, "score": 334977.87466181617 }, { "content": "fn number_of_times_program_1_sent_a_value(input: Vec<&str>) -> usize {\n\n let instr = input.into_iter().map(Instr::from).collect::<Vec<_>>();\n\n let mut programs = [Program::new(0, &instr), Program::new(1, &instr)];\n\n let mut send_count = [0, 0];\n\n let mut is_blocked = [false, false];\n\n while !(is_blocked[0] && is_blocked[1]) {\n\n for id in 0..=1 {\n\n match programs[id].step() {\n\n Running => {}\n\n SentOutput(value) => {\n\n send_count[id] += 1;\n\n programs[(id + 1) % 2].receive(value);\n\n }\n\n AwaitingInput | Terminated => {\n\n is_blocked[id] = true;\n\n }\n\n }\n\n }\n\n }\n\n send_count[1]\n", "file_path": "2017/src/day18.rs", "rank": 54, "score": 334977.87466181617 }, { "content": "fn steps_to_reach_the_exit(mut jump_offsets: Vec<isize>, use_part2_rule: bool) -> usize {\n\n let mut counter = 0;\n\n let mut ptr = 0isize;\n\n while 0 <= ptr && (ptr as 
usize) < jump_offsets.len() {\n\n let offset = jump_offsets[ptr as usize];\n\n if use_part2_rule && offset >= 3 {\n\n jump_offsets[ptr as usize] -= 1;\n\n } else {\n\n jump_offsets[ptr as usize] += 1;\n\n }\n\n ptr += offset;\n\n counter += 1;\n\n }\n\n counter\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "2017/src/day05.rs", "rank": 55, "score": 332808.84208154783 }, { "content": "fn to_vec(input: &str) -> Vec<Entry> {\n\n input.split(' ').map(|s| s.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "2018/src/day08.rs", "rank": 56, "score": 330740.0742874202 }, { "content": "fn discs_from(input: Vec<&str>) -> Vec<Disc> {\n\n input.into_iter().map(Disc::from).collect()\n\n}\n\n\n", "file_path": "2016/src/day15.rs", "rank": 57, "score": 329750.4619563146 }, { "content": "fn parse(input: &str) -> Vec<Vec<char>> {\n\n input.trim().lines().map(|s| s.chars().collect()).collect()\n\n}\n\n\n\nuse ChunkType::*;\n", "file_path": "2021/src/day10.rs", "rank": 58, "score": 329750.4619563146 }, { "content": "fn parse(input: &str) -> Vec<Vec<u8>> {\n\n input\n\n .trim()\n\n .lines()\n\n .map(|s| s.bytes().collect::<Vec<_>>())\n\n .collect()\n\n}\n\n\n", "file_path": "2022/src/day03.rs", "rank": 59, "score": 329750.4619563146 }, { "content": "fn infections_after_bursts_part2(input: Vec<&str>, burst_count: usize) -> usize {\n\n infections_after_bursts(input, burst_count, Part::Two)\n\n}\n\n\n", "file_path": "2017/src/day22.rs", "rank": 60, "score": 328955.0684712877 }, { "content": "fn infections_after_bursts_part1(input: Vec<&str>, burst_count: usize) -> usize {\n\n infections_after_bursts(input, burst_count, Part::One)\n\n}\n\n\n", "file_path": "2017/src/day22.rs", "rank": 61, "score": 328955.0684712877 }, { "content": "/// find factors f[i] such that all values are the same, where value[i] = f[i] * base[i] - offset[i]\n\nfn find_factors(base: Vec<usize>, offset: Vec<isize>) -> (Vec<usize>, usize) {\n\n assert_eq!(base.len(), 
offset.len());\n\n let len = base.len();\n\n let mut factor = vec![1; len];\n\n let mut value = vec![0; len];\n\n for i in 0..len {\n\n // Make sure value never becomes negative (stays usize)\n\n while offset[i] > (factor[i] * base[i]) as isize {\n\n // println!(\n\n // \"factor * base < offset: {} * {} < {}\",\n\n // factor[i], base[i], offset[i]\n\n // );\n\n factor[i] += 1;\n\n }\n\n value[i] = (((factor[i] * base[i]) as isize) - offset[i]) as usize;\n\n }\n\n while !value.iter().all(|v| v == &value[0]) {\n\n // increase the factor of the smallest value and check again\n\n let (index_of_min, _) = value\n\n .iter()\n\n .enumerate()\n\n .min_by(|(_, v1), (_, v2)| v1.cmp(v2))\n\n .unwrap();\n\n factor[index_of_min] += 1;\n\n value[index_of_min] = (((factor[index_of_min] * base[index_of_min]) as isize)\n\n - offset[index_of_min]) as usize;\n\n }\n\n (factor, value[0])\n\n}\n", "file_path": "2020/src/day13.rs", "rank": 62, "score": 327062.8468385803 }, { "content": "fn parse_input(input: &[&str]) -> (Vec<Sample>, Vec<Instruction>) {\n\n // samples are divided by single empty lines from each other\n\n let parts = input.split(|line| line.is_empty());\n\n // after the samples there are 3 empty lines, so the parts from above\n\n // will be contain two empty slices between samples and program\n\n let samples: Vec<Sample> = parts\n\n .clone()\n\n .take_while(|slice| !slice.is_empty())\n\n .map(|sample| parse_sample(sample))\n\n .collect();\n\n let instructions: &[&str] = parts.skip_while(|slice| !slice.is_empty()).nth(2).unwrap();\n\n let program = parse_program(instructions);\n\n (samples, program)\n\n}\n\n\n", "file_path": "2018/src/day16.rs", "rank": 63, "score": 326805.2297037762 }, { "content": "fn shortest_path(input: Vec<&str>, part: Part) -> usize {\n\n let maze = Maze::from(input);\n\n maze.length_of_shortest_path_from_start_to_end(part)\n\n}\n\n\n", "file_path": "2019/src/day20.rs", "rank": 64, "score": 326535.6816166745 }, { "content": "fn parse_input(input: 
&str) -> Vec<bool> {\n\n input.chars().map(|c| c == '^').collect()\n\n}\n\n\n", "file_path": "2016/src/day18.rs", "rank": 65, "score": 325281.30004883406 }, { "content": "fn parse_reindeer(input: Vec<&str>) -> Vec<Reindeer> {\n\n let reindeer: Vec<_> = input.iter().map(Reindeer::from).collect();\n\n reindeer\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 66, "score": 324590.68871977454 }, { "content": "fn parse_operations(input: Vec<&str>) -> Vec<Op> {\n\n input.into_iter().map(Op::from).collect()\n\n}\n\n\n", "file_path": "2016/src/day21.rs", "rank": 67, "score": 324590.68871977454 }, { "content": "fn parse_ingredients(input: Vec<&str>) -> Vec<Ingredient> {\n\n input.into_iter().map(Ingredient::from).collect()\n\n}\n\n\n", "file_path": "2015/src/day15.rs", "rank": 68, "score": 324590.68871977454 }, { "content": "fn parse_instructions(input: Vec<&str>) -> Vec<Instruction> {\n\n input.into_iter().map(Instruction::from).collect()\n\n}\n\n\n", "file_path": "2015/src/day23.rs", "rank": 69, "score": 324590.68871977454 }, { "content": "fn parse_weights(input: Vec<&str>) -> Vec<Weight> {\n\n input.into_iter().map(|l| l.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "2015/src/day24.rs", "rank": 70, "score": 324590.68871977454 }, { "content": "fn get_offsets_and_bases_as_separate_vecs(input: &[String]) -> (Vec<usize>, Vec<usize>) {\n\n input[1]\n\n .split(',')\n\n .enumerate()\n\n .filter_map(|(idx, id)| {\n\n // get rid of the 'x' entries\n\n if let Ok(id) = id.parse::<usize>() {\n\n Some((idx, id))\n\n } else {\n\n None\n\n }\n\n })\n\n .unzip()\n\n}\n\n\n\n#[allow(unused)]\n", "file_path": "2020/src/day13.rs", "rank": 71, "score": 324513.5521031311 }, { "content": "fn bathroom_code(input: Vec<&str>, numpad_type: NumPadType) -> String {\n\n let dirs: Vec<Vec<_>> = input\n\n .iter()\n\n .map(|line| line.chars().map(Dir::from).collect())\n\n .collect();\n\n let mut numpad = NumPad::default();\n\n let mut code: Vec<char> = vec![];\n\n for pattern in dirs {\n\n 
match numpad_type {\n\n NumPadType::Simple => numpad.apply_simple_pattern(pattern),\n\n NumPadType::Complex => numpad.apply_complex_pattern(pattern),\n\n }\n\n code.push(numpad.to_char());\n\n }\n\n\n\n code.iter().collect()\n\n}\n\n\n", "file_path": "2016/src/day02.rs", "rank": 72, "score": 324064.4528643942 }, { "content": "fn count_used_cells(input: &str) -> usize {\n\n let grid = generate_grid(input);\n\n grid.iter()\n\n .map(|row| row.iter().filter(|&s| s == &State::Used).count())\n\n .sum()\n\n}\n\n\n", "file_path": "2017/src/day14.rs", "rank": 73, "score": 323508.9439927166 }, { "content": "fn shuffle_deck(size: usize, input: Vec<&str>) -> FullDeck {\n\n let mut deck: Vec<_> = (0..size).into_iter().collect();\n\n let techniques = parse_shuffle_techniques(input);\n\n deck.shuffle_with(&techniques);\n\n deck\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 74, "score": 320665.3452603865 }, { "content": "fn parse(input: &str) -> impl Iterator<Item = Grid> + '_ {\n\n input.trim().split(\"\\n\\n\").map(Grid::from)\n\n}\n\n\n\nimpl Grid {\n\n fn score(&self) -> usize {\n\n let rows = self.rows_of_reflection();\n\n let columns = self.columns_of_reflection();\n\n Grid::score_rows_and_columns(rows, columns)\n\n }\n\n fn rows_of_reflection(&self) -> BTreeSet<usize> {\n\n self.indices_where(Grid::rows_are_reflected, self.height())\n\n }\n\n fn columns_of_reflection(&self) -> BTreeSet<usize> {\n\n self.indices_where(Grid::columns_are_reflected, self.width())\n\n }\n\n fn score_rows_and_columns(rows: BTreeSet<usize>, columns: BTreeSet<usize>) -> usize {\n\n match (rows.len(), columns.len()) {\n\n (1, 0) => 100 * *rows.first().unwrap(),\n\n (0, 1) => *columns.first().unwrap(),\n", "file_path": "2023/src/day13.rs", "rank": 75, "score": 319991.3695000912 }, { "content": "fn parse(input: &str) -> impl Iterator<Item = Hailstone> + '_ {\n\n input.trim().lines().map(Hailstone::from)\n\n}\n\n\n", "file_path": "2023/src/day24.rs", "rank": 76, "score": 319991.3695000912 }, { 
"content": "fn parse_rules(input: Vec<&str>) -> Vec<IpRange> {\n\n input\n\n .into_iter()\n\n .map(|s| {\n\n let (from, to) = s.split_once('-').unwrap();\n\n from.parse().unwrap()..=to.parse().unwrap()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2016/src/day20.rs", "rank": 77, "score": 319725.45182769693 }, { "content": "fn split_into_guard_shifts(input: &[&str]) -> Vec<Vec<String>> {\n\n let mut sorted = input.to_vec();\n\n sorted.sort();\n\n let joined = sorted.join(\"\\n\");\n\n joined\n\n .split(\"Guard #\")\n\n .skip(1)\n\n .map(|shift| shift.split('\\n').map(str::to_string).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2018/src/day04.rs", "rank": 78, "score": 319725.45182769693 }, { "content": "fn parse(input: &str) -> impl Iterator<Item = &str> {\n\n input.trim().split(',')\n\n}\n\n\n", "file_path": "2023/src/day15.rs", "rank": 79, "score": 319238.5741625251 }, { "content": "fn parse(input: &str) -> Vec<char> {\n\n input.trim().chars().collect()\n\n}\n\n\n", "file_path": "2022/src/day06.rs", "rank": 80, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Brick> {\n\n input.trim().lines().map(Brick::from).collect()\n\n}\n\n\n\nimpl Brick {\n\n #[cfg(test)]\n\n fn single(x: Coord, y: Coord, z: Coord) -> Self {\n\n Brick {\n\n top: Position { x, y, z },\n\n bottom: Position { x, y, z },\n\n }\n\n }\n\n #[cfg(test)]\n\n fn new(x1: Coord, y1: Coord, z1: Coord, x2: Coord, y2: Coord, z2: Coord) -> Self {\n\n if x1 == x2 && (y1 == y2 || z1 == z2) || y1 == y2 && z1 == z2 {\n\n let p1 = Position::new(x1, y1, z1);\n\n let p2 = Position::new(x2, y2, z2);\n\n let (top, bottom) = match p1.z > p2.z {\n\n true => (p1, p2),\n\n false => (p2, p1),\n", "file_path": "2023/src/day22.rs", "rank": 81, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Voxel> {\n\n input.trim().lines().map(Voxel::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day18.rs", "rank": 82, "score": 317534.34052235505 }, { "content": "fn 
parse(input: &str) -> Vec<Line> {\n\n input.trim().lines().map(Line::from).collect()\n\n}\n\n\n", "file_path": "2021/src/day05.rs", "rank": 83, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Signal> {\n\n input\n\n .trim()\n\n .lines()\n\n .map(|s| {\n\n let (inputs, outputs) = s.split_once(\" | \").unwrap();\n\n let inputs = inputs\n\n .split_ascii_whitespace()\n\n .map(|s| s.chars().collect())\n\n .collect();\n\n let outputs = outputs\n\n .split_ascii_whitespace()\n\n .map(|s| s.chars().collect())\n\n .collect();\n\n (inputs, outputs)\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "2021/src/day08.rs", "rank": 84, "score": 317534.34052235505 }, { "content": "fn to_points(input: &[&str]) -> Vec<Point> {\n\n input\n\n .iter()\n\n .map(|line| Point::parse(line).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "2018/src/day10.rs", "rank": 85, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Monkey> {\n\n input.trim().split(\"\\n\\n\").map(Monkey::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day11.rs", "rank": 86, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Race> {\n\n let (times, distances) = input.trim().split_once('\\n').expect(\"2 lines\");\n\n let times: Vec<_> = times\n\n .strip_prefix(\"Time:\")\n\n .expect(\"'Time:' prefix\")\n\n .trim()\n\n .split_ascii_whitespace()\n\n .filter_map(|n| n.parse().ok())\n\n .collect();\n\n let distances: Vec<_> = distances\n\n .strip_prefix(\"Distance:\")\n\n .expect(\"'Distance:' prefix\")\n\n .trim()\n\n .split_ascii_whitespace()\n\n .filter_map(|n| n.parse().ok())\n\n .collect();\n\n\n\n times\n\n .into_iter()\n\n .zip(distances)\n\n .map(|(total_time, distance)| Race {\n\n total_time,\n\n distance,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2023/src/day06.rs", "rank": 87, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Command> {\n\n 
input.trim().lines().map(Command::from).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\\\n\nforward 5\n\ndown 5\n\nforward 8\n\nup 3\n\ndown 8\n\nforward 2\";\n\n\n\n #[test]\n\n fn example1() {\n\n let commands = parse(EXAMPLE);\n\n assert_eq!(150, follow_part1_commands(commands));\n", "file_path": "2021/src/day02.rs", "rank": 88, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Op> {\n\n input.trim().lines().map(Op::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day10.rs", "rank": 89, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Instructions> {\n\n input.trim().lines().map(Instructions::from).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\\\n\nA Y\n\nB X\n\nC Z\";\n\n\n\n #[test]\n\n fn example1() {\n\n let rounds = parse(EXAMPLE);\n\n assert_eq!(15, follow_part1_rounds(rounds));\n\n }\n\n\n\n #[test]\n", "file_path": "2022/src/day02.rs", "rank": 90, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Command> {\n\n input.trim().lines().map(Command::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day09.rs", "rank": 91, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Direction> {\n\n input\n\n .trim()\n\n .chars()\n\n .map(|c| if c == '<' { Left } else { Right })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \">>><<><>><<<>><>>><<<>>><<<><<<>><>><<>>\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n let directions = parse(EXAMPLE);\n\n assert_eq!(3_068, tower_height(directions, P1_ROUNDS));\n\n }\n\n #[test]\n", "file_path": "2022/src/day17.rs", "rank": 92, "score": 317534.34052235505 }, { "content": "fn solve_captcha(mut numbers: Vec<u32>, offset: usize) -> u32 {\n\n numbers.extend_from_within(0..offset);\n\n\n\n numbers\n\n .iter()\n\n .zip(numbers.iter().skip(offset))\n\n .filter_map(|(a, b)| 
{\n\n // println!(\"{} <> {}\", a, b);\n\n if a == b {\n\n Some(a)\n\n } else {\n\n None\n\n }\n\n })\n\n .sum::<u32>()\n\n}\n\n\n", "file_path": "2017/src/day01.rs", "rank": 93, "score": 316845.47779589053 }, { "content": "fn count_regions_of_used_cells(input: &str) -> usize {\n\n let mut grid = generate_grid(input);\n\n let mut locations_to_visit = locations_to_visit(&mut grid);\n\n\n\n let mut unique_region_count = 0;\n\n while let Some((x, y)) = locations_to_visit.pop() {\n\n if grid[y][x] == State::Labeled {\n\n continue;\n\n }\n\n unique_region_count += 1;\n\n let mut this_region = vec![(x, y)];\n\n while let Some((x, y)) = this_region.pop() {\n\n grid[y][x] = State::Labeled;\n\n safe_adjacent_neighbors(x, y)\n\n .into_iter()\n\n .filter(|(x, y)| grid[*y][*x] == State::Used)\n\n .for_each(|(x, y)| this_region.push((x, y)));\n\n }\n\n }\n\n unique_region_count\n\n}\n\n\n", "file_path": "2017/src/day14.rs", "rank": 94, "score": 316840.41144118615 }, { "content": "fn id_of_non_overlapping_claim(input: &[&str]) -> usize {\n\n let claims: Vec<_> = input.iter().map(Claim::from).collect();\n\n let count_by_coordinate = get_counts_by_coordinate(&claims);\n\n claims\n\n .iter()\n\n .find(|claim| {\n\n claim\n\n .to_coordinates()\n\n .iter()\n\n .all(|coord| count_by_coordinate.get(coord).unwrap() == &1)\n\n })\n\n .unwrap()\n\n .id\n\n}\n\n\n", "file_path": "2018/src/day03.rs", "rank": 95, "score": 316818.8297428762 }, { "content": "fn follow_path_and_return_letters_and_step_count(input: Vec<&str>) -> (String, usize) {\n\n let grid = parse_input(input);\n\n let mut y = 0;\n\n let mut x = grid[y].iter().position(|c| c == &'|').unwrap();\n\n let mut dir = Dir::South;\n\n let mut collected = vec![];\n\n let mut step_count = 0;\n\n let tile_at = |x, y| grid.get(y).and_then(|line: &Vec<char>| line.get(x));\n\n while let Some(tile) = tile_at(x, y) {\n\n // println!(\"({}, {}) {} {}\", x, y, dir, tile);\n\n match tile {\n\n '+' => match dir {\n\n Dir::South | Dir::North => 
{\n\n if let Some(' ' | '|') = tile_at(x + 1, y) {\n\n x -= 1;\n\n dir = Dir::West\n\n } else {\n\n x += 1;\n\n dir = Dir::East\n\n }\n", "file_path": "2017/src/day19.rs", "rank": 96, "score": 316702.48001766845 }, { "content": "fn find_high_score(input: Vec<&str>, fix_calories: bool) -> isize {\n\n let ingredients = parse_ingredients(input);\n\n let mut score = 0;\n\n for i0 in 0..=TOTAL_AMOUNT {\n\n if ingredients.len() == 2 {\n\n let i1 = TOTAL_AMOUNT - i0;\n\n let amounts = vec![i0, i1];\n\n score = score.max(calc_score(&ingredients, &amounts, fix_calories));\n\n } else {\n\n for i1 in 0..=(TOTAL_AMOUNT - i0) {\n\n for i2 in 0..=(TOTAL_AMOUNT - i0 - i1) {\n\n let i3 = TOTAL_AMOUNT - i0 - i1 - i2;\n\n let amounts = vec![i0, i1, i2, i3];\n\n score = score.max(calc_score(&ingredients, &amounts, fix_calories));\n\n }\n\n }\n\n }\n\n }\n\n score\n\n}\n\n\n\nconst TARGET_CALORIES: isize = 500;\n", "file_path": "2015/src/day15.rs", "rank": 97, "score": 315559.218611642 }, { "content": "fn generate_boolvec_of_length(input: &str, min_len: usize) -> Vec<bool> {\n\n let mut output = input.to_boolvec();\n\n while output.len() < min_len {\n\n output = lengthen_boolvec(output);\n\n }\n\n output\n\n}\n", "file_path": "2016/src/day16.rs", "rank": 98, "score": 315136.0122844686 }, { "content": "fn parse_shuffle_techniques(input: Vec<&str>) -> Vec<ShuffleTechnique> {\n\n input.into_iter().map(ShuffleTechnique::from).collect()\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 99, "score": 315129.72151066153 } ]
Rust
src/update_status.rs
FutureTVGroup/update-broker
2d8468473b5b72a228a5bdae95f50789e0bbf3b4
use futures::Future; use futures::Stream; use inotify::wrapper::Event; use slog::Logger; use std::ffi::OsString; use std::io::Result; use std::io::{Error, ErrorKind}; use std::ops::Deref; use std::path::Path; use std::process::Command; use std::rc::Rc; use std::time::{SystemTime, UNIX_EPOCH}; use tokio_core::reactor::Handle; use tokio_inotify::AsyncINotify; use tokio_inotify::{IN_CREATE, IN_DELETE}; static VERSION_ZERO: &'static str = "0.0.0"; #[derive(Debug, PartialEq, Eq)] #[allow(dead_code)] pub enum UpdateStatus { Idle, CheckingForUpdate, UpdateAvailable, Downloading, Verifying, Finalizing, UpdatedNeedReboot, ReportingErrorEvent, } impl Deref for UpdateStatus { type Target = str; fn deref(&self) -> &'static str { match *self { UpdateStatus::Idle => &"UPDATE_STATUS_IDLE", UpdateStatus::CheckingForUpdate => &"UPDATE_STATUS_CHECKING_FOR_UPDATE", UpdateStatus::UpdateAvailable => &"UPDATE_STATUS_UPDATE_AVAILABLE", UpdateStatus::Downloading => &"UPDATE_STATUS_DOWNLOADING", UpdateStatus::Verifying => &"UPDATE_STATUS_VERIFYING", UpdateStatus::Finalizing => &"UPDATE_STATUS_FINALIZING", UpdateStatus::UpdatedNeedReboot => &"UPDATE_STATUS_UPDATED_NEED_REBOOT", UpdateStatus::ReportingErrorEvent => &"UPDATE_STATUS_REPORTING_ERROR_EVENT", } } } #[derive(Debug)] pub struct UpdateStatusIndication { pub last_checked_time: SystemTime, pub progress: f64, pub current_operation: UpdateStatus, pub new_version: String, pub new_size: i64, } impl UpdateStatusIndication { fn new(current_operation: UpdateStatus) -> UpdateStatusIndication { UpdateStatusIndication { last_checked_time: SystemTime::now(), progress: 0.0, current_operation: current_operation, new_version: UpdateStatusIndication::version(), new_size: 0, } } fn version() -> String { Command::new("lsb_release") .arg("-r") .arg("-s") .output() .and_then(|o| { String::from_utf8(o.stdout) .map(|mut s| { let len = s.trim_right().len(); s.truncate(len); s }).map_err(|_| Error::new(ErrorKind::InvalidData, "Invalid version string")) 
}).unwrap_or_else(|_| String::from(VERSION_ZERO)) } pub fn last_checked_time_millis(&self) -> i64 { let duration = self.last_checked_time.duration_since(UNIX_EPOCH).unwrap(); let nanos = duration.subsec_nanos() as u64; return ((1000 * 1000 * 1000 * duration.as_secs() + nanos) / (1000 * 1000)) as i64; } pub fn from_inotify_event(event: &Event) -> Option<UpdateStatusIndication> { let status: Option<UpdateStatus>; if event.is_create() { status = Option::Some(UpdateStatus::UpdatedNeedReboot); } else if event.is_delete() { status = Option::Some(UpdateStatus::Idle); } else { status = Option::None; } status.map(|s| UpdateStatusIndication::new(s)) } pub fn from_path(path: &Path) -> UpdateStatusIndication { let status: UpdateStatus; if path.exists() { status = UpdateStatus::UpdatedNeedReboot; } else { status = UpdateStatus::Idle; } UpdateStatusIndication::new(status) } } #[derive(Debug)] pub struct UpdateStatusNotifier(); pub trait UpdateStatusIndicationConsumer { fn status_changed(&self, status: UpdateStatusIndication) -> (); } impl UpdateStatusNotifier { fn add_watch(inotify: AsyncINotify, path: &Path) -> Result<AsyncINotify> { path.parent().map_or( Err(Error::new( ErrorKind::NotFound, "Invalid path to reboot sentinel file", )), |dir| { inotify .add_watch(dir, IN_CREATE | IN_DELETE) .map(|_| inotify) }, ) } fn get_file_name_os_string(path: &Path) -> Option<OsString> { path.file_name().map(|f| f.to_os_string()) } pub fn new_with_path_and_consumer( handle: &Handle, path: &Path, consumer: Box<UpdateStatusIndicationConsumer>, logger: Rc<Logger>, ) -> Result<Box<Future<Item = (), Error = Error>>> { if let Some(sentinel_file) = UpdateStatusNotifier::get_file_name_os_string(path) { AsyncINotify::init(handle) .and_then(|stream| UpdateStatusNotifier::add_watch(stream, path)) .map(|stream| { stream.filter(move |event: &Event| event.name.as_os_str() == sentinel_file) }).map(|stream| { stream .map(|ev| UpdateStatusIndication::from_inotify_event(&ev)) .map_err(move |e| { 
warn!(&logger, "Error handling watch. {:?}", e); e }) }).map(|stream| { return Box::new(stream.for_each(move |v| { if let Some(indication) = v { consumer.status_changed(indication) } Ok(()) })) as Box<Future<Item = (), Error = Error>>; }) } else { Err(Error::new( ErrorKind::NotFound, "Invalid path to reboot sentinel file", )) } } }
use futures::Future; use futures::Stream; use inotify::wrapper::Event; use slog::Logger; use std::ffi::OsString; use std::io::Result; use std::io::{Error, ErrorKind}; use std::ops::Deref; use std::path::Path; use std::process::Command; use std::rc::Rc; use std::time::{SystemTime, UNIX_EPOCH}; use tokio_core::reactor::Handle; use tokio_inotify::AsyncINotify; use tokio_inotify::{IN_CREATE, IN_DELETE}; static VERSION_ZERO: &'static str = "0.0.0"; #[derive(Debug, PartialEq, Eq)] #[allow(dead_code)] pub enum UpdateStatus { Idle, CheckingForUpdate, UpdateAvailable, Downloading, Verifying, Finalizing, UpdatedNeedReboot, ReportingErrorEvent, } impl Deref for UpdateStatus { type Target = str; fn deref(&self) -> &'static str { match *self { UpdateStatus::Idle => &"UPDATE_STATUS_IDLE", UpdateStatus::CheckingForUpdate => &"UPDATE_STATUS_CHECKING_FOR_UPDATE", UpdateStatus::UpdateAvailable => &"UPDATE_STATUS_UPDATE_AVAILABLE", UpdateStatus::Downloading => &"UPDATE_STATUS_DOWNLOADING", UpdateStatus::Verifying => &"UPDATE_STATUS_VERIFYING", UpdateStatus::Finalizing => &"UPDATE_STATUS_FINALIZING", UpdateStatus::UpdatedNeedReboot => &"UPDATE_STATUS_UPDATED_NEED_REBOOT", UpdateStatus::ReportingErrorEvent => &"UPDATE_STATUS_REPORTING_ERROR_EVENT", } } } #[derive(Debug)] pub struct UpdateStatusIndication { pub last_checked_time: SystemTime, pub progress: f64, pub current_operation: UpdateStatus, pub new_version: String, pub new_size: i64, } impl UpdateStatusIndication { fn new(current_operation: UpdateStatus) -> UpdateStatusIndication { UpdateStatusIndication { last_checked_time: SystemTime::now(), progress: 0.0, current_operation: current_operation, new_version: UpdateStatusIndication::version(), new_size: 0, } } fn version() -> String { Command::new("lsb_release") .arg("-r") .arg("-s") .output() .and_then(|o| { String::from_utf8(o.stdout) .map(|mut s| { let len = s.trim_right().len(); s.truncate(len); s }).map_err(|_| Error::new(ErrorKind::InvalidData, "Invalid version string")) 
}).unwrap_or_else(|_| String::from(VERSION_ZERO)) } pub fn last_checked_time_millis(&self) -> i64 { let duration = self.last_checked_time.duration_since(UNIX_EPOCH).unwrap(); let nanos = duration.subsec_nanos() as u64; return ((1000 * 1000 * 1000 * duration.as_secs() + nanos) / (1000 * 1000)) as i64; } pub fn from_inotify_event(event: &Event) -> Option<UpdateStatusIndication> { let status: Option<UpdateStatus>; if event.is_create() { status = Option::Some(UpdateStatus::UpdatedNeedReboot); } else if event.is_delete() { status = Option::Some(UpdateStatus::Idle); } else { status = Option::None; } status.map(|s| UpdateStatusIndication::new(s)) } pub fn from_path(path: &Path) -> UpdateStatusIndication { let status: UpdateStatus; if path.exists() { status = UpdateStatus::UpdatedNeedReboot; } else { status = UpdateStatus::Idle; } UpdateStatusIndication::new(status) } } #[derive(Debug)] pub struct UpdateStatusNotifier(); pub trait UpdateStatusIndicationConsumer { fn status_changed(&self, status: UpdateStatusIndication) -> (); } impl UpdateStatusNotifier { fn add_watch(inotify: AsyncINotify, path: &Path) -> Result<AsyncINotify> { path.parent().map_or( Err(Error::new( ErrorKind::NotFound, "Invalid path to reboot sentinel file", )), |dir| { inotify .add_watch(dir, IN_CREATE | IN_DELETE) .map(|_| inotify) }, ) } fn get_file_name_os_string(path: &Path) -> Option<OsString> { path.file_name().map(|f| f.to_os_string()) } pub fn new_with_path_and_consumer( handle: &Handle, path: &Path, consumer: Box<UpdateStatusIndicationConsumer
cINotify::init(handle) .and_then(|stream| UpdateStatusNotifier::add_watch(stream, path)) .map(|stream| { stream.filter(move |event: &Event| event.name.as_os_str() == sentinel_file) }).map(|stream| { stream .map(|ev| UpdateStatusIndication::from_inotify_event(&ev)) .map_err(move |e| { warn!(&logger, "Error handling watch. {:?}", e); e }) }).map(|stream| { return Box::new(stream.for_each(move |v| { if let Some(indication) = v { consumer.status_changed(indication) } Ok(()) })) as Box<Future<Item = (), Error = Error>>; }) } else { Err(Error::new( ErrorKind::NotFound, "Invalid path to reboot sentinel file", )) } } }
>, logger: Rc<Logger>, ) -> Result<Box<Future<Item = (), Error = Error>>> { if let Some(sentinel_file) = UpdateStatusNotifier::get_file_name_os_string(path) { Asyn
random
[ { "content": "pub fn engine(path: &Path, logger: Rc<Logger>) -> Result<(), IoError> {\n\n let owned_path = path.to_owned();\n\n let connection_r = Connection::get_private(BusType::System);\n\n if connection_r.is_err() {\n\n return connection_r.map(|_| ()).map_err(|e| {\n\n IoError::new(\n\n ErrorKind::Other,\n\n format!(\"Error creating connection. {:?}\", e),\n\n )\n\n });\n\n }\n\n let connection = Rc::new(connection_r.unwrap());\n\n let registration =\n\n connection.register_name(\"com.coreos.update1\", NameFlag::ReplaceExisting as u32);\n\n if registration.is_err() {\n\n return registration.map(|_| ()).map_err(|e| {\n\n IoError::new(ErrorKind::Other, format!(\"Error registering name. {:?}\", e))\n\n });\n\n }\n\n let f = AFactory::new_afn::<()>();\n", "file_path": "src/server.rs", "rank": 1, "score": 47151.60559156088 }, { "content": "fn main() {\n\n let yaml = load_yaml!(\"cli.yaml\");\n\n let matches = App::from_yaml(yaml).get_matches();\n\n let journal_drain = JournaldDrain.ignore_res();\n\n let root: Logger;\n\n\n\n if matches.is_present(\"debug\") {\n\n let decorator = slog_term::TermDecorator::new().build();\n\n let term_drain = slog_term::FullFormat::new(decorator).build().fuse();\n\n let term_drain = slog_async::Async::new(term_drain).build().fuse();\n\n root = Logger::root(Duplicate::new(term_drain, journal_drain).fuse(), o!());\n\n } else {\n\n root = Logger::root(journal_drain, o!());\n\n }\n\n let reboot_sentinel = Path::new(matches.value_of(\"file\").unwrap_or(SENTINEL_FILE));\n\n let logger = Rc::new(root);\n\n if let Err(e) = server::engine(&reboot_sentinel, logger.clone()) {\n\n error!(&logger, \"Startup failure. 
{:?}\", e);\n\n let timer = Timer::default();\n\n timer\n\n .sleep(time::Duration::from_millis(200))\n\n .wait()\n\n .unwrap();\n\n std::process::exit(1);\n\n } else {\n\n info!(&logger, \"Shutdown\");\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 25445.114427003205 }, { "content": "struct DBusUpdateIndicator {\n\n connection: Rc<Connection>,\n\n signal: Arc<Signal<ATree<()>>>,\n\n logger: Rc<Logger>,\n\n}\n\n\n\nimpl UpdateStatusIndicationConsumer for DBusUpdateIndicator {\n\n fn status_changed(&self, status: UpdateStatusIndication) {\n\n info!(&(self.logger), \"Broadcasting update status: {:?}\", status);\n\n self.connection\n\n .send(\n\n self.signal\n\n .msg(\n\n &\"/com/coreos/update1\".into(),\n\n &\"com.coreos.update1.Manager\".into(),\n\n ).append1(status.last_checked_time_millis())\n\n .append1(status.progress)\n\n .append2::<&str, &str>(&status.current_operation, &status.new_version)\n\n .append1(status.new_size),\n\n ).map(|_| ())\n\n .unwrap_or_else(|e| {\n\n warn!(&self.logger, \"Could not broadcast update signal. 
{:?}\", e);\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 3, "score": 23306.0956531708 }, { "content": "use slog_journald::JournaldDrain;\n\n\n\nuse std::path::Path;\n\nuse std::rc::Rc;\n\nuse std::time;\n\n\n\nuse clap::App;\n\nuse futures::Future;\n\nuse tokio_timer::Timer;\n\nmod server;\n\nmod update_status;\n\n\n\nstatic SENTINEL_FILE: &'static str = \"/var/run/reboot-required\";\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 11.509209098256163 }, { "content": " let status = UpdateStatusIndication::from_path(&owned_path);\n\n debug!(\n\n &l,\n\n \"Sending update status to {:?}: {:?}\",\n\n m.msg.sender(),\n\n status\n\n );\n\n Ok(vec![\n\n m.msg\n\n .method_return()\n\n .append1(status.last_checked_time_millis())\n\n .append1(status.progress)\n\n .append2::<&str, &str>(\n\n &status.current_operation,\n\n &status.new_version,\n\n ).append1(status.new_size),\n\n ])\n\n }).outarg::<i64, _>(\"last_checked_time\")\n\n .outarg::<f64, _>(\"progress\")\n\n .outarg::<&str, _>(\"current_operation\")\n", "file_path": "src/server.rs", "rank": 11, "score": 10.079903376179479 }, { "content": "\n\n let signal = Arc::new(\n\n f.signal(\"StatusUpdate\", ())\n\n .sarg::<i64, _>(\"last_checked_time\")\n\n .sarg::<f64, _>(\"progress\")\n\n .sarg::<&str, _>(\"current_operation\")\n\n .sarg::<&str, _>(\"new_version\")\n\n .sarg::<i64, _>(\"new_size\"),\n\n );\n\n let l = logger.clone();\n\n let l2 = logger.clone();\n\n let l3 = logger.clone();\n\n let tree = f.tree(ATree::new()).add(\n\n f.object_path(\"/com/coreos/update1\", ())\n\n .introspectable()\n\n .add(\n\n f.interface(\"com.coreos.update1.Manager\", ())\n\n .add_s(signal.clone())\n\n .add_m(\n\n f.method(\"GetStatus\", (), move |m| {\n", "file_path": "src/server.rs", "rank": 12, "score": 7.83465721712668 }, { "content": "// This file is part of the upgrade broker\n\n// (c) 2017 FutureTV Production GmbH\n\nuse dbus::tree::{MethodErr, Signal};\n\nuse dbus::{BusType, Connection, NameFlag};\n\nuse 
dbus_tokio::tree::{AFactory, ATree, ATreeServer};\n\nuse dbus_tokio::AConnection;\n\nuse futures::future::Executor;\n\nuse futures::Future;\n\nuse futures::Stream;\n\nuse std::path::Path;\n\nuse std::result::Result;\n\nuse tokio_core::reactor::Core;\n\nuse tokio_signal::unix::{Signal as USignal, SIGTERM};\n\nuse tokio_timer::Timer;\n\n\n\nuse slog::Logger;\n\nuse std::io::{Error as IoError, ErrorKind};\n\nuse std::process;\n\nuse std::rc::Rc;\n\nuse std::sync::Arc;\n\nuse std::time;\n\n\n\nuse update_status::{UpdateStatusIndication, UpdateStatusIndicationConsumer, UpdateStatusNotifier};\n\n\n", "file_path": "src/server.rs", "rank": 15, "score": 5.428740419562988 }, { "content": " }\n\n let core_r = Core::new();\n\n if core_r.is_err() {\n\n return core_r.map(|_| ());\n\n }\n\n let mut core = Core::new().unwrap();\n\n let aconn = AConnection::new(connection.clone(), core.handle()).unwrap();\n\n let server = ATreeServer::new(connection.clone(), &tree, aconn.messages().unwrap());\n\n\n\n // Make the server run forever\n\n let server = server.for_each(|m| {\n\n debug!(&logger, \"Unhandled message: {:?}\", m);\n\n Ok(())\n\n });\n\n\n\n let notifier = UpdateStatusNotifier::new_with_path_and_consumer(\n\n &core.handle(),\n\n path,\n\n Box::new(DBusUpdateIndicator {\n\n connection: connection,\n", "file_path": "src/server.rs", "rank": 16, "score": 4.314145487245559 }, { "content": " signal: signal.clone(),\n\n logger: logger.clone(),\n\n }),\n\n logger.clone(),\n\n );\n\n\n\n if notifier.is_err() {\n\n return notifier.map(|_| ());\n\n }\n\n\n\n let l4 = logger.clone();\n\n info!(&logger, \"Monitoring {:?}\", path);\n\n let ex = core.execute(notifier.unwrap().map_err(move |e| {\n\n error!(&l4, \"File watch task exited. 
{:?}\", e);\n\n Timer::default()\n\n .sleep(time::Duration::from_millis(200))\n\n .wait()\n\n .unwrap();\n\n process::exit(1);\n\n }));\n", "file_path": "src/server.rs", "rank": 17, "score": 3.9234573897843457 }, { "content": "// This file is part of the upgrade broker\n\n// (c) 2017 FutureTV Production GmbH\n\n#[macro_use]\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate slog;\n\nextern crate dbus;\n\nextern crate dbus_tokio;\n\nextern crate futures;\n\nextern crate inotify;\n\nextern crate libc;\n\nextern crate slog_async;\n\nextern crate slog_journald;\n\nextern crate slog_term;\n\nextern crate tokio_core;\n\nextern crate tokio_inotify;\n\nextern crate tokio_signal;\n\nextern crate tokio_timer;\n\n\n\nuse slog::*;\n", "file_path": "src/main.rs", "rank": 18, "score": 3.4647346971985575 }, { "content": " .outarg::<&str, _>(\"new_version\")\n\n .outarg::<i64, _>(\"new_size\"),\n\n ).add_m(f.method(\"AttemptUpdate\", (), move |_| {\n\n warn!(&l2, \"Ignoring attempt to call AttemptUpdate\");\n\n Err(MethodErr::failed(&\"Not implemented\".to_owned()))\n\n })).add_m(f.method(\"ResetStatus\", (), move |_| {\n\n warn!(&l3, \"Ignoring attempt to call ResetStatus\");\n\n Err(MethodErr::failed(&\"Not implemented\".to_owned()))\n\n })),\n\n ),\n\n );\n\n\n\n let registration2_r = tree.set_registered(&connection, true);\n\n if registration2_r.is_err() {\n\n return registration2_r.map_err(|e| {\n\n IoError::new(\n\n ErrorKind::Other,\n\n format!(\"Error registering D-Bus tree. 
{:?}\", e),\n\n )\n\n });\n", "file_path": "src/server.rs", "rank": 19, "score": 3.178167162770117 }, { "content": "# Update Broker: Using locksmithd on Ubuntu (and other Debian derived distributions)\n\n\n\n[![Build Status](https://dev.azure.com/glaux/update-broker/_apis/build/status/ngrewe.update-broker?branchName=master)](https://dev.azure.com/glaux/update-broker/_build/latest?definitionId=1&branchName=master)\n\n\n\n[locksmithd](https://github.com/coreos/locksmithd) is a very useful tool for coordinating reboots among a fleet of machines. By default, its use is limited to CoreOS Container Linux, where it integrates tightly with [update\\_engine](https://github.com/coreos/update_engine). **update-broker** is small daemon that allows the apt package manager to provide notifications that would usually be provided by update\\_engine. It works by monitoring the existence of the file `/var/run/reboot-required` and\n\nnotifying locksmithd when it is created.\n\n\n\n## Installation\n\n\n\n### From Packages\n\n\n\nFor Ubuntu 16.04 and 18.04 you can install update-broker and locksmithd from packages:\n\n\n\n```sh\n\nsudo apt-key adv --keyserver keyserver.ubuntu.com --recv AF0E925C4504784BF4E0FFF0C90E4BD2B36E75B9\n\necho \"deb https://dl.bintray.com/glaux/production $(lsb_release -s -c) main\" | sudo tee -a /etc/apt/sources.list.d/locksmithd.list\n\nsudo apt-get update\n\nsudo apt-get install locksmithd\n\n```\n\n\n\n### From Source\n\n\n\n#### Update Broker\n\n\n\nApart from a [reasonably recent Rust](https://rustup.rs/), Update Broker depends on libdbus and libsystemd.\n\n\n\n```sh\n\ncurl https://sh.rustup.rs -sSf | sh\n\nsudo apt-get install libsystemd-dev libdbus-1-dev\n\ngit clone https://github.com/FutureTVGroup/update-broker.git\n\ncd update-broker\n\ncargo build\n\nsudo cp target/release/update-broker /usr/local/sbin/\n\nsudo cp assets/com.futuretv-group.UpdateBroker.conf /etc/dbus-1/system.d/\n\ncat assets/update-broker.service| sed -e 
\"s%/usr/sbin/%/usr/local/sbin/%\" | sudo tee -a /etc/systemd/system/update-broker.service\n\nsudo systemctl enable update-broker\n\nsudo systemctl start update-broker\n\n```\n\n\n", "file_path": "README.md", "rank": 20, "score": 2.765650486554776 }, { "content": " if ex.is_err() {\n\n return ex.map(|_| ()).map_err(|e| {\n\n IoError::new(\n\n ErrorKind::Other,\n\n format!(\"Could not schedule inotify watcher. {:?}\", e),\n\n )\n\n });\n\n }\n\n let termination = USignal::new(SIGTERM, &core.handle())\n\n .flatten_stream()\n\n .take(1)\n\n .map_err(|_| ())\n\n .into_future()\n\n .select2(server.map_err(|_| ()));\n\n return core\n\n .run(termination)\n\n .map(|_| ())\n\n .map_err(|_| IoError::new(ErrorKind::Other, \"Error running server\"));\n\n // Ok(())\n\n}\n", "file_path": "src/server.rs", "rank": 21, "score": 2.5781879847102642 }, { "content": "#### Locksmithd\n\n\n\nLocksmithd has no dependencies apart from Go.\n\n\n\n```sh\n\nsudo apt-get install golang-any\n\ngit clone https://github.com/coreos/locksmith\n\ncd locksmith\n\nmake\n\nsudo cp bin/locksmithctl /usr/local/sbin/locksmithctl\n\nsudo ln -s /usr/local/sbin/locksmithctl /usr/local/sbin/locksmithd\n\ncat systemd/locksmithd.service| sed -e \"s%/usr/lib/locksmith/%/usr/local/sbin/%\" | sudo tee -a /etc/systemd/system/locksmithd.service\n\nsudo mkdir -p /etc/coreos/\n\necho \"REBOOT_STRATEGY=off\" | sudo tee -a /etc/coreos/update.conf\n\nsudo systemctl enable locksmithd\n\nsudo systemctl start locksmithd\n\n```\n\n\n\n## Configuration\n\n\n\nThere are no differences compared to [configuring locksmithd on CoreOS Container Linux](https://github.com/coreos/locksmith#configuration).\n", "file_path": "README.md", "rank": 22, "score": 0.8437611463800969 } ]
Rust
src/pwasm.rs
jakelang/libeci
a4add3d49ec631dbb9c7f65f50655f6ec3ca5f2e
/* * libeci: Ethereum WebAssembly ABI compliance library * * Copyright (c) 2018 Jake Lang * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ use parity_wasm::elements::{External, FunctionType, Internal, Module, Type}; pub fn func_type_by_index(module: &Module, index: usize) -> FunctionType { let function_section = module .function_section() .expect("No function section found"); let type_section = module.type_section().expect("No type section found"); let import_section_len: usize = match module.import_section() { Some(import) => import .entries() .iter() .filter(|entry| match entry.external() { &External::Function(_) => true, _ => false, }) .count(), None => 0, }; let function_index_in_section = index - import_section_len; let func_type_ref: usize = function_section.entries()[function_index_in_section].type_ref() as usize; match type_section.types()[func_type_ref] { Type::Function(ref func_type) => func_type.clone(), } } pub fn imported_func_type_by_index(module: &Module, index: usize) -> FunctionType { let import_section = module.import_section().expect("No function section found"); let type_section = module.type_section().expect("No type section found"); let func_type_ref: usize = match import_section.entries()[index].external() { &External::Function(idx) => idx as usize, _ => usize::max_value(), }; match type_section.types()[func_type_ref] { Type::Function(ref func_type) => func_type.clone(), } } pub fn resolve_export_by_name(module: &Module, name: &str) -> Option<(u32, Internal)> { if !has_export_section(module) { None } else { let idx: Option<(u32, Internal)> = match module .export_section() .unwrap() .entries() .iter() .find(|export| if export.field() == name { true } else { false }) { Some(export) => match *export.internal() { Internal::Function(index) => Some((index, Internal::Function(index))), Internal::Memory(index) => Some((index, Internal::Memory(index))), Internal::Global(index) => Some((index, Internal::Global(index))), Internal::Table(index) => Some((index, Internal::Table(index))), }, None => None, }; idx } } pub fn get_imports(module: &Module) -> Option<Vec<(&str, &str)>> { if 
!has_import_section(module) { return None; } else { let imports_list: Option<Vec<(&str, &str)>> = Some( module .import_section() .unwrap() .entries() .iter() .map(|x| (x.module(), x.field())) .collect(), ); imports_list } } pub fn has_export_section(module: &Module) -> bool { match module.export_section() { Some(_thing) => true, None => false, } } pub fn has_import_section(module: &Module) -> bool { match module.import_section() { Some(_thing) => true, None => false, } }
/* * libeci: Ethereum WebAssembly ABI compliance library * * Copyright (c) 2018 Jake Lang * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ use parity_wasm::elements::{External, FunctionType, Internal, Module, Type}; pub fn func_type_by_index(module: &Module, index: usize) -> FunctionType { let function_section = module .function_section() .expect("No function section found"); let type_section = module.type_section().expect("No type section found"); let import_section_len: usize = match module.import_section() { Some(import) => import .entries() .iter() .filter(|entry| match entry.external() { &External::Function(_) => true, _ => false, }) .count(), None => 0, }; let function_index_in_section = index - import_section_len; let func_type_ref: usize = function_section.entries()[function_index_in_section].type_ref() as usize; match type_section.types()[func_type_ref] { Type::Function(ref func_type) => func_type.clone(), } } pub fn imported_func_type_by_index(module: &Module, index: usize) -> FunctionType { let import_section = module.import_section().expect("No function section found"); let type_section = module.type_section().expect("No type section found"); let func_type_ref: usize = match import_section.entries()[index].external() { &External::Function(idx) => idx as usize, _ => usize::max_value(), }; match type_section.types()[func_type_ref] { Type::Function(ref func_type) => func_type.clone(), } }
pub fn get_imports(module: &Module) -> Option<Vec<(&str, &str)>> { if !has_import_section(module) { return None; } else { let imports_list: Option<Vec<(&str, &str)>> = Some( module .import_section() .unwrap() .entries() .iter() .map(|x| (x.module(), x.field())) .collect(), ); imports_list } } pub fn has_export_section(module: &Module) -> bool { match module.export_section() { Some(_thing) => true, None => false, } } pub fn has_import_section(module: &Module) -> bool { match module.import_section() { Some(_thing) => true, None => false, } }
pub fn resolve_export_by_name(module: &Module, name: &str) -> Option<(u32, Internal)> { if !has_export_section(module) { None } else { let idx: Option<(u32, Internal)> = match module .export_section() .unwrap() .entries() .iter() .find(|export| if export.field() == name { true } else { false }) { Some(export) => match *export.internal() { Internal::Function(index) => Some((index, Internal::Function(index))), Internal::Memory(index) => Some((index, Internal::Memory(index))), Internal::Global(index) => Some((index, Internal::Global(index))), Internal::Table(index) => Some((index, Internal::Table(index))), }, None => None, }; idx } }
function_block-full_function
[ { "content": "/// Utility function checking that a module has an exported function with a given signature.\n\npub fn has_func_export(module: &Module, name: &str, sig: FunctionType) -> CheckStatus {\n\n match resolve_export_by_name(module, name) {\n\n Some((index, reference)) => if reference == Internal::Function(index)\n\n && func_type_by_index(module, index as usize) == sig\n\n {\n\n CheckStatus::Good\n\n } else {\n\n CheckStatus::Malformed\n\n },\n\n None => CheckStatus::Nonexistent,\n\n }\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 4, "score": 68881.46348996017 }, { "content": "/// Checks that the module only imports functions from the \"ethereum\" namespace.\n\npub fn imports_only_eei_namespace(module: &Module) -> CheckStatus {\n\n let importlist = get_imports(module).unwrap();\n\n\n\n for (module, _field) in importlist {\n\n if module != \"ethereum\" {\n\n return CheckStatus::Malformed;\n\n }\n\n }\n\n CheckStatus::Good\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 5, "score": 66567.3360496527 }, { "content": "/// Ensures that a module has not incorrectly specified a start function.\n\npub fn chk_no_startfn(module: &Module) -> CheckStatus {\n\n match module.start_section() {\n\n Some(_thing) => CheckStatus::Malformed,\n\n None => CheckStatus::Good,\n\n }\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 8, "score": 54546.07311376465 }, { "content": "/// Verifies that the EEI has been imported with the correct function signatures.\n\npub fn chk_func_signatures(module: &Module) -> CheckStatus {\n\n if has_import_section(module) {\n\n eei_check_func_sigs(module)\n\n } else {\n\n CheckStatus::Good\n\n }\n\n}\n\n\n\n/*\n\n * Utilities\n\n */\n\n\n", "file_path": "src/eci_std.rs", "rank": 9, "score": 52686.42097414679 }, { "content": "/// Checks that the EEI host functions have been imported with the correct namespace.\n\npub fn chk_eei_namespace(module: &Module) -> CheckStatus {\n\n if has_import_section(module) {\n\n 
imports_only_eei_namespace(module)\n\n } else {\n\n CheckStatus::Good\n\n }\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 10, "score": 52686.3283506045 }, { "content": "/// Checks that the module's \"main\" function has been exported with no arguments or return values.\n\npub fn chk_main_exported(module: &Module) -> CheckStatus {\n\n has_func_export(module, \"main\", FunctionType::default())\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 11, "score": 52686.19875766224 }, { "content": "/// Checks that the module's memory segment has been properly exported.\n\npub fn chk_mem_exported(module: &Module) -> CheckStatus {\n\n match resolve_export_by_name(module, \"memory\") {\n\n Some((index, reference)) => if reference == Internal::Memory(index) {\n\n CheckStatus::Good\n\n } else {\n\n CheckStatus::Malformed\n\n },\n\n None => CheckStatus::Nonexistent,\n\n }\n\n}\n\n\n", "file_path": "src/eci_std.rs", "rank": 12, "score": 52683.84844078197 }, { "content": "/// Checks the imported function signatures against the EEI import list. 
Will fail if the module\n\n/// doesn't have an import section.\n\npub fn eei_check_func_sigs(module: &Module) -> CheckStatus {\n\n let eei = ImportInterfaceMap::default();\n\n\n\n module\n\n .import_section()\n\n .unwrap()\n\n .entries()\n\n .iter()\n\n .map(|x| (x.field(), x.external()))\n\n .map(|(name, binding)| {\n\n (\n\n eei.get_func(name),\n\n match *binding {\n\n External::Function(idx) => {\n\n Some(imported_func_type_by_index(module, idx as usize))\n\n }\n\n _ => None,\n\n },\n\n )\n\n })\n", "file_path": "src/eci_std.rs", "rank": 13, "score": 50972.425178366204 }, { "content": "## libeci is now obsoleted by [wasm-chisel](https://github.com/wasmx/wasm-chisel)\n\n\n\n# libeci\n\nlibeci is a small library written in Rust for verifying the properties of ewasm bytecode.\n\nInitially designed with the properties of the ewasm ECI (Ethereum Contract Interface) in mind, libeci can be easily expanded to support checking other properties of ewasm bytecode.\n\n\n\nlibeci is also intended, in the future, to be able to compile to WebAssembly and be called from a browser environment.\n\n\n\n[Documentation](https://jakelang.github.io/libeci)\n", "file_path": "README.md", "rank": 14, "score": 13476.290162683199 }, { "content": "/*\n\n * libeci: Ethereum WebAssembly ABI compliance library\n\n *\n\n * Copyright (c) 2018 Jake Lang\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n\n * of this software and associated documentation files (the \"Software\"), to deal\n\n * in the Software without restriction, including without limitation the rights\n\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n * copies of the Software, and to permit persons to whom the Software is\n\n * furnished to do so, subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS 
PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/eei.rs", "rank": 15, "score": 75.42022752099766 }, { "content": "/*\n\n * libeci: Ethereum WebAssembly ABI compliance library\n\n *\n\n * Copyright (c) 2018 Jake Lang\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n\n * of this software and associated documentation files (the \"Software\"), to deal\n\n * in the Software without restriction, including without limitation the rights\n\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n * copies of the Software, and to permit persons to whom the Software is\n\n * furnished to do so, subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/checklist.rs", "rank": 16, "score": 75.42022752099767 }, { "content": "/*\n\n * libeci: Ethereum WebAssembly ABI compliance library\n\n *\n\n * Copyright (c) 2018 Jake Lang\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n\n * of this software and associated documentation files (the \"Software\"), to deal\n\n * in the Software without restriction, including without limitation the rights\n\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n * copies of the Software, and to permit persons to whom the Software is\n\n * furnished to do so, subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/lib.rs", "rank": 17, "score": 75.42022752099766 }, { "content": "/*\n\n * libeci: Ethereum WebAssembly ABI compliance library\n\n *\n\n * Copyright (c) 2018 Jake Lang\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n\n * of this software and associated documentation files (the \"Software\"), to deal\n\n * in the Software without restriction, including without limitation the rights\n\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n * copies of the Software, and to permit persons to whom the Software is\n\n * furnished to do so, subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/checker.rs", "rank": 18, "score": 75.42022752099766 }, { "content": "/*\n\n * libeci: Ethereum WebAssembly ABI compliance library\n\n *\n\n * Copyright (c) 2018 Jake Lang\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n\n * of this software and associated documentation files (the \"Software\"), to deal\n\n * in the Software without restriction, including without limitation the rights\n\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n * copies of the Software, and to permit persons to whom the Software is\n\n * furnished to do so, subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/eci_std.rs", "rank": 19, "score": 75.42022752099767 }, { "content": " * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n * SOFTWARE.\n\n */\n\n\n\nuse checklist::CheckStatus;\n\nuse eei::ImportInterfaceMap;\n\nuse parity_wasm::elements::{External, FunctionType, Internal, Module};\n\nuse pwasm::*;\n\n\n\n/// Checks that the module's \"main\" function has been exported with no arguments or return values.\n", "file_path": "src/eci_std.rs", "rank": 21, "score": 18.786743205243532 }, { "content": " * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n * SOFTWARE.\n\n */\n\n\n\nuse parity_wasm::elements::{FunctionType, ValueType};\n\nuse std::collections::HashMap;\n\n\n\nstatic EEI_REV: i32 = 3;\n\n\n\n/// Struct mapping an imported function name to its expected signature.\n\npub struct ImportInterfaceMap {\n\n import_list: HashMap<String, FunctionType>,\n\n}\n\n\n\nimpl ImportInterfaceMap {\n\n pub fn new() -> Self {\n\n ImportInterfaceMap {\n\n import_list: HashMap::new(),\n\n }\n\n }\n", "file_path": "src/eei.rs", "rank": 22, "score": 17.145585450262903 }, { "content": "\n\n pub fn get_func(&self, name: &str) -> Option<FunctionType> {\n\n let result = self.import_list.get(&name.to_string());\n\n\n\n match result {\n\n Some(func) => Some(func.clone()),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for ImportInterfaceMap {\n\n /// Constructs an ImportInterfaceMap containing the function signatures of the ewasm EEI.\n\n fn default() -> Self {\n\n ImportInterfaceMap {\n\n import_list: {\n\n let imports: HashMap<String, FunctionType> = [\n\n (\n\n \"useGas\".to_string(),\n\n FunctionType::new(vec![ValueType::I64], None),\n", "file_path": "src/eei.rs", "rank": 23, "score": 14.821980263310635 }, { 
"content": " * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n * SOFTWARE.\n\n */\n\n\n\nuse eci_std;\n\nuse parity_wasm::elements::Module;\n\nuse std::collections::HashMap;\n\n\n\n/// Enum describing the state of a check. Always initialized as \"Unknown.\"\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum CheckStatus {\n\n Unknown,\n\n Nonexistent,\n\n Malformed,\n\n Good,\n\n}\n\n\n\n/// Struct containing data about the status of a check, and a function pointer implements the check.\n\n#[derive(Clone)]\n\npub struct Check {\n", "file_path": "src/checklist.rs", "rank": 24, "score": 13.200071481721341 }, { "content": " * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n * SOFTWARE.\n\n */\n\n\n\nextern crate parity_wasm;\n\n\n\npub mod checker;\n\npub mod checklist;\n\npub mod eci_std;\n\npub mod eei;\n\npub mod pwasm;\n", "file_path": "src/lib.rs", "rank": 25, "score": 12.173678028617324 }, { "content": " vec![ValueType::I32, ValueType::I32, ValueType::I32],\n\n None,\n\n ),\n\n ),\n\n (\n\n \"finish\".to_string(),\n\n FunctionType::new(vec![ValueType::I32, ValueType::I32], None),\n\n ),\n\n (\n\n \"revert\".to_string(),\n\n FunctionType::new(vec![ValueType::I32, ValueType::I32], None),\n\n ),\n\n (\n\n \"selfDestruct\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n ].iter()\n\n .cloned()\n\n .collect();\n\n imports\n", "file_path": "src/eei.rs", "rank": 26, "score": 11.66738298860893 }, { "content": " * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n * SOFTWARE.\n\n */\n\n\n\nuse checklist::{CheckStatus, EciChecklist};\n\nuse parity_wasm::elements::{deserialize_buffer, Module};\n\n\n\n#[derive(Clone)]\n\n///Structure initialized with inputted WASM code and a checklist struct.\n\npub struct EcicChecker {\n\n code: Vec<u8>,\n\n checks: EciChecklist,\n\n}\n\n\n\nimpl EcicChecker {\n\n /// Constructs an empty checker struct with no 
code and no checks.\n\n pub fn new() -> Self {\n\n EcicChecker::empty()\n\n }\n\n\n", "file_path": "src/checker.rs", "rank": 27, "score": 11.616498594871368 }, { "content": " FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"getBlockDifficulty\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"getBlockCoinbase\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"getBlockNumber\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I64)),\n\n ),\n\n (\n\n \"getBlockGasLimit\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I64)),\n\n ),\n\n (\n\n \"getBlockTimestamp\".to_string(),\n", "file_path": "src/eei.rs", "rank": 28, "score": 9.792955231336688 }, { "content": " Some(ValueType::I32),\n\n ),\n\n ),\n\n (\n\n \"callDataCopy\".to_string(),\n\n FunctionType::new(\n\n vec![ValueType::I32, ValueType::I32, ValueType::I32],\n\n None,\n\n ),\n\n ),\n\n (\n\n \"getCallDataSize\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I32)),\n\n ),\n\n (\n\n \"getCodeSize\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I32)),\n\n ),\n\n (\n\n \"externalCodeCopy\".to_string(),\n", "file_path": "src/eei.rs", "rank": 29, "score": 9.459920532440371 }, { "content": " FunctionType::new(\n\n vec![\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n None,\n\n ),\n\n ),\n\n (\n\n \"getReturnDataSize\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I32)),\n\n ),\n\n (\n\n \"returnDataCopy\".to_string(),\n\n FunctionType::new(\n", "file_path": "src/eei.rs", "rank": 30, "score": 9.134671050544805 }, { "content": " },\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use eei::ImportInterfaceMap;\n\n use parity_wasm::elements::{FunctionType, ValueType};\n\n\n\n #[test]\n\n fn empty_interface() {\n\n let iface = 
ImportInterfaceMap::new();\n\n assert!(iface.import_list.is_empty());\n\n }\n\n\n\n #[test]\n\n fn default_interface_has_methods() {\n\n let iface = ImportInterfaceMap::default();\n\n assert_eq!(\n", "file_path": "src/eei.rs", "rank": 31, "score": 9.085851049243196 }, { "content": " iface.get_func(\"useGas\").unwrap(),\n\n FunctionType::new(vec![ValueType::I64], None)\n\n );\n\n assert_eq!(\n\n iface.get_func(\"create\").unwrap(),\n\n FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n Some(ValueType::I32)\n\n )\n\n );\n\n }\n\n}\n", "file_path": "src/eei.rs", "rank": 32, "score": 8.7602821212107 }, { "content": " FunctionType::new(Vec::new(), Some(ValueType::I64)),\n\n ),\n\n (\n\n \"getTxGasPrice\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"getTxOrigin\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"storageStore\".to_string(),\n\n FunctionType::new(vec![ValueType::I32, ValueType::I32], None),\n\n ),\n\n (\n\n \"storageLoad\".to_string(),\n\n FunctionType::new(vec![ValueType::I32, ValueType::I32], None),\n\n ),\n\n (\n\n \"log\".to_string(),\n", "file_path": "src/eei.rs", "rank": 33, "score": 8.690453931661072 }, { "content": " FunctionType::new(\n\n vec![\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n None,\n\n ),\n\n ),\n\n (\n\n \"getExternalCodeSize\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], Some(ValueType::I32)),\n\n ),\n\n (\n\n \"getCaller\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n (\n\n \"getCallValue\".to_string(),\n", "file_path": "src/eei.rs", "rank": 34, "score": 8.08189483843614 }, { "content": " ),\n\n (\n\n \"getGasLeft\".to_string(),\n\n FunctionType::new(Vec::new(), Some(ValueType::I64)),\n\n ),\n\n (\n\n \"getAddress\".to_string(),\n\n FunctionType::new(vec![ValueType::I32], None),\n\n ),\n\n 
(\n\n \"getExternalBalance\".to_string(),\n\n FunctionType::new(vec![ValueType::I32, ValueType::I32], None),\n\n ),\n\n (\n\n \"getBlockHash\".to_string(),\n\n FunctionType::new(\n\n vec![ValueType::I64, ValueType::I32],\n\n Some(ValueType::I32),\n\n ),\n\n ),\n", "file_path": "src/eei.rs", "rank": 35, "score": 8.067638122888777 }, { "content": " pub fn get_check_status(&self, key: &str) -> CheckStatus {\n\n self.checklist[&key.to_string()].status.clone()\n\n }\n\n\n\n /// Returns a simple boolean value describing whether a check is good or not.\n\n pub fn check_is_good(&self, key: &str) -> bool {\n\n match self.get_check_status(key) {\n\n CheckStatus::Good => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns a function pointer to the implementation of a check.\n\n pub fn get_checker(&self, key: &str) -> fn(&Module) -> CheckStatus {\n\n self.checklist[&key.to_string()].do_check\n\n }\n\n\n\n /// Returns a vector containing each check and its respective status.\n\n pub fn dump_checks(&self) -> Vec<(String, CheckStatus)> {\n\n self.checklist\n", "file_path": "src/checklist.rs", "rank": 36, "score": 7.871833954112032 }, { "content": "\n\n /// Deserializes the WASM code and executes all checks in the checklist.\n\n pub fn fire(&mut self) {\n\n let module = deserialize_buffer::<Module>(&mut self.code).unwrap();\n\n\n\n let check_ids: Vec<String> = self.checks.checklist.keys().map(|x| x.clone()).collect();\n\n\n\n for check in check_ids {\n\n let checkresult = self.checks.get_checker(check.as_str())(&module);\n\n self.checks.set_check_status(check.as_str(), checkresult);\n\n }\n\n }\n\n\n\n /// Writes a brief report of each check to stdout.\n\n pub fn print_report(&self) {\n\n for (id, val) in self.checks.dump_checks().iter() {\n\n println!(\n\n \"ID: {} - STATUS: {}\",\n\n id,\n\n match val {\n", "file_path": "src/checker.rs", "rank": 37, "score": 6.231292716452147 }, { "content": " FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n 
ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n Some(ValueType::I32),\n\n ),\n\n ),\n\n (\n\n \"create\".to_string(),\n\n FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n", "file_path": "src/eei.rs", "rank": 38, "score": 5.699982375006305 }, { "content": " (\n\n \"call\".to_string(),\n\n FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n Some(ValueType::I32),\n\n ),\n\n ),\n\n (\n\n \"callCode\".to_string(),\n\n FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n ValueType::I32,\n", "file_path": "src/eei.rs", "rank": 39, "score": 5.668081178034388 }, { "content": " ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n Some(ValueType::I32),\n\n ),\n\n ),\n\n (\n\n \"callDelegate\".to_string(),\n\n FunctionType::new(\n\n vec![\n\n ValueType::I64,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ValueType::I32,\n\n ],\n\n Some(ValueType::I32),\n\n ),\n\n ),\n\n (\n\n \"callStatic\".to_string(),\n", "file_path": "src/eei.rs", "rank": 40, "score": 5.118276644101221 }, { "content": " .map(|(correctsig, funcsig)| {\n\n if correctsig != None\n\n && funcsig != None\n\n && funcsig.unwrap().clone() == correctsig.unwrap()\n\n {\n\n CheckStatus::Good\n\n } else {\n\n CheckStatus::Malformed\n\n }\n\n })\n\n .find(|x| {\n\n if *x == CheckStatus::Malformed {\n\n true\n\n } else {\n\n false\n\n }\n\n })\n\n .unwrap_or(CheckStatus::Good)\n\n}\n", "file_path": "src/eci_std.rs", "rank": 41, "score": 4.676751110026171 }, { "content": " status: CheckStatus,\n\n do_check: fn(&Module) -> CheckStatus,\n\n}\n\n\n\n/// Checklist structure containing a string-to-check map.\n\n#[derive(Clone)]\n\npub struct EciChecklist {\n\n pub checklist: HashMap<String, Check>,\n\n}\n\n\n\nimpl EciChecklist {\n\n /// Constructs an empty checklist.\n\n pub fn new() -> Self {\n\n EciChecklist {\n\n checklist: 
HashMap::new(),\n\n }\n\n }\n\n\n\n /// Constructs a checklist with the standard ewasm ECI checks.\n\n pub fn default() -> Self {\n", "file_path": "src/checklist.rs", "rank": 42, "score": 4.393921314750013 }, { "content": " /// Constructs an empty checker struct with no code and no checks.\n\n pub fn empty() -> Self {\n\n EcicChecker {\n\n code: Vec::new(),\n\n checks: EciChecklist::new(),\n\n }\n\n }\n\n\n\n /// Initializes with inputted code and the default ECI checks.\n\n pub fn default(input: &Vec<u8>) -> Self {\n\n EcicChecker {\n\n code: input.clone(),\n\n checks: EciChecklist::default(),\n\n }\n\n }\n\n\n\n /// Returns the length of the WASM bytecode.\n\n pub fn code_len(&self) -> usize {\n\n self.code.len()\n\n }\n", "file_path": "src/checker.rs", "rank": 43, "score": 4.261273748133478 }, { "content": " EciChecklist { checklist: checks }\n\n }\n\n\n\n /// Adds a check with the given ID and function implementing said check.\n\n pub fn add_check(&mut self, key: &str, checkfn: fn(&Module) -> CheckStatus) {\n\n self.checklist.insert(\n\n key.to_string(),\n\n Check {\n\n status: CheckStatus::Unknown,\n\n do_check: checkfn,\n\n },\n\n );\n\n }\n\n\n\n /// Sets the status of a check.\n\n pub fn set_check_status(&mut self, key: &str, val: CheckStatus) {\n\n self.checklist.get_mut(&key.to_string()).unwrap().status = val;\n\n }\n\n\n\n /// Returns the status of a check.\n", "file_path": "src/checklist.rs", "rank": 44, "score": 3.493943228919447 }, { "content": " // TODO: Refactor this into a method on CheckStatus\n\n CheckStatus::Unknown => \"Unknown\",\n\n CheckStatus::Nonexistent => \"Nonexistent\",\n\n CheckStatus::Malformed => \"Malformed\",\n\n CheckStatus::Good => \"Good\",\n\n }\n\n )\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use checker::EcicChecker;\n\n use checklist::CheckStatus;\n\n\n\n #[test]\n\n fn empty_code() {\n\n let ctx = EcicChecker::new();\n\n assert!(ctx.code.is_empty());\n", "file_path": "src/checker.rs", "rank": 45, "score": 
2.2520915358963087 }, { "content": " },\n\n ),\n\n (\n\n \"no-startfn\".to_string(),\n\n Check {\n\n status: CheckStatus::Unknown,\n\n do_check: eci_std::chk_no_startfn,\n\n },\n\n ),\n\n (\n\n \"eei-funcsigs\".to_string(),\n\n Check {\n\n status: CheckStatus::Unknown,\n\n do_check: eci_std::chk_func_signatures,\n\n },\n\n ),\n\n ].iter()\n\n .cloned()\n\n .collect();\n\n\n", "file_path": "src/checklist.rs", "rank": 46, "score": 2.0022559624527223 }, { "content": " .keys()\n\n .map(|x| (x.clone(), (self.checklist[x].status)))\n\n .collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use checklist::*;\n\n\n\n #[test]\n\n fn empty_checklist() {\n\n let checks = EciChecklist::new();\n\n assert!(checks.checklist.is_empty());\n\n }\n\n\n\n #[test]\n\n fn default_checks() {\n\n let checks = EciChecklist::default();\n\n assert!(checks.checklist.contains_key(&\"export-main\".to_string()));\n", "file_path": "src/checklist.rs", "rank": 47, "score": 1.7126885897598028 }, { "content": " 0x08, 0x01, 0x00, 0x0a, 0x04, 0x01, 0x02, 0x00, 0x0b,\n\n ];\n\n let mut checker = EcicChecker::default(&wasm);\n\n assert_eq!(\n\n checker.checks.get_check_status(\"no-startfn\"),\n\n CheckStatus::Unknown\n\n );\n\n checker.fire();\n\n assert_eq!(\n\n checker.checks.get_check_status(\"no-startfn\"),\n\n CheckStatus::Malformed\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_correct_import_namespace() {\n\n let wasm: Vec<u8> = vec![\n\n 0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x09, 0x02, 0x60, 0x02, 0x7f,\n\n 0x7f, 0x00, 0x60, 0x00, 0x00, 0x02, 0x17, 0x01, 0x08, 0x65, 0x74, 0x68, 0x65, 0x72,\n\n 0x65, 0x75, 0x6d, 0x0a, 0x67, 0x65, 0x74, 0x42, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65,\n", "file_path": "src/checker.rs", "rank": 48, "score": 1.0726829011698724 }, { "content": " 0x00, 0x00, 0x03, 0x02, 0x01, 0x01, 0x07, 0x08, 0x01, 0x04, 0x6d, 0x61, 0x69, 0x6e,\n\n 0x00, 0x01, 0x0a, 0x04, 0x01, 0x02, 0x00, 0x0b,\n\n ];\n\n let mut checker = EcicChecker::default(&wasm);\n\n assert_eq!(\n\n 
checker.checks.get_check_status(\"eei-namespace\"),\n\n CheckStatus::Unknown\n\n );\n\n checker.fire();\n\n assert_eq!(\n\n checker.checks.get_check_status(\"eei-namespace\"),\n\n CheckStatus::Good\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_incorrect_import_namespace() {\n\n let wasm: Vec<u8> = vec![\n\n 0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x09, 0x02, 0x60, 0x02, 0x7f,\n\n 0x7f, 0x00, 0x60, 0x00, 0x00, 0x02, 0x12, 0x01, 0x03, 0x65, 0x6e, 0x76, 0x0a, 0x67,\n", "file_path": "src/checker.rs", "rank": 49, "score": 1.0650454105819311 } ]
Rust
contracts/farms/spectrum_pylon_farm/src/compound.rs
spectrumprotocol/contract
eee411acc13ed999a9718c096aef93b586941c02
use cosmwasm_std::{attr, to_binary, Attribute, CanonicalAddr, Coin, CosmosMsg, DepsMut, Env, MessageInfo, Response, StdError, StdResult, Uint128, WasmMsg, QueryRequest, WasmQuery}; use crate::{ bond::deposit_farm_share, state::{read_config, state_store}, }; use crate::querier::query_pylon_reward_info; use cw20::Cw20ExecuteMsg; use crate::state::{pool_info_read, pool_info_store, read_state, Config, PoolInfo}; use pylon_token::staking::{ Cw20HookMsg as PylonStakingCw20HookMsg, ExecuteMsg as PylonStakingExecuteMsg, }; use spectrum_protocol::gov::{ExecuteMsg as GovExecuteMsg}; use spectrum_protocol::pylon_farm::ExecuteMsg; use terraswap::asset::{Asset, AssetInfo}; use terraswap::pair::{Cw20HookMsg as TerraswapCw20HookMsg, ExecuteMsg as TerraswapExecuteMsg, QueryMsg as TerraswapQueryMsg, PoolResponse}; use terraswap::querier::{query_token_balance, simulate}; use spectrum_protocol::farm_helper::{compute_provide_after_swap, deduct_tax}; use moneymarket::market::{ExecuteMsg as MoneyMarketExecuteMsg}; pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> { let config = read_config(deps.storage)?; if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{ return Err(StdError::generic_err("unauthorized")); } let pair_contract = deps.api.addr_humanize(&config.pair_contract)?; let pylon_staking = deps.api.addr_humanize(&config.pylon_staking)?; let pylon_token = deps.api.addr_humanize(&config.pylon_token)?; let pylon_reward_info = query_pylon_reward_info( deps.as_ref(), &config.pylon_staking, &env.contract.address, Some(env.block.height), )?; let mut total_mine_swap_amount = Uint128::zero(); let mut total_mine_stake_amount = Uint128::zero(); let mut total_mine_commission = Uint128::zero(); let mut compound_amount = Uint128::zero(); let mut attributes: Vec<Attribute> = vec![]; let community_fee = config.community_fee; let platform_fee = config.platform_fee; let controller_fee = config.controller_fee; let total_fee = community_fee + platform_fee + controller_fee; let mut pool_info = pool_info_read(deps.storage).load(config.pylon_token.as_slice())?; let reward = pylon_reward_info.pending_reward; if !reward.is_zero() && !pylon_reward_info.bond_amount.is_zero() { let commission = reward * total_fee; let pylon_amount = reward.checked_sub(commission)?; total_mine_commission += commission; total_mine_swap_amount += commission; let auto_bond_amount = pylon_reward_info .bond_amount .checked_sub(pool_info.total_stake_bond_amount)?; compound_amount = pylon_amount.multiply_ratio(auto_bond_amount, pylon_reward_info.bond_amount); let stake_amount = pylon_amount.checked_sub(compound_amount)?; attributes.push(attr("commission", commission)); attributes.push(attr("compound_amount", compound_amount)); attributes.push(attr("stake_amount", stake_amount)); total_mine_stake_amount += stake_amount; } let mut state = read_state(deps.storage)?; let reinvest_allowance = query_token_balance(&deps.querier, pylon_token.clone(), env.contract.address.clone())? 
.checked_sub(state.total_farm_amount)?; deposit_farm_share( &mut state, &mut pool_info, total_mine_stake_amount, )?; state_store(deps.storage).save(&state)?; pool_info_store(deps.storage).save(config.pylon_token.as_slice(), &pool_info)?; let reinvest_amount = reinvest_allowance + compound_amount; let swap_amount = reinvest_amount.multiply_ratio(1u128, 2u128); total_mine_swap_amount += swap_amount; let mine = Asset { info: AssetInfo::Token { contract_addr: pylon_token.to_string(), }, amount: total_mine_swap_amount, }; let mine_swap_rate = simulate( &deps.querier, pair_contract.clone(), &mine, )?; let total_ust_return_amount = deduct_tax(&deps.querier, mine_swap_rate.return_amount, config.base_denom.clone())?; attributes.push(attr("total_ust_return_amount", total_ust_return_amount)); let total_ust_commission_amount = if total_mine_swap_amount != Uint128::zero() { total_ust_return_amount.multiply_ratio(total_mine_commission, total_mine_swap_amount) } else { Uint128::zero() }; let total_ust_reinvest_amount = total_ust_return_amount.checked_sub(total_ust_commission_amount)?; let net_reinvest_ust = deduct_tax( &deps.querier, total_ust_reinvest_amount, config.base_denom.clone(), )?; let pool: PoolResponse = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart { contract_addr: pair_contract.to_string(), msg: to_binary(&TerraswapQueryMsg::Pool {})?, }))?; let provide_mine = compute_provide_after_swap( &pool, &mine, mine_swap_rate.return_amount, net_reinvest_ust )?; let mut messages: Vec<CosmosMsg> = vec![]; let withdraw_all_mine: CosmosMsg = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_staking.to_string(), funds: vec![], msg: to_binary(&PylonStakingExecuteMsg::Withdraw {})?, }); messages.push(withdraw_all_mine); if !total_mine_swap_amount.is_zero() { let swap_mine: CosmosMsg = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Send { contract: pair_contract.to_string(), amount: total_mine_swap_amount, 
msg: to_binary(&TerraswapCw20HookMsg::Swap { max_spread: None, belief_price: None, to: None, })?, })?, funds: vec![], }); messages.push(swap_mine); } if !total_ust_commission_amount.is_zero() { let net_commission_amount = deduct_tax(&deps.querier, total_ust_commission_amount, config.base_denom.clone())?; let mut state = read_state(deps.storage)?; state.earning += net_commission_amount; state_store(deps.storage).save(&state)?; attributes.push(attr("net_commission", net_commission_amount)); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: deps.api.addr_humanize(&config.anchor_market)?.to_string(), msg: to_binary(&MoneyMarketExecuteMsg::DepositStable {})?, funds: vec![Coin { denom: config.base_denom.clone(), amount: net_commission_amount, }], })); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: deps.api.addr_humanize(&config.spectrum_gov)?.to_string(), msg: to_binary(&GovExecuteMsg::mint {})?, funds: vec![], })); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::send_fee {})?, funds: vec![], })); } if !provide_mine.is_zero() { let increase_allowance = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::IncreaseAllowance { spender: pair_contract.to_string(), amount: provide_mine, expires: None, })?, funds: vec![], }); messages.push(increase_allowance); let provide_liquidity = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pair_contract.to_string(), msg: to_binary(&TerraswapExecuteMsg::ProvideLiquidity { assets: [ Asset { info: AssetInfo::Token { contract_addr: pylon_token.to_string(), }, amount: provide_mine, }, Asset { info: AssetInfo::NativeToken { denom: config.base_denom.clone(), }, amount: net_reinvest_ust, }, ], slippage_tolerance: None, receiver: None, })?, funds: vec![Coin { denom: config.base_denom, amount: net_reinvest_ust, }], }); messages.push(provide_liquidity); let stake = 
CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::stake { asset_token: pylon_token.to_string(), })?, funds: vec![], }); messages.push(stake); } attributes.push(attr("action", "compound")); attributes.push(attr("asset_token", pylon_token)); attributes.push(attr("reinvest_amount", reinvest_amount)); attributes.push(attr("provide_token_amount", provide_mine)); attributes.push(attr("provide_ust_amount", net_reinvest_ust)); Ok(Response::new() .add_messages(messages) .add_attributes(attributes)) } pub fn stake( deps: DepsMut, env: Env, info: MessageInfo, asset_token: String, ) -> StdResult<Response> { if info.sender != env.contract.address { return Err(StdError::generic_err("unauthorized")); } let config: Config = read_config(deps.storage)?; let pylon_staking = deps.api.addr_humanize(&config.pylon_staking)?; let asset_token_raw: CanonicalAddr = deps.api.addr_canonicalize(&asset_token)?; let pool_info: PoolInfo = pool_info_read(deps.storage).load(asset_token_raw.as_slice())?; let staking_token = deps.api.addr_humanize(&pool_info.staking_token)?; let amount = query_token_balance(&deps.querier, staking_token.clone(), env.contract.address)?; Ok(Response::new() .add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: staking_token.to_string(), funds: vec![], msg: to_binary(&Cw20ExecuteMsg::Send { contract: pylon_staking.to_string(), amount, msg: to_binary(&PylonStakingCw20HookMsg::Bond {})?, })?, })]) .add_attributes(vec![ attr("action", "stake"), attr("asset_token", asset_token), attr("staking_token", staking_token), attr("amount", amount), ])) } pub fn send_fee( deps: DepsMut, env: Env, info: MessageInfo, ) -> StdResult<Response> { if info.sender != env.contract.address { return Err(StdError::generic_err("unauthorized")); } let config = read_config(deps.storage)?; let aust_token = deps.api.addr_humanize(&config.aust_token)?; let spectrum_gov = deps.api.addr_humanize(&config.spectrum_gov)?; let 
aust_balance = query_token_balance(&deps.querier, aust_token.clone(), env.contract.address)?; let mut messages: Vec<CosmosMsg> = vec![]; let thousand = Uint128::from(1000u64); let total_fee = config.community_fee + config.controller_fee + config.platform_fee; let community_amount = aust_balance.multiply_ratio(thousand * config.community_fee, thousand * total_fee); if !community_amount.is_zero() { let transfer_community_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: spectrum_gov.to_string(), amount: community_amount, })?, funds: vec![], }); messages.push(transfer_community_fee); } let platform_amount = aust_balance.multiply_ratio(thousand * config.platform_fee, thousand * total_fee); if !platform_amount.is_zero() { let stake_platform_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: deps.api.addr_humanize(&config.platform)?.to_string(), amount: platform_amount, })?, funds: vec![], }); messages.push(stake_platform_fee); } let controller_amount = aust_balance.checked_sub(community_amount + platform_amount)?; if !controller_amount.is_zero() { let stake_controller_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: deps.api.addr_humanize(&config.controller)?.to_string(), amount: controller_amount, })?, funds: vec![], }); messages.push(stake_controller_fee); } Ok(Response::new() .add_messages(messages)) }
use cosmwasm_std::{attr, to_binary, Attribute, CanonicalAddr, Coin, CosmosMsg, DepsMut, Env, MessageInfo, Response, StdError, StdResult, Uint128, WasmMsg, QueryRequest, WasmQuery}; use crate::{ bond::deposit_farm_share, state::{read_config, state_store}, }; use crate::querier::query_pylon_reward_info; use cw20::Cw20ExecuteMsg; use crate::state::{pool_info_read, pool_info_store, read_state, Config, PoolInfo}; use pylon_token::staking::{ Cw20HookMsg as PylonStakingCw20HookMsg, ExecuteMsg as PylonStakingExecuteMsg, }; use spectrum_protocol::gov::{ExecuteMsg as GovExecuteMsg}; use spectrum_protocol::pylon_farm::ExecuteMsg; use terraswap::asset::{Asset, AssetInfo}; use terraswap::pair::{Cw20HookMsg as TerraswapCw20HookMsg, ExecuteMsg as TerraswapExecuteMsg, QueryMsg as TerraswapQueryMsg, PoolResponse}; use terraswap::querier::{query_token_balance, simulate}; use spectrum_protocol::farm_helper::{compute_provide_after_swap, deduct_tax}; use moneymarket::market::{ExecuteMsg as MoneyMarketExecuteMsg}; pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> { let config = read_config(deps.storage)?; if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{ return Err(StdError::generic_err("unauthorized")); } let pair_contract = deps.api.addr_humanize(&config.pair_contract)?; let pylon_staking = deps.api.addr_humanize(&config.pylon_staking)?; let pylon_token = deps.api.addr_humanize(&config.pylon_token)?; let pylon_reward_info = query_pylon_reward_info( deps.as_ref(), &config.pylon_staking, &env.contract.address, Some(env.block.height), )?; let mut total_mine_swap_amount = Uint128::zero(); let mut total_mine_stake_amount = Uint128::zero(); let mut total_mine_commission = Uint128::zero(); let mut compound_amount = Uint128::zero(); let mut attributes: Vec<Attribute> = vec![]; let community_fee = config.community_fee; let platform_fee = config.platform_fee; let controller_fee = config.controller_fee; let total_fee = community_fee + platform_fee + controller_fee; let mut pool_info = pool_info_read(deps.storage).load(config.pylon_token.as_slice())?; let reward = pylon_reward_info.pending_reward; if !reward.is_zero() && !pylon_reward_info.bond_amount.is_zero() { let commission = reward * total_fee; let pylon_amount = reward.checked_sub(commission)?; total_mine_commission += commission; total_mine_swap_amount += commission; let auto_bond_amount = pylon_reward_info .bond_amount .checked_sub(pool_info.total_stake_bond_amount)?; compound_amount = pylon_amount.multiply_ratio(auto_bond_amount, pylon_reward_info.bond_amount); let stake_amount = pylon_amount.checked_sub(compound_amount)?; attributes.push(attr("commission", commission)); attributes.push(attr("compound_amount", compound_amount)); attributes.push(attr("stake_amount", stake_amount)); total_mine_stake_amount += stake_amount; } let mut state = read_state(deps.storage)?; let reinvest_allowance = query_token_balance(&deps.querier, pylon_token.clone(), env.contract.address.clone())? 
.checked_sub(state.total_farm_amount)?; deposit_farm_share( &mut state, &mut pool_info, total_mine_stake_amount, )?; state_store(deps.storage).save(&state)?; pool_info_store(deps.storage).save(config.pylon_token.as_slice(), &pool_info)?; let reinvest_amount = reinvest_allowance + compound_amount; let swap_amount = reinvest_amount.multiply_ratio(1u128, 2u128); total_mine_swap_amount += swap_amount; let mine = Asset { info: AssetInfo::Token { contract_addr: pylon_token.to_string(), }, amount: total_mine_swap_amount, }; let mine_swap_rate = simulate( &deps.querier, pair_contract.clone(), &mine, )?; let total_ust_return_amount = deduct_tax(&deps.querier, mine_swap_rate.return_amount, config.base_denom.clone())?; attributes.push(attr("total_ust_return_amount", total_ust_return_amount)); let total_ust_commission_amount = if total_mine_swap_amount != Uint128::zero() { total_ust_return_amount.multiply_ratio(total_mine_commission, total_mine_swap_amount) } else { Uint128::zero() }; let total_ust_reinvest_amount = total_ust_return_amount.checked_sub(total_ust_commission_amount)?; let net_reinvest_ust = deduct_tax( &deps.querier, total_ust_reinvest_amount, config.base_denom.clone(), )?; let pool: PoolResponse = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart { contract_addr: pair_contract.to_string(), msg: to_binary(&TerraswapQueryMsg::Pool {})?, }))?; let provide_mine = compute_provide_after_swap( &pool, &mine, mine_swap_rate.return_amount, net_reinvest_ust )?; let mut messages: Vec<CosmosMsg> = vec![]; let withdraw_all_mine: CosmosMsg = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_staking.to_string(), funds: vec![], msg: to_binary(&PylonStakingExecuteMsg::Withdraw {})?, }); messages.push(withdraw_all_mine); if !total_mine_swap_amount.is_zero() { let swap_mine: CosmosMsg = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Send { contract: pair_contract.to_string(), amount: total_mine_swap_amount, 
msg: to_binary(&TerraswapCw20HookMsg::Swap { max_spread: None, belief_price: None, to: None, })?, })?, funds: vec![], }); messages.push(swap_mine); } if !total_ust_commission_amount.is_zero() { let net_commission_amount = deduct_tax(&deps.querier, total_ust_commission_amount, config.base_denom.clone())?; let mut state = read_state(deps.storage)?; state.earning += net_commission_amount; state
], slippage_tolerance: None, receiver: None, })?, funds: vec![Coin { denom: config.base_denom, amount: net_reinvest_ust, }], }); messages.push(provide_liquidity); let stake = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::stake { asset_token: pylon_token.to_string(), })?, funds: vec![], }); messages.push(stake); } attributes.push(attr("action", "compound")); attributes.push(attr("asset_token", pylon_token)); attributes.push(attr("reinvest_amount", reinvest_amount)); attributes.push(attr("provide_token_amount", provide_mine)); attributes.push(attr("provide_ust_amount", net_reinvest_ust)); Ok(Response::new() .add_messages(messages) .add_attributes(attributes)) } pub fn stake( deps: DepsMut, env: Env, info: MessageInfo, asset_token: String, ) -> StdResult<Response> { if info.sender != env.contract.address { return Err(StdError::generic_err("unauthorized")); } let config: Config = read_config(deps.storage)?; let pylon_staking = deps.api.addr_humanize(&config.pylon_staking)?; let asset_token_raw: CanonicalAddr = deps.api.addr_canonicalize(&asset_token)?; let pool_info: PoolInfo = pool_info_read(deps.storage).load(asset_token_raw.as_slice())?; let staking_token = deps.api.addr_humanize(&pool_info.staking_token)?; let amount = query_token_balance(&deps.querier, staking_token.clone(), env.contract.address)?; Ok(Response::new() .add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: staking_token.to_string(), funds: vec![], msg: to_binary(&Cw20ExecuteMsg::Send { contract: pylon_staking.to_string(), amount, msg: to_binary(&PylonStakingCw20HookMsg::Bond {})?, })?, })]) .add_attributes(vec![ attr("action", "stake"), attr("asset_token", asset_token), attr("staking_token", staking_token), attr("amount", amount), ])) } pub fn send_fee( deps: DepsMut, env: Env, info: MessageInfo, ) -> StdResult<Response> { if info.sender != env.contract.address { return Err(StdError::generic_err("unauthorized")); } let 
config = read_config(deps.storage)?; let aust_token = deps.api.addr_humanize(&config.aust_token)?; let spectrum_gov = deps.api.addr_humanize(&config.spectrum_gov)?; let aust_balance = query_token_balance(&deps.querier, aust_token.clone(), env.contract.address)?; let mut messages: Vec<CosmosMsg> = vec![]; let thousand = Uint128::from(1000u64); let total_fee = config.community_fee + config.controller_fee + config.platform_fee; let community_amount = aust_balance.multiply_ratio(thousand * config.community_fee, thousand * total_fee); if !community_amount.is_zero() { let transfer_community_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: spectrum_gov.to_string(), amount: community_amount, })?, funds: vec![], }); messages.push(transfer_community_fee); } let platform_amount = aust_balance.multiply_ratio(thousand * config.platform_fee, thousand * total_fee); if !platform_amount.is_zero() { let stake_platform_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: deps.api.addr_humanize(&config.platform)?.to_string(), amount: platform_amount, })?, funds: vec![], }); messages.push(stake_platform_fee); } let controller_amount = aust_balance.checked_sub(community_amount + platform_amount)?; if !controller_amount.is_zero() { let stake_controller_fee = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: aust_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::Transfer { recipient: deps.api.addr_humanize(&config.controller)?.to_string(), amount: controller_amount, })?, funds: vec![], }); messages.push(stake_controller_fee); } Ok(Response::new() .add_messages(messages)) }
_store(deps.storage).save(&state)?; attributes.push(attr("net_commission", net_commission_amount)); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: deps.api.addr_humanize(&config.anchor_market)?.to_string(), msg: to_binary(&MoneyMarketExecuteMsg::DepositStable {})?, funds: vec![Coin { denom: config.base_denom.clone(), amount: net_commission_amount, }], })); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: deps.api.addr_humanize(&config.spectrum_gov)?.to_string(), msg: to_binary(&GovExecuteMsg::mint {})?, funds: vec![], })); messages.push(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::send_fee {})?, funds: vec![], })); } if !provide_mine.is_zero() { let increase_allowance = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pylon_token.to_string(), msg: to_binary(&Cw20ExecuteMsg::IncreaseAllowance { spender: pair_contract.to_string(), amount: provide_mine, expires: None, })?, funds: vec![], }); messages.push(increase_allowance); let provide_liquidity = CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: pair_contract.to_string(), msg: to_binary(&TerraswapExecuteMsg::ProvideLiquidity { assets: [ Asset { info: AssetInfo::Token { contract_addr: pylon_token.to_string(), }, amount: provide_mine, }, Asset { info: AssetInfo::NativeToken { denom: config.base_denom.clone(), }, amount: net_reinvest_ust, },
random
[ { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::zap_to_bond {\n\n contract,\n\n provide_asset,\n\n swap_operations,\n\n max_spread,\n\n compound_rate,\n\n } => zap_to_bond(\n\n deps,\n\n env,\n\n info,\n\n contract,\n\n provide_asset,\n\n swap_operations,\n\n max_spread,\n\n compound_rate,\n\n ),\n\n ExecuteMsg::zap_to_bond_hook {\n\n contract,\n", "file_path": "contracts/spectrum_staker_single_asset/src/contract.rs", "rank": 0, "score": 792247.8230527089 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, _env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, info, msg),\n\n ExecuteMsg::register_asset {\n\n asset_token,\n\n staking_token,\n\n weight,\n\n } => register_asset(deps, info, asset_token, staking_token, weight),\n\n ExecuteMsg::withdraw { asset_token, spec_amount } => withdraw(deps, info, asset_token, spec_amount),\n\n ExecuteMsg::unbond {\n\n asset_token,\n\n amount,\n\n } => unbond(deps, info, asset_token, amount),\n\n ExecuteMsg::update_config {\n\n owner,\n\n generator_proxy\n\n } => update_config(deps, info, owner, generator_proxy),\n\n }\n\n}\n\n\n", "file_path": "contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/contract.rs", "rank": 1, "score": 768678.3900490026 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::poll_end { poll_id } => poll_end(deps, env, poll_id),\n\n ExecuteMsg::poll_execute { poll_id } => poll_execute(deps, env, poll_id),\n\n ExecuteMsg::poll_expire { poll_id } => poll_expire(deps, env, poll_id),\n\n ExecuteMsg::poll_start {\n\n title,\n\n description,\n\n link,\n\n 
execute_msgs,\n\n } => poll_start(deps, env, info, title, description, link, execute_msgs),\n\n ExecuteMsg::poll_vote { poll_id, vote } => poll_vote(deps, env, info, poll_id, vote),\n\n ExecuteMsg::update_config {\n\n owner,\n\n quorum,\n\n threshold,\n\n voting_period,\n\n effective_delay,\n\n expiration_period,\n\n } => update_config(\n", "file_path": "contracts/spectrum_platform/src/contract.rs", "rank": 2, "score": 753225.6854675169 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::bond {\n\n contract,\n\n assets,\n\n slippage_tolerance,\n\n compound_rate,\n\n staker_addr,\n\n } => bond(\n\n deps,\n\n env,\n\n info,\n\n contract,\n\n assets,\n\n slippage_tolerance,\n\n compound_rate,\n\n staker_addr,\n\n ),\n\n ExecuteMsg::bond_hook {\n", "file_path": "contracts/spectrum_staker/src/contract.rs", "rank": 3, "score": 753225.685467517 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::poll_vote {\n\n poll_id,\n\n vote,\n\n amount,\n\n } => poll_vote(deps, info, poll_id, vote, amount),\n\n ExecuteMsg::stake { amount, days } => stake(deps, info, amount, days),\n\n ExecuteMsg::unstake { amount, days } => unstake(deps, info, amount, days),\n\n ExecuteMsg::upsert_share {\n\n address,\n\n lock_start,\n\n lock_end,\n\n lock_amount,\n\n disable_withdraw,\n\n } => upsert_share(\n\n deps,\n\n info,\n\n address,\n\n lock_start,\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 4, "score": 753225.685467517 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n 
match msg {\n\n ExecuteMsg::harvest { aust_amount, days } => harvest(deps, info, aust_amount, days.unwrap_or(0u64)),\n\n ExecuteMsg::mint {} => mint(deps, env),\n\n ExecuteMsg::poll_end { poll_id } => poll_end(deps, env, poll_id),\n\n ExecuteMsg::poll_execute { poll_id } => poll_execute(deps, env, poll_id),\n\n ExecuteMsg::poll_expire { poll_id } => poll_expire(deps, env, poll_id),\n\n ExecuteMsg::poll_vote {\n\n poll_id,\n\n vote,\n\n amount,\n\n } => poll_vote(deps, env, info, poll_id, vote, amount),\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n spec_token,\n\n quorum,\n\n threshold,\n\n voting_period,\n\n effective_delay,\n", "file_path": "contracts/spectrum_gov/src/contract.rs", "rank": 5, "score": 753225.6854675171 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/contract.rs", "rank": 6, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", 
"file_path": "contracts/farms/spectrum_mirror_farm/src/contract.rs", "rank": 7, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_terraworld_farm/src/contract.rs", "rank": 8, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_pylon_farm/src/contract.rs", "rank": 9, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": 
"contracts/farms/spectrum_valkyrie_farm/src/contract.rs", "rank": 10, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_nexus_farm/src/contract.rs", "rank": 11, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_orion_farm/src/contract.rs", "rank": 12, "score": 745817.6155367948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": 
"contracts/farms/spectrum_pylon_liquid_farm/src/contract.rs", "rank": 13, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::Unstake { amount} => unstake(deps, env, info, amount),\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/anchor_gov_proxy/src/contract.rs", "rank": 14, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::Unstake { amount} => unstake(deps, env, info, amount),\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/nexus_gov_proxy/src/contract.rs", "rank": 15, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::Unstake { amount} => unstake(deps, env, info, amount),\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/token_gov_proxy/src/contract.rs", "rank": 16, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::Unstake { amount} => unstake(deps, env, info, amount),\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/xastro_gov_proxy/src/contract.rs", "rank": 17, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), 
entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/astro_farms/spectrum_astroport_farm/src/contract.rs", "rank": 18, "score": 742216.5851713631 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/farms/spectrum_nexus_nasset_psi_farm/src/contract.rs", "rank": 19, "score": 738681.5940503802 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/astro_farms/spectrum_astroport_token_luna_farm/src/contract.rs", "rank": 20, "score": 735210.7661476096 }, { "content": "#[cfg_attr(not(feature = 
\"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/astro_farms/spectrum_astroport_luna_ust_farm/src/contract.rs", "rank": 21, "score": 735210.7661476096 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::update_config {\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n controller,\n\n community_fee,\n\n platform_fee,\n\n controller_fee,\n\n deposit_fee,\n\n ),\n", "file_path": "contracts/astro_farms/spectrum_astroport_token_ust_farm/src/contract.rs", "rank": 22, "score": 735210.7661476096 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, _env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::receive(msg) => receive_cw20(deps, info, msg),\n\n ExecuteMsg::register_asset {\n\n asset_token,\n\n staking_token,\n\n weight,\n\n } => register_asset(deps, info, asset_token, staking_token, weight),\n\n ExecuteMsg::withdraw { asset_token, spec_amount } => withdraw(deps, info, asset_token, spec_amount),\n\n ExecuteMsg::unbond {\n\n asset_token,\n\n amount,\n\n } => unbond(deps, info, asset_token, amount),\n\n ExecuteMsg::update_config {\n\n owner,\n\n } => update_config(deps, info, 
owner),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_spec_farm/src/contract.rs", "rank": 23, "score": 734559.2395664958 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo, max_compound: Uint128) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let valkyrie_staking = deps.api.addr_humanize(&config.valkyrie_staking)?;\n\n let valkyrie_token = deps.api.addr_humanize(&config.valkyrie_token)?;\n\n // let valkyrie_gov = deps.api.addr_humanize(&config.valkyrie_gov)?;\n\n\n\n let valkyrie_reward_info = query_valkyrie_reward_info(\n\n deps.as_ref(),\n\n &config.valkyrie_staking,\n\n &env.contract.address,\n\n )?;\n\n\n\n let mut total_vkr_swap_amount = Uint128::zero();\n\n let mut total_vkr_stake_amount = Uint128::zero();\n", "file_path": "contracts/farms/spectrum_valkyrie_farm/src/compound.rs", "rank": 24, "score": 683037.949256293 }, { "content": "pub fn reconcile_balance(deps: &Deps, state: &mut State, config: &Config, deposited_amount: Uint128) -> StdResult<Uint128> {\n\n reconcile_aust(deps, state, config)?;\n\n reconcile_spec(deps, state, config, deposited_amount)\n\n}\n\n\n", "file_path": "contracts/spectrum_gov/src/stake.rs", "rank": 25, "score": 681201.0316358388 }, { "content": "fn buy_spec(deps: DepsMut, env: Env, info: MessageInfo, ust_amount: Option<Uint128>) -> StdResult<Response> {\n\n let sender_addr = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n let found = reward_store(deps.storage).may_load(&sender_addr)?.is_some();\n\n if !found {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let config = read_config(deps.storage)?;\n\n let factory_contract = deps.api.addr_humanize(&config.terraswap_factory)?;\n\n let spectrum_token = 
deps.api.addr_humanize(&config.spectrum_token)?;\n\n let spec_info = AssetInfo::Token {\n\n contract_addr: spectrum_token.to_string(),\n\n };\n\n let ust_info = AssetInfo::NativeToken {\n\n denom: \"uusd\".to_string()\n\n };\n\n let pair_info = query_pair_info(&deps.querier, factory_contract, &[spec_info, ust_info.clone()])?;\n\n let avail_ust = deps.querier.query_balance(env.contract.address, \"uusd\")?;\n\n let swap_amount = Asset {\n\n info: ust_info.clone(),\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 26, "score": 662030.4026786762 }, { "content": "fn aust_redeem(deps: DepsMut, env: Env, info: MessageInfo, aust_amount: Option<Uint128>) -> StdResult<Response> {\n\n let sender_addr = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n let found = reward_store(deps.storage).may_load(&sender_addr)?.is_some();\n\n if !found {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let config = read_config(deps.storage)?;\n\n let aust_token = deps.api.addr_humanize(&config.aust_token)?;\n\n let aust_balance = query_token_balance(\n\n &deps.querier,\n\n aust_token,\n\n env.contract.address)?;\n\n let amount = aust_amount.unwrap_or(aust_balance);\n\n\n\n Ok(Response::new()\n\n .add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.aust_token)?.to_string(),\n\n msg: to_binary(&Cw20ExecuteMsg::Send {\n\n contract: deps.api.addr_humanize(&config.anchor_market)?.to_string(),\n\n amount,\n\n msg: to_binary(&MarketCw20HookMsg::RedeemStable { })?,\n\n })?,\n\n funds: vec![],\n\n })]))\n\n}\n\n\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 27, "score": 662030.4026786763 }, { "content": "// harvest all\n\npub fn harvest_all(mut deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let terraswap_factory = deps.api.addr_humanize(&config.terraswap_factory)?;\n\n let mirror_staking = deps.api.addr_humanize(&config.mirror_staking)?;\n\n let mirror_token = deps.api.addr_humanize(&config.mirror_token)?;\n\n let mirror_gov = deps.api.addr_humanize(&config.mirror_gov)?;\n\n\n\n let mirror_reward_infos = query_mirror_reward_info(\n\n deps.as_ref(),\n\n mirror_staking.to_string(),\n\n env.contract.address.to_string(),\n\n )?;\n\n\n\n let mut total_mir_swap_amount = Uint128::zero();\n\n let mut total_mir_stake_amount = Uint128::zero();\n", "file_path": "contracts/farms/spectrum_mirror_farm/src/harvest.rs", "rank": 28, "score": 661370.8603358853 }, { "content": "fn reward_to_pool(state: &State, pool_info: &mut PoolInfo) -> StdResult<()> {\n\n if pool_info.total_bond_amount.is_zero() {\n\n return Ok(());\n\n }\n\n\n\n let share = (UDec128::from(state.spec_share_index) - pool_info.state_spec_share_index.into())\n\n * Uint128::from(pool_info.weight as u128);\n\n let share_per_bond = share / pool_info.total_bond_amount;\n\n pool_info.spec_share_index = pool_info.spec_share_index + share_per_bond.into();\n\n pool_info.state_spec_share_index = state.spec_share_index;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/bond.rs", "rank": 29, "score": 624607.2501313749 }, { "content": "fn reward_to_pool(state: &State, pool_info: &mut PoolInfo) -> StdResult<()> {\n\n if pool_info.total_bond_amount.is_zero() {\n\n return Ok(());\n\n }\n\n\n\n let share = (UDec128::from(state.spec_share_index) - pool_info.state_spec_share_index.into())\n\n * Uint128::from(pool_info.weight as u128);\n\n let share_per_bond = share / pool_info.total_bond_amount;\n\n pool_info.spec_share_index = pool_info.spec_share_index + share_per_bond.into();\n\n pool_info.state_spec_share_index = state.spec_share_index;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": 
"contracts/farms/spectrum_spec_farm/src/bond.rs", "rank": 30, "score": 621580.933744056 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let nexus_staking = deps.api.addr_humanize(&config.nexus_staking)?;\n\n let nexus_token = deps.api.addr_humanize(&config.nexus_token)?;\n\n let nexus_gov = deps.api.addr_humanize(&config.nexus_gov)?;\n\n\n\n let nexus_reward_info = query_nexus_reward_info(\n\n deps.as_ref(),\n\n &config.nexus_staking,\n\n &env.contract.address,\n\n Some(env.block.time.seconds()),\n\n )?;\n\n\n\n let mut total_psi_swap_amount = Uint128::zero();\n", "file_path": "contracts/farms/spectrum_nexus_farm/src/compound.rs", "rank": 32, "score": 618476.3564321038 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let orion_staking = deps.api.addr_humanize(&config.orion_staking)?;\n\n let orion_token = deps.api.addr_humanize(&config.orion_token)?;\n\n let orion_gov = deps.api.addr_humanize(&config.orion_gov)?;\n\n\n\n let orion_reward_info = query_orion_reward_info(\n\n deps.as_ref(),\n\n &config.orion_staking,\n\n &env.contract.address,\n\n Some(env.block.time.seconds()),\n\n )?;\n\n\n\n let mut total_orion_swap_amount = Uint128::zero();\n", "file_path": "contracts/farms/spectrum_orion_farm/src/compound.rs", "rank": 33, "score": 618476.3564321038 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let anchor_staking = deps.api.addr_humanize(&config.anchor_staking)?;\n\n let anchor_token = deps.api.addr_humanize(&config.anchor_token)?;\n\n let anchor_gov = deps.api.addr_humanize(&config.anchor_gov)?;\n\n\n\n let anchor_reward_info = query_anchor_reward_info(\n\n deps.as_ref(),\n\n &config.anchor_staking,\n\n &env.contract.address,\n\n Some(env.block.height),\n\n )?;\n\n\n\n let mut total_anc_swap_amount = Uint128::zero();\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/compound.rs", "rank": 34, "score": 618476.3564321038 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let astro_token = deps.api.addr_humanize(&config.astro_token)?;\n\n let astro_gov_proxy = deps.api.addr_humanize(&config.astro_gov_proxy)?;\n\n\n\n let mut pool_info = pool_info_read(deps.storage).load(config.astro_token.as_slice())?;\n\n\n\n // This get pending (ASTRO) reward\n\n let pending_token_response = query_astroport_pending_token(\n\n deps.as_ref(),\n\n &pool_info.staking_token,\n\n &env.contract.address,\n\n &config.astroport_generator\n\n )?;\n", "file_path": "contracts/astro_farms/spectrum_astroport_farm/src/compound.rs", "rank": 35, "score": 615026.6842340503 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let reward_token = deps.api.addr_humanize(&config.reward_token)?;\n\n let gateway_pool = deps.api.addr_humanize(&config.gateway_pool)?;\n\n let dp_token = deps.api.addr_humanize(&config.dp_token)?;\n\n\n\n let gov_proxy = if let Some(gov_proxy) = &config.gov_proxy {\n\n Some(deps.api.addr_humanize(gov_proxy)?)\n\n } else {\n\n None\n\n };\n\n\n\n let reward_info = query_claimable_reward(\n\n deps.as_ref(),\n", "file_path": "contracts/farms/spectrum_pylon_liquid_farm/src/compound.rs", "rank": 36, "score": 615026.6842340503 }, { "content": "pub fn compound(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n if config.controller != deps.api.addr_canonicalize(info.sender.as_str())? 
{\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let pair_contract = deps.api.addr_humanize(&config.pair_contract)?;\n\n let nasset_staking = deps.api.addr_humanize(&config.nasset_staking)?;\n\n let nexus_token = deps.api.addr_humanize(&config.nexus_token)?;\n\n let nexus_gov = deps.api.addr_humanize(&config.nexus_gov)?;\n\n let nasset_token = deps.api.addr_humanize(&config.nasset_token)?;\n\n\n\n let nexus_reward_info = query_nexus_reward_info(\n\n deps.as_ref(),\n\n &config.nasset_staking,\n\n &env.contract.address,\n\n Some(env.block.time.seconds()),\n\n )?;\n\n\n", "file_path": "contracts/farms/spectrum_nexus_nasset_psi_farm/src/compound.rs", "rank": 37, "score": 611647.344270814 }, { "content": "fn reconcile_spec(deps: &Deps, state: &mut State, config: &Config, deposited_amount: Uint128) -> StdResult<Uint128> {\n\n\n\n let balance = query_token_balance(\n\n &deps.querier,\n\n deps.api.addr_humanize(&config.spec_token)?,\n\n deps.api.addr_humanize(&state.contract_addr)?,\n\n )?\n\n .checked_sub(state.poll_deposit)?\n\n .checked_sub(state.vault_balances)?\n\n .checked_sub(deposited_amount)?;\n\n\n\n if balance >= state.prev_balance {\n\n let diff = balance.checked_sub(state.prev_balance)?;\n\n if diff.is_zero() {\n\n return Ok(balance);\n\n }\n\n\n\n let mut changes = distribute_reward(state, diff)?;\n\n state.total_balance += changes.pop().unwrap();\n\n for pool in state.pools.iter_mut() {\n", "file_path": "contracts/spectrum_gov/src/stake.rs", "rank": 38, "score": 606898.0271026926 }, { "content": "fn stake(deps: DepsMut, info: MessageInfo, amount: Uint128, days: Option<u64>) -> StdResult<Response> {\n\n let sender_addr = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n let found = reward_store(deps.storage).may_load(&sender_addr)?.is_some();\n\n if !found {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let config = read_config(deps.storage)?;\n\n Ok(Response::new()\n\n 
.add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.spectrum_token)?.to_string(),\n\n msg: to_binary(&Cw20ExecuteMsg::Send {\n\n contract: deps.api.addr_humanize(&config.spectrum_gov)?.to_string(),\n\n amount,\n\n msg: to_binary(&GovCw20HookMsg::stake_tokens { staker_addr: None, days })?,\n\n })?,\n\n funds: vec![],\n\n })]))\n\n}\n\n\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 39, "score": 597821.774476116 }, { "content": "fn unstake(deps: DepsMut, info: MessageInfo, amount: Option<Uint128>, days: Option<u64>) -> StdResult<Response> {\n\n let sender_addr = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n let found = reward_store(deps.storage).may_load(&sender_addr)?.is_some();\n\n if !found {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let config = read_config(deps.storage)?;\n\n Ok(Response::new()\n\n .add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.spectrum_gov)?.to_string(),\n\n msg: to_binary(&GovExecuteMsg::withdraw {\n\n amount,\n\n days,\n\n })?,\n\n funds: vec![],\n\n })]))\n\n}\n\n\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 40, "score": 588991.3206776177 }, { "content": "pub fn token_asset(contract_addr: Addr, amount: Uint128) -> Asset {\n\n Asset {\n\n info: AssetInfo::Token { contract_addr },\n\n amount,\n\n }\n\n}\n\n\n", "file_path": "packages/astroport/src/asset.rs", "rank": 41, "score": 552596.3726600603 }, { "content": "fn harvest(deps: DepsMut, info: MessageInfo, aust_amount: Option<Uint128>, days: Option<u64>) -> StdResult<Response> {\n\n let sender_addr = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n let found = reward_store(deps.storage).may_load(&sender_addr)?.is_some();\n\n if !found {\n\n return Err(StdError::generic_err(\"unauthorized\"));\n\n }\n\n\n\n let config = read_config(deps.storage)?;\n\n Ok(Response::new()\n\n 
.add_messages(vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.spectrum_gov)?.to_string(),\n\n msg: to_binary(&GovExecuteMsg::harvest {\n\n aust_amount,\n\n days,\n\n })?,\n\n funds: vec![],\n\n })]))\n\n}\n\n\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 42, "score": 547081.8104263155 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let farm_staked: PylonStakerResponse =\n\n deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: deps.api.addr_humanize(&config.pylon_gov)?.to_string(),\n\n msg: to_binary(&PylonGovQueryMsg::Staker {\n\n address: env.contract.address.to_string(),\n\n })?,\n\n }))?;\n\n\n\n if farm_staked.balance.is_zero() {\n\n return Ok(Response::default())\n\n }\n\n\n\n let mut state = read_state(deps.storage)?;\n\n state.total_farm_amount += farm_staked.balance;\n\n state_store(deps.storage).save(&state)?;\n\n\n\n let mut messages: Vec<CosmosMsg> = vec![\n\n CosmosMsg::Wasm(WasmMsg::Execute {\n", "file_path": "contracts/farms/spectrum_pylon_farm/src/contract.rs", "rank": 43, "score": 539304.9347475078 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n let cfg = CONFIG.load(deps.storage)?;\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&ConfigResponse {\n\n generator_contract_addr: cfg.generator_contract_addr.to_string(),\n\n pair_addr: cfg.pair_addr.to_string(),\n\n lp_token_addr: cfg.lp_token_addr.to_string(),\n\n reward_contract_addr: cfg.reward_contract_addr.to_string(),\n\n reward_token_addr: cfg.reward_token_addr.to_string(),\n\n }),\n\n QueryMsg::Deposit {} => {\n\n let res: StdResult<SpecRewardInfoResponse> = deps.querier.query_wasm_smart(\n\n cfg.reward_contract_addr,\n\n &SpecQueryMsg::reward_info 
{\n\n staker_addr: env.contract.address.to_string(),\n\n asset_token: Some(cfg.pair_addr.to_string()),\n\n },\n\n );\n\n let reward_infos = res?.reward_infos;\n\n let deposit_amount = if !reward_infos.is_empty() {\n", "file_path": "contracts/astro_dual_rewards/generator_proxy_to_spec/src/contract.rs", "rank": 44, "score": 536843.5571513574 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/spectrum_staker_single_asset/src/contract.rs", "rank": 45, "score": 535422.7981586193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/state.rs", "rank": 46, "score": 534733.9665493994 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.terraswap_factory = deps.api.addr_canonicalize(&msg.terraswap_factory)?;\n\n config_store(deps.storage).save(&config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 47, "score": 534188.3971352519 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let vaults = read_vaults(deps.storage)?;\n\n let mut state = read_state(deps.storage)?;\n\n let mut config = read_config(deps.storage)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n reconcile_balance(&deps.as_ref(), &mut state, &config, Uint128::zero())?;\n\n\n\n for (addr, 
mut vault) in vaults {\n\n let key = addr.as_slice();\n\n let account = read_account(deps.storage, key)?;\n\n if let Some(mut account) = account {\n\n let amount = account.calc_balance(0u64, &state)?;\n\n let share = account.share;\n\n account.deduct_share(0u64, share, None)?;\n\n state.deduct_share(0u64, share, amount)?;\n\n vault.balance += amount;\n\n state.vault_balances += amount;\n\n vault_store(deps.storage).save(key, &vault)?;\n\n account_store(deps.storage).save(key, &account)?;\n\n }\n\n }\n\n state.vault_share_multiplier = Decimal::from_ratio(state.total_share, state.total_balance);\n\n\n\n state_store(deps.storage).save(&state)?;\n\n config_store(deps.storage).save(&config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/spectrum_gov/src/contract.rs", "rank": 48, "score": 534188.3971352519 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/spectrum_staker_single_asset/src/contract.rs", "rank": 49, "score": 531842.4480182619 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/astro_dual_rewards/generator_proxy_to_spec/src/contract.rs", "rank": 50, "score": 528530.8851116672 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n 
AssetInfo::NativeToken {\n\n denom: config.base_denom.clone(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.orion_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_orion_farm/src/contract.rs", "rank": 51, "score": 528371.0399851949 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n AssetInfo::NativeToken {\n\n denom: config.base_denom.clone(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.anchor_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/contract.rs", "rank": 52, "score": 528371.0399851948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n AssetInfo::NativeToken {\n\n denom: config.base_denom.clone(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: 
deps.api.addr_humanize(&config.nexus_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_nexus_farm/src/contract.rs", "rank": 53, "score": 528371.0399851949 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_mirror_farm/src/contract.rs", "rank": 54, "score": 528371.0399851948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n AssetInfo::NativeToken {\n\n denom: config.base_denom.clone(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.terraworld_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_terraworld_farm/src/contract.rs", "rank": 55, "score": 528371.0399851948 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> 
StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n AssetInfo::NativeToken {\n\n denom: config.base_denom.clone(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.valkyrie_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/farms/spectrum_valkyrie_farm/src/contract.rs", "rank": 56, "score": 528371.0399851948 }, { "content": "#[allow(clippy::needless_range_loop)]\n\nfn distribute_reward(state: &State, diff: Uint128) -> StdResult<Vec<Uint128>> {\n\n // if amount grow, distribute reward to each pool equally\n\n // however higher lock pool also earn from lower lock pool\n\n let mut pools: Vec<&StatePool> = state.pools.iter().filter(|it| it.active).rev().collect();\n\n let pool_0 = StatePool {\n\n days: 0u64,\n\n total_balance: state.total_balance,\n\n total_share: state.total_share,\n\n aust_index: state.aust_index,\n\n active: true,\n\n };\n\n pools.push(&pool_0);\n\n\n\n let len: u128 = (pools.len() as u64).into();\n\n let mut denom = 0u128;\n\n let mut changes = vec![Uint128::zero(); pools.len()];\n\n let mut total = Uint128::zero();\n\n for i in 0..pools.len() {\n\n let pool = pools.get(i).unwrap();\n\n denom += pool.total_balance.u128() * len;\n", "file_path": "contracts/spectrum_gov/src/stake.rs", "rank": 57, "score": 527439.8214561678 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": 
"contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/contract.rs", "rank": 58, "score": 525292.570702293 }, { "content": "pub fn read_rewards(storage: &dyn Storage) -> StdResult<Vec<(CanonicalAddr, RewardInfo)>> {\n\n bucket_read(storage, PREFIX_REWARD)\n\n .range(None, None, Order::Descending)\n\n .map(|item| {\n\n let (k, v) = item?;\n\n Ok((CanonicalAddr::from(k), v))\n\n })\n\n .collect()\n\n}\n", "file_path": "contracts/spectrum_wallet/src/state.rs", "rank": 59, "score": 524345.9366058302 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n asset_token,\n\n } => to_binary(&query_reward_info(deps, staker_addr, asset_token)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/contract.rs", "rank": 60, "score": 523136.7715693043 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> StdResult<Response> {\n\n let mut config = read_config(deps.storage)?;\n\n config.anchor_market = deps.api.addr_canonicalize(&msg.anchor_market)?;\n\n config.aust_token = deps.api.addr_canonicalize(&msg.aust_token)?;\n\n\n\n let pair_info = query_pair_info(\n\n &deps.querier,\n\n deps.api.addr_validate(&msg.terraswap_factory)?,\n\n &[\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.nasset_token)?.to_string(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: deps.api.addr_humanize(&config.nexus_token)?.to_string(),\n\n },\n\n ],\n\n )?;\n\n config.pair_contract = deps.api.addr_canonicalize(&pair_info.contract_addr)?;\n\n\n\n let pair_info = query_pair_info(\n", "file_path": 
"contracts/farms/spectrum_nexus_nasset_psi_farm/src/contract.rs", "rank": 61, "score": 522774.54254864773 }, { "content": "pub fn calc_mintable(state: &State, config: &Config, height: u64) -> Uint128 {\n\n let last_mint = if config.mint_start > state.last_mint {\n\n config.mint_start\n\n } else {\n\n state.last_mint\n\n };\n\n let height = if height < config.mint_end {\n\n height\n\n } else {\n\n config.mint_end\n\n };\n\n if last_mint < height {\n\n let diff: u128 = (height - last_mint).into();\n\n let val = config.mint_per_block.u128() * diff;\n\n Uint128::from(val)\n\n } else {\n\n Uint128::zero()\n\n }\n\n}\n\n\n", "file_path": "contracts/spectrum_gov/src/stake.rs", "rank": 62, "score": 520154.39834953746 }, { "content": "fn before_share_change(pool_info: &PoolInfo, reward_info: &mut RewardInfo) -> StdResult<()> {\n\n let share =\n\n reward_info.bond_amount * (pool_info.spec_share_index - reward_info.spec_share_index);\n\n reward_info.spec_share += share;\n\n reward_info.spec_share_index = pool_info.spec_share_index;\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_spec_farm/src/bond.rs", "rank": 63, "score": 516270.8722437407 }, { "content": "fn before_share_change(pool_info: &PoolInfo, reward_info: &mut RewardInfo) -> StdResult<()> {\n\n let share =\n\n reward_info.bond_amount * (pool_info.spec_share_index - reward_info.spec_share_index);\n\n reward_info.spec_share += share;\n\n reward_info.spec_share_index = pool_info.spec_share_index;\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/astro_dual_rewards/spectrum_spec_astroport_farm/src/bond.rs", "rank": 64, "score": 514682.0756959474 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::simulate_zap_to_bond {\n\n provide_asset,\n\n pair_asset,\n\n pair_asset_b,\n\n } => to_binary(&simulate_zap_to_bond(deps, env, 
provide_asset, pair_asset, pair_asset_b)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/spectrum_staker/src/contract.rs", "rank": 65, "score": 498974.1290014852 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n cw20_query(deps, env, msg)\n\n}\n", "file_path": "contracts/spectrum_token/src/contract.rs", "rank": 66, "score": 498974.1290014852 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::balance { address } => to_binary(&query_balances(deps, address, env.block.height)?),\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::poll { poll_id } => to_binary(&query_poll(deps, poll_id)?),\n\n QueryMsg::polls {\n\n filter,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_polls(deps, filter, start_after, limit, order_by)?),\n\n QueryMsg::state { } => to_binary(&query_state(deps, env.block.height)?),\n\n QueryMsg::vaults {} => to_binary(&query_vaults(deps)?),\n\n QueryMsg::voters {\n\n poll_id,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_voters(deps, poll_id, start_after, limit, order_by)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/spectrum_gov/src/contract.rs", "rank": 67, "score": 498974.1290014852 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::balance { address } => to_binary(&query_balance(deps, env, address)?),\n\n QueryMsg::shares {} => to_binary(&query_shares(deps)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/spectrum_wallet/src/contract.rs", "rank": 68, "score": 498974.1290014852 }, { "content": "pub fn pool_info_store(storage: &mut dyn 
Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_valkyrie_farm/src/state.rs", "rank": 69, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_mirror_farm/src/state.rs", "rank": 70, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_orion_farm/src/state.rs", "rank": 71, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_spec_farm/src/state.rs", "rank": 72, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_terraworld_farm/src/state.rs", "rank": 73, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_nexus_farm/src/state.rs", "rank": 74, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/state.rs", "rank": 75, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_pylon_farm/src/state.rs", "rank": 76, "score": 498024.517929193 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n 
bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_pylon_liquid_farm/src/state.rs", "rank": 77, "score": 494431.8851418436 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/astro_farms/spectrum_astroport_farm/src/state.rs", "rank": 78, "score": 494431.8851418436 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/contract.rs", "rank": 79, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_terraworld_farm/src/contract.rs", "rank": 80, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n asset_token,\n\n staker_addr,\n\n } => to_binary(&query_reward_info(\n\n deps,\n\n env,\n\n staker_addr,\n\n 
asset_token,\n\n )?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_mirror_farm/src/contract.rs", "rank": 81, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_valkyrie_farm/src/contract.rs", "rank": 82, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_pylon_farm/src/contract.rs", "rank": 83, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_nexus_farm/src/contract.rs", "rank": 84, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), 
entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_orion_farm/src/contract.rs", "rank": 85, "score": 494188.5331351425 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/spectrum_platform/src/contract.rs", "rank": 86, "score": 493210.32967628824 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/spectrum_staker/src/contract.rs", "rank": 87, "score": 493210.3296762883 }, { "content": "pub fn config_store(storage: &mut dyn Storage) -> Singleton<Config> {\n\n singleton(storage, KEY_CONFIG)\n\n}\n\n\n", "file_path": "contracts/spectrum_staker_single_asset/src/state.rs", "rank": 88, "score": 493101.2388093749 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_pylon_liquid_farm/src/contract.rs", "rank": 89, "score": 491862.2274583763 }, { "content": "#[cfg_attr(not(feature = \"library\"), 
entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::State {} => to_binary(&query_state(deps)?),\n\n QueryMsg::Staker { address } => to_binary(&query_staker_info_gov(deps, env, address)?)\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/xastro_gov_proxy/src/contract.rs", "rank": 90, "score": 491862.2274583763 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::pools {} => to_binary(&query_pools(deps)?),\n\n QueryMsg::reward_info {\n\n staker_addr,\n\n } => to_binary(&query_reward_info(deps, env, staker_addr)?),\n\n QueryMsg::state {} => to_binary(&query_state(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/astro_farms/spectrum_astroport_farm/src/contract.rs", "rank": 91, "score": 491862.2274583762 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::State {} => to_binary(&query_state(deps)?),\n\n QueryMsg::Staker { address } => to_binary(&query_staker_info_gov(deps, env, address)?)\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/token_gov_proxy/src/contract.rs", "rank": 92, "score": 491862.2274583763 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::State {} => to_binary(&query_state(deps)?),\n\n QueryMsg::Staker { address } => to_binary(&query_staker_info_gov(deps, env, address)?)\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/nexus_gov_proxy/src/contract.rs", "rank": 93, 
"score": 491862.2274583763 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::State {} => to_binary(&query_state(deps)?),\n\n QueryMsg::Staker { address } => to_binary(&query_staker_info_gov(deps, env, address)?)\n\n }\n\n}\n\n\n", "file_path": "contracts/gov_proxies/anchor_gov_proxy/src/contract.rs", "rank": 94, "score": 491862.2274583763 }, { "content": "pub fn pool_info_store(storage: &mut dyn Storage) -> Bucket<PoolInfo> {\n\n bucket(storage, PREFIX_POOL_INFO)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_nexus_nasset_psi_farm/src/state.rs", "rank": 95, "score": 490914.1586686926 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_anchor_farm/src/state.rs", "rank": 96, "score": 490194.75812021946 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_terraworld_farm/src/state.rs", "rank": 97, "score": 490194.75812021946 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_pylon_farm/src/state.rs", "rank": 98, "score": 490194.7581202195 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/farms/spectrum_mirror_farm/src/state.rs", "rank": 99, "score": 490194.7581202195 } ]
Rust
query-engine/core/src/metrics/formatters.rs
prisma-korea/prisma-engines
99ac5a5c95ba2f5308d84fc30a7110f606182d92
use super::common::{Histogram, Metric, MetricValue, Snapshot}; use metrics_exporter_prometheus::formatting::{ sanitize_description, sanitize_label_key, sanitize_label_value, write_help_line, write_metric_line, write_type_line, }; use serde_json::Value; use std::collections::HashMap; fn create_label_string(labels: &HashMap<String, String>) -> Vec<String> { let mut label_string = labels .iter() .map(|(k, v)| format!("{}=\"{}\"", sanitize_label_key(k), sanitize_label_value(v))) .collect::<Vec<String>>(); label_string.sort(); label_string } pub(crate) fn metrics_to_json(snapshot: Snapshot) -> Value { let Snapshot { counters, histograms, gauges, } = snapshot; let mut normalised_histograms = Vec::new(); for histogram in histograms { if let MetricValue::Histogram(histogram_value) = histogram.value { let mut prev = 0; let buckets = histogram_value .buckets .iter() .cloned() .map(|(le, count)| { let new_count = count - prev; prev = count; (le, new_count) }) .collect(); let new_histogram = Histogram { buckets, sum: histogram_value.sum, count: histogram_value.count, }; normalised_histograms.push(Metric { key: histogram.key.clone(), labels: histogram.labels.clone(), description: histogram.description.clone(), value: MetricValue::Histogram(new_histogram), }); } } let snapshot = Snapshot { counters, histograms: normalised_histograms, gauges, }; serde_json::to_value(snapshot).unwrap() } pub(crate) fn metrics_to_prometheus(snapshot: Snapshot) -> String { let Snapshot { counters, histograms, gauges, } = snapshot; let mut output = String::new(); for counter in counters { let desc = sanitize_description(counter.description.as_str()); write_help_line(&mut output, counter.key.as_str(), desc.as_str()); write_type_line(&mut output, counter.key.as_str(), "counter"); let labels = create_label_string(&counter.labels); if let MetricValue::Counter(value) = counter.value { write_metric_line::<&str, u64>(&mut output, &counter.key.as_str(), None, &labels, None, value); } output.push('\n'); } for 
gauge in gauges { let desc = sanitize_description(gauge.description.as_str()); write_help_line(&mut output, gauge.key.as_str(), desc.as_str()); write_type_line(&mut output, gauge.key.as_str(), "gauge"); let labels = create_label_string(&gauge.labels); if let MetricValue::Gauge(value) = gauge.value { write_metric_line::<&str, f64>(&mut output, &gauge.key.as_str(), None, &labels, None, value); } output.push('\n'); } for histogram in histograms { let desc = sanitize_description(histogram.description.as_str()); write_help_line(&mut output, histogram.key.as_str(), desc.as_str()); write_type_line(&mut output, histogram.key.as_str(), "histogram"); let labels = create_label_string(&histogram.labels); if let MetricValue::Histogram(histogram_values) = histogram.value { for (le, count) in histogram_values.buckets { write_metric_line( &mut output, histogram.key.as_str(), Some("bucket"), &labels, Some(("le", le)), count, ); } write_metric_line( &mut output, histogram.key.as_str(), Some("bucket"), &labels, Some(("le", "+Inf")), histogram_values.count, ); write_metric_line::<&str, f64>( &mut output, histogram.key.as_str(), Some("sum"), &labels, None, histogram_values.sum, ); write_metric_line::<&str, u64>( &mut output, histogram.key.as_str(), Some("count"), &labels, None, histogram_values.count, ); } output.push('\n'); } output }
use super::common::{Histogram, Metric, MetricValue, Snapshot}; use metrics_exporter_prometheus::formatting::{ sanitize_description, sanitize_label_key, sanitize_label_value, write_help_line, write_metric_line, write_type_line, }; use serde_json::Value; use std::collections::HashMap; fn create_label_string(labels: &HashMap<String, String>) -> Vec<String> { let mut label_string = labels .iter() .map(|(k, v)| format!("{}=\"{}\"", sanitize_label_key(k), sanitize_label_value(v))) .collect::<Vec<String>>(); label_string.sort(); label_string } pub(crate) fn metrics_to_json(snapshot: Snapshot) -> Value { let Snapshot { counters, histograms, gauges, } = snapshot; let mut normalised_histograms = Vec::new(); for histogram in histograms { if let MetricValue::Histogram(histogram_value) = histogram.value { let mut prev = 0; let buckets = histogram_value .buckets .iter() .cloned() .map(|(le, count)| { let new_count = count - prev; prev = count; (le, new_count) }) .collect(); let new_histogram = His
pub(crate) fn metrics_to_prometheus(snapshot: Snapshot) -> String { let Snapshot { counters, histograms, gauges, } = snapshot; let mut output = String::new(); for counter in counters { let desc = sanitize_description(counter.description.as_str()); write_help_line(&mut output, counter.key.as_str(), desc.as_str()); write_type_line(&mut output, counter.key.as_str(), "counter"); let labels = create_label_string(&counter.labels); if let MetricValue::Counter(value) = counter.value { write_metric_line::<&str, u64>(&mut output, &counter.key.as_str(), None, &labels, None, value); } output.push('\n'); } for gauge in gauges { let desc = sanitize_description(gauge.description.as_str()); write_help_line(&mut output, gauge.key.as_str(), desc.as_str()); write_type_line(&mut output, gauge.key.as_str(), "gauge"); let labels = create_label_string(&gauge.labels); if let MetricValue::Gauge(value) = gauge.value { write_metric_line::<&str, f64>(&mut output, &gauge.key.as_str(), None, &labels, None, value); } output.push('\n'); } for histogram in histograms { let desc = sanitize_description(histogram.description.as_str()); write_help_line(&mut output, histogram.key.as_str(), desc.as_str()); write_type_line(&mut output, histogram.key.as_str(), "histogram"); let labels = create_label_string(&histogram.labels); if let MetricValue::Histogram(histogram_values) = histogram.value { for (le, count) in histogram_values.buckets { write_metric_line( &mut output, histogram.key.as_str(), Some("bucket"), &labels, Some(("le", le)), count, ); } write_metric_line( &mut output, histogram.key.as_str(), Some("bucket"), &labels, Some(("le", "+Inf")), histogram_values.count, ); write_metric_line::<&str, f64>( &mut output, histogram.key.as_str(), Some("sum"), &labels, None, histogram_values.sum, ); write_metric_line::<&str, u64>( &mut output, histogram.key.as_str(), Some("count"), &labels, None, histogram_values.count, ); } output.push('\n'); } output }
togram { buckets, sum: histogram_value.sum, count: histogram_value.count, }; normalised_histograms.push(Metric { key: histogram.key.clone(), labels: histogram.labels.clone(), description: histogram.description.clone(), value: MetricValue::Histogram(new_histogram), }); } } let snapshot = Snapshot { counters, histograms: normalised_histograms, gauges, }; serde_json::to_value(snapshot).unwrap() }
function_block-function_prefixed
[ { "content": "// Todo: Sanitizing might need to be adjusted to also change the fields in the RelationInfo\n\nfn sanitize_models(ctx: &mut Context) -> HashMap<String, (String, Option<String>)> {\n\n let mut enum_renames = HashMap::new();\n\n let sql_family = ctx.sql_family();\n\n\n\n for model in ctx.datamodel.models_mut() {\n\n rename_reserved(model);\n\n sanitize_name(model);\n\n\n\n let model_name = model.name().to_owned();\n\n let model_db_name = model.database_name().map(|s| s.to_owned());\n\n\n\n if let Some(pk) = &mut model.primary_key {\n\n sanitize_pk_field_names(&mut pk.fields);\n\n }\n\n\n\n for field in model.fields_mut() {\n\n sanitize_name(field);\n\n\n\n match field {\n\n Field::RelationField(rf) => {\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 1, "score": 256330.70460039424 }, { "content": "pub fn format(graph: &QueryGraph) -> String {\n\n format!(\n\n \"---- Query Graph ----\\nResult Nodes: {}\\nMarked Nodes: {}\\nRoot Nodes: {}\\n\\n{}\\n----------------------\",\n\n fmt_raw_indices(&graph.result_nodes),\n\n fmt_node_tuples(&graph.marked_node_pairs),\n\n fmt_node_list(&graph.root_nodes()),\n\n stringify_nodes(graph, graph.root_nodes(), &mut Vec::new()).join(\"\\n\\n\")\n\n )\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph/formatters.rs", "rank": 2, "score": 252999.6273583552 }, { "content": "fn sanitize_enums(enum_renames: &HashMap<String, (String, Option<String>)>, ctx: &mut Context) {\n\n for enm in ctx.datamodel.enums_mut() {\n\n if let Some((sanitized_name, db_name)) = enum_renames.get(&enm.name) {\n\n if enm.database_name().is_none() {\n\n enm.set_database_name(db_name.clone());\n\n }\n\n\n\n enm.set_name(sanitized_name);\n\n } else {\n\n sanitize_name(enm);\n\n }\n\n\n\n for enum_value in enm.values_mut() {\n\n if enum_value.name.is_empty() {\n\n enum_value.name = EMPTY_ENUM_PLACEHOLDER.to_string();\n\n enum_value.database_name = 
Some(\"\".to_string());\n\n } else {\n\n sanitize_name(enum_value);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 3, "score": 248181.1608324613 }, { "content": "/// The two parameters are:\n\n/// - The Prisma schema to reformat, as a string.\n\n/// - An LSP\n\n/// [DocumentFormattingParams](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_formatting) object, as JSON.\n\n///\n\n/// The function returns the formatted schema, as a string.\n\n///\n\n/// Of the DocumentFormattingParams, we only take into account tabSize, at the moment.\n\npub fn format(schema: &str, params: &str) -> String {\n\n let params: lsp_types::DocumentFormattingParams = match serde_json::from_str(params) {\n\n Ok(params) => params,\n\n Err(err) => {\n\n warn!(\"Error parsing DocumentFormattingParams params: {}\", err);\n\n return schema.to_owned();\n\n }\n\n };\n\n\n\n datamodel::reformat(schema, params.options.tab_size as usize).unwrap_or_else(|err| err.to_owned())\n\n}\n\n\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 4, "score": 247505.90943154762 }, { "content": "fn format_opt<T: Display>(opt: Option<T>) -> String {\n\n match opt {\n\n Some(t) => format!(\"{}\", t),\n\n None => \"None\".to_owned(),\n\n }\n\n}\n\n\n\n/// Explanation constants for conversion errors.\n\nmod expl {\n\n #![allow(dead_code)]\n\n\n\n pub const MONGO_DOUBLE: &str = \"MongoDB Double (64bit)\";\n\n pub const MONGO_I32: &str = \"MongoDB Int (32 bit)\";\n\n pub const MONGO_I64: &str = \"MongoDB Int (64 bit)\";\n\n\n\n pub const PRISMA_FLOAT: &str = \"Prisma Float (BigDecimal)\";\n\n pub const PRISMA_BIGINT: &str = \"Prisma BigInt (64 bit)\";\n\n pub const PRISMA_INT: &str = \"Prisma Int (64 bit)\";\n\n}\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/value.rs", "rank": 5, "score": 247345.98507594556 }, { "content": "/// Syntax 
for single-record and multi-record queries\n\nfn single_to_multi_filter(obj: IndexMap<String, QueryValue>) -> IndexMap<String, QueryValue> {\n\n let mut new_obj = IndexMap::new();\n\n\n\n for (key, value) in obj {\n\n let equality_obj = vec![(filters::EQUALS.to_owned(), value)].into_iter().collect();\n\n new_obj.insert(key, QueryValue::Object(equality_obj));\n\n }\n\n\n\n new_obj\n\n}\n", "file_path": "query-engine/core/src/query_document/selection.rs", "rank": 6, "score": 229767.15012389142 }, { "content": "/// A document must have a id column and the name is always `_id`. If we have no\n\n/// data in the collection, we must assume an id field exists.\n\nfn add_missing_ids_to_models(models: &mut BTreeMap<String, Model>) {\n\n for (_, model) in models.iter_mut() {\n\n if model.fields.iter().any(|f| f.database_name() == Some(\"_id\")) {\n\n continue;\n\n }\n\n\n\n let field = ScalarField {\n\n name: String::from(\"id\"),\n\n field_type: dml::FieldType::from(FieldType::ObjectId),\n\n arity: FieldArity::Required,\n\n database_name: Some(String::from(\"_id\")),\n\n default_value: Some(DefaultValue::new_expression(ValueGenerator::new_auto())),\n\n documentation: None,\n\n is_generated: false,\n\n is_updated_at: false,\n\n is_commented_out: false,\n\n is_ignored: false,\n\n };\n\n\n\n model.fields.insert(0, Field::ScalarField(field));\n", "file_path": "introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs", "rank": 7, "score": 227583.76248878296 }, { "content": "fn stringify_env_values(origin: serde_json::Value) -> crate::Result<HashMap<String, String>> {\n\n use serde_json::Value;\n\n\n\n let msg = match origin {\n\n Value::Object(map) => {\n\n let mut result: HashMap<String, String> = HashMap::new();\n\n\n\n for (key, val) in map.into_iter() {\n\n match val {\n\n Value::Null => continue,\n\n Value::String(val) => {\n\n result.insert(key, val);\n\n }\n\n val => {\n\n result.insert(key, val.to_string());\n\n }\n\n }\n\n }\n\n\n\n 
return Ok(result);\n", "file_path": "query-engine/query-engine-node-api/src/engine.rs", "rank": 8, "score": 226867.99914846173 }, { "content": "fn check_datamodel_for_mysql_5_6(datamodel: &ValidatedSchema, errors: &mut Vec<String>) {\n\n datamodel\n\n .db\n\n .walk_models()\n\n .flat_map(|model| model.scalar_fields())\n\n .for_each(|field| {\n\n if field\n\n .scalar_type()\n\n .map(|t| matches!(t, ScalarType::Json))\n\n .unwrap_or(false)\n\n {\n\n errors.push(format!(\n\n \"The `Json` data type used in {}.{} is not supported on MySQL 5.6.\",\n\n field.model().name(),\n\n field.name()\n\n ))\n\n }\n\n });\n\n}\n\n\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs", "rank": 9, "score": 224406.67976634885 }, { "content": "pub fn encode_bytes(bytes: &[u8]) -> String {\n\n base64::encode(bytes)\n\n}\n\n\n", "file_path": "libs/prisma-value/src/lib.rs", "rank": 10, "score": 221926.33809909312 }, { "content": "fn fetch_dbgenerated(value: &str) -> Option<String> {\n\n static POSTGRES_DB_GENERATED_RE: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r#\"(^\\((.*)\\)):{2,3}(\\\\\")?(.*)(\\\\\")?$\"#).unwrap());\n\n\n\n let captures = POSTGRES_DB_GENERATED_RE.captures(value)?;\n\n let fun = captures.get(1).unwrap().as_str();\n\n let suffix = captures.get(4).unwrap().as_str();\n\n Some(format!(\"{}::{}\", fun, suffix))\n\n}\n\n\n", "file_path": "libs/sql-schema-describer/src/postgres.rs", "rank": 11, "score": 218666.82927447555 }, { "content": "fn write_rel_aggregation_row(row: &RelAggregationRow, map: &mut HashMap<String, Item>) {\n\n for result in row.iter() {\n\n match result {\n\n RelAggregationResult::Count(rf, count) => match map.get_mut(UNDERSCORE_COUNT) {\n\n Some(item) => match item {\n\n Item::Map(inner_map) => inner_map.insert(rf.name.clone(), Item::Value(count.clone())),\n\n _ => unreachable!(),\n\n },\n\n None => {\n\n let mut inner_map: Map = Map::new();\n\n inner_map.insert(rf.name.clone(), Item::Value(count.clone()));\n\n 
map.insert(UNDERSCORE_COUNT.to_owned(), Item::Map(inner_map))\n\n }\n\n },\n\n };\n\n }\n\n}\n\n\n", "file_path": "query-engine/core/src/response_ir/internal.rs", "rank": 12, "score": 214754.6906871242 }, { "content": "fn collect_selection_tree(fields: &[FieldPair]) -> Vec<(String, Option<Vec<String>>)> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let field = &field.parsed_field;\n\n (\n\n field.name.clone(),\n\n field.nested_fields.as_ref().and_then(|nested_object| {\n\n let nested: Vec<_> = nested_object\n\n .fields\n\n .iter()\n\n .map(|f| f.parsed_field.name.clone())\n\n .collect();\n\n\n\n if nested.is_empty() {\n\n None\n\n } else {\n\n Some(nested)\n\n }\n\n }),\n\n )\n\n })\n\n .collect()\n\n}\n", "file_path": "query-engine/core/src/query_graph_builder/read/aggregations/mod.rs", "rank": 13, "score": 213704.21326702044 }, { "content": "fn empty_enum_values(datamodel: &mut Datamodel) -> Vec<EnumAndValue> {\n\n let mut enum_values_with_empty_names = vec![];\n\n\n\n for enm in datamodel.enums_mut() {\n\n let enum_name = enm.name.clone();\n\n\n\n for enum_value in enm.values_mut() {\n\n let name = match &enum_value.database_name {\n\n Some(name) => name,\n\n None => continue,\n\n };\n\n\n\n if !enum_value.name.is_empty() {\n\n continue;\n\n }\n\n\n\n enum_value.name = name.clone();\n\n enum_value.commented_out = true;\n\n enum_values_with_empty_names.push(EnumAndValue::new(&enum_name, &enum_value.name))\n\n }\n\n }\n\n\n\n enum_values_with_empty_names\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/commenting_out_guardrails.rs", "rank": 14, "score": 213602.00780690773 }, { "content": "fn validate_invalid_default_enum_value(enum_value: &str, ctx: &mut Context<'_>) {\n\n ctx.push_attribute_validation_error(&format!(\n\n \"The defined default value `{enum_value}` is not a valid value of the enum specified for the field.\"\n\n ));\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes/default.rs", 
"rank": 15, "score": 210075.1979563222 }, { "content": "fn fmt_node_list(v: &[NodeRef]) -> String {\n\n let inner_string = v.iter().map(|x| format!(\"{}\", x)).collect::<Vec<String>>().join(\", \");\n\n\n\n format!(\"[{}]\", inner_string.as_str())\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph/formatters.rs", "rank": 16, "score": 209303.4140852029 }, { "content": "/// Stringify a date to the following format\n\n/// 1999-05-01T00:00:00.000Z\n\npub fn stringify_date(date: &DateTime<FixedOffset>) -> String {\n\n // Warning: Be careful if you plan on changing the code below\n\n // The findUnique batch optimization expects date inputs to have exactly the same format as date outputs\n\n // This works today because clients always send date inputs in the same format as the serialized format below\n\n // Updating this without transforming date inputs to the same format WILL break the findUnique batch optimization\n\n date.to_rfc3339_opts(SecondsFormat::Millis, true)\n\n}\n\n\n", "file_path": "libs/prisma-value/src/lib.rs", "rank": 17, "score": 208547.76565900855 }, { "content": "fn string_filters(ctx: &mut BuilderContext, mapped_type: InputType) -> impl Iterator<Item = InputField> {\n\n let mut string_filters = vec![\n\n input_field(filters::CONTAINS, mapped_type.clone(), None).optional(),\n\n input_field(filters::STARTS_WITH, mapped_type.clone(), None).optional(),\n\n input_field(filters::ENDS_WITH, mapped_type.clone(), None).optional(),\n\n ];\n\n\n\n if ctx.can_full_text_search() {\n\n string_filters.push(input_field(filters::SEARCH, mapped_type, None).optional());\n\n }\n\n\n\n string_filters.into_iter()\n\n}\n\n\n", "file_path": "query-engine/schema-builder/src/input_types/fields/field_filter_types.rs", "rank": 18, "score": 208328.76616207982 }, { "content": "/// Returns the name of the sequence in the schema that the defaultvalue matches if it is drawn\n\n/// from one of them\n\nfn is_sequence(value: &str, sequences: &[Sequence]) -> Option<String> {\n\n 
AUTOINCREMENT_REGEX.captures(value).and_then(|captures| {\n\n let sequence_name = captures.name(\"sequence\").or_else(|| captures.name(\"sequence2\"));\n\n\n\n sequence_name.and_then(|name| {\n\n sequences\n\n .iter()\n\n .find(|seq| seq.name == name.as_str())\n\n .map(|x| x.name.clone())\n\n })\n\n })\n\n}\n\n\n", "file_path": "libs/sql-schema-describer/src/postgres.rs", "rank": 19, "score": 208017.14962992608 }, { "content": "fn format_chunks(chunks: Vec<dissimilar::Chunk>) -> String {\n\n let mut buf = String::new();\n\n for chunk in chunks {\n\n let formatted = match chunk {\n\n dissimilar::Chunk::Equal(text) => text.into(),\n\n dissimilar::Chunk::Delete(text) => format!(\"\\x1b[41m{}\\x1b[0m\", text),\n\n dissimilar::Chunk::Insert(text) => format!(\"\\x1b[42m{}\\x1b[0m\", text),\n\n };\n\n buf.push_str(&formatted);\n\n }\n\n buf\n\n}\n\n\n", "file_path": "prisma-fmt/tests/text_document_completion/test_api.rs", "rank": 20, "score": 205738.8156645307 }, { "content": "pub fn format_expression(expr: &Expression, indent: usize) -> String {\n\n match expr {\n\n Expression::Sequence { seq } => seq\n\n .iter()\n\n .map(|expr| add_indent(indent, format_expression(expr, indent + 2)))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n\n\n Expression::Query { query } => match &**query {\n\n Query::Read(rq) => add_indent(indent, format!(\"{}\", rq)),\n\n Query::Write(wq) => add_indent(indent, format!(\"{}\", wq)),\n\n },\n\n\n\n Expression::Func { func: _ } => add_indent(indent, \"(Fn env)\"),\n\n Expression::Let { bindings, expressions } => {\n\n let binding_strs = bindings\n\n .iter()\n\n .map(|binding| {\n\n add_indent(\n\n indent + 2,\n", "file_path": "query-engine/core/src/interpreter/formatters.rs", "rank": 21, "score": 205099.6476558072 }, { "content": "fn fmt_list(buffer: &mut String, list: &[Bson], depth: usize) -> std::fmt::Result {\n\n if cfg!(debug_assertions) {\n\n writeln!(buffer, \"[\")?;\n\n } else {\n\n write!(buffer, \"[\")?;\n\n }\n\n\n\n for item in 
list {\n\n write_indented!(buffer, depth, \"\",);\n\n fmt_val(buffer, item, depth)?;\n\n if cfg!(debug_assertions) {\n\n writeln!(buffer, \",\")?;\n\n } else {\n\n write!(buffer, \",\")?;\n\n }\n\n }\n\n\n\n write_indented!(buffer, usize::max(depth - 1, 0), \"]\",);\n\n Ok(())\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 22, "score": 205034.83224033943 }, { "content": "fn fmt_val(buffer: &mut String, val: &Bson, depth: usize) -> std::fmt::Result {\n\n match val {\n\n Bson::Array(ary) => fmt_list(buffer, ary, depth + 1),\n\n Bson::Document(doc) => fmt_doc(buffer, doc, depth + 1),\n\n val => write!(buffer, \"{}\", val),\n\n }\n\n}\n\n\n\n// NOTE: All these log functions could be reduced to a single macro\n\npub(crate) fn log_insert_one(coll: &str, doc: &Document) {\n\n let mut buffer = String::new();\n\n\n\n write!(&mut buffer, \"db.{}.insertOne(\", coll).unwrap();\n\n fmt_doc(&mut buffer, doc, 1).unwrap();\n\n write!(&mut buffer, \")\").unwrap();\n\n\n\n let params: Vec<i32> = Vec::new();\n\n debug!(target: \"mongodb_query_connector::query\", query = %buffer, item_type = \"query\", is_query = true, params = ?params);\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 23, "score": 205034.83224033943 }, { "content": "fn fmt_doc(buffer: &mut String, doc: &Document, depth: usize) -> std::fmt::Result {\n\n if cfg!(debug_assertions) {\n\n writeln!(buffer, \"{{\")?;\n\n } else {\n\n write!(buffer, \"{{\")?;\n\n }\n\n\n\n for (key, value) in doc {\n\n write_indented!(buffer, depth, \"{}: \", key);\n\n fmt_val(buffer, value, depth)?;\n\n if cfg!(debug_assertions) {\n\n writeln!(buffer, \",\")?;\n\n } else {\n\n write!(buffer, \",\")?;\n\n }\n\n }\n\n\n\n write_indented!(buffer, usize::max(depth - 1, 0), \"}}\",);\n\n Ok(())\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 24, "score": 205034.83224033943 }, { "content": "/// 
Enum type convenience wrapper function.\n\npub fn string_enum_type<T>(name: T, values: Vec<String>) -> EnumType\n\nwhere\n\n T: Into<String>,\n\n{\n\n EnumType::String(StringEnumType {\n\n name: name.into(),\n\n values,\n\n })\n\n}\n\n\n", "file_path": "query-engine/schema-builder/src/utils.rs", "rank": 25, "score": 204109.13789344713 }, { "content": "fn path_prefixed_selection(doc: &mut Document, parent_paths: Vec<String>, selections: Vec<SelectedField>) {\n\n for field in selections {\n\n match field {\n\n prisma_models::SelectedField::Scalar(sf) => {\n\n let mut parent_paths = parent_paths.clone();\n\n parent_paths.push(sf.db_name().to_owned());\n\n doc.insert(parent_paths.join(\".\"), Bson::Int32(1));\n\n }\n\n\n\n prisma_models::SelectedField::Composite(cs) => {\n\n let mut parent_paths = parent_paths.clone();\n\n parent_paths.push(cs.field.db_name().to_owned());\n\n path_prefixed_selection(doc, parent_paths, cs.selections);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/projection.rs", "rank": 26, "score": 204104.8303715287 }, { "content": "fn render_column_changes(columns: Pair<ColumnWalker<'_>>, changes: &ColumnChanges, sink: &mut String) {\n\n let readable_changes = changes\n\n .iter()\n\n .map(|change| match change {\n\n ColumnChange::Arity => format!(\n\n \"changed from {:?} to {:?}\",\n\n columns.previous().arity(),\n\n columns.next().arity()\n\n ),\n\n ColumnChange::Default => format!(\n\n \"default changed from `{:?}` to `{:?}`\",\n\n columns.previous().default().map(|d| d.kind()),\n\n columns.next().default().map(|d| d.kind())\n\n ),\n\n ColumnChange::TypeChanged => \"type changed\".to_owned(),\n\n ColumnChange::Autoincrement => {\n\n if columns.previous().is_autoincrement() {\n\n \"column is no longer autoincrementing\".to_owned()\n\n } else {\n\n \"column became autoincrementing\".to_owned()\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_migration.rs", "rank": 27, "score": 
204104.8303715287 }, { "content": "fn fmt_opts(buffer: &mut String, opts: &FindOptions, depth: usize) -> std::fmt::Result {\n\n if cfg!(debug_assertions) {\n\n writeln!(buffer, \"{{\")?;\n\n } else {\n\n write!(buffer, \"{{\")?;\n\n }\n\n\n\n if let Some(skip) = opts.skip {\n\n write_indented!(buffer, depth, \"skip: {},\\n\", skip);\n\n }\n\n\n\n if let Some(limit) = opts.limit {\n\n write_indented!(buffer, depth, \"limit: {},\\n\", limit);\n\n }\n\n\n\n if let Some(ref sort) = opts.sort {\n\n write_indented!(buffer, depth, \"sort: \",);\n\n fmt_doc(buffer, sort, depth + 1)?;\n\n\n\n if cfg!(debug_assertions) {\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 28, "score": 202724.98728247074 }, { "content": "fn get_name_argument(ctx: &mut Context<'_>) -> Option<StringId> {\n\n match ctx.visit_optional_arg(\"name\").map(|name| name.as_str()) {\n\n Some(Ok(\"\")) => {\n\n ctx.push_attribute_validation_error(\"The `name` argument cannot be an empty string.\");\n\n }\n\n Some(Err(err)) => ctx.push_error(err),\n\n Some(Ok(name)) => return Some(ctx.interner.intern(name)),\n\n None => (),\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes.rs", "rank": 29, "score": 202143.95514770588 }, { "content": "pub fn lint(schema: String) -> String {\n\n lint::run(&schema)\n\n}\n\n\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 30, "score": 201765.77380076947 }, { "content": "pub fn test_tracing_subscriber(log_config: String, metrics: MetricRegistry) -> Sub {\n\n let filter = EnvFilter::new(log_config);\n\n\n\n let fmt_layer = tracing_subscriber::fmt::layer()\n\n .with_writer(PrintWriter)\n\n .with_filter(filter);\n\n\n\n tracing_subscriber::registry()\n\n .with(fmt_layer)\n\n .with(metrics)\n\n .with(ErrorLayer::default())\n\n}\n\n\n\n/// This is a temporary implementation detail for `tracing` logs in tests.\n\n/// Instead of going through `std::io::stderr`, it goes through the specific\n\n/// local 
stderr handle used by `eprintln` and `dbg`, allowing logs to appear in\n\n/// specific test outputs for readability.\n\n///\n\n/// It is used from test_macros.\n\npub struct PrintWriter;\n", "file_path": "query-engine/connector-test-kit-rs/query-tests-setup/src/logging.rs", "rank": 31, "score": 200956.03498162673 }, { "content": "fn sanitize_strings(strings: &[String]) -> Vec<String> {\n\n strings.iter().map(|f| sanitize_string(f)).collect()\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 32, "score": 200876.1343460949 }, { "content": "pub fn collect_selection_order(from: &[FieldPair]) -> Vec<String> {\n\n from.iter()\n\n .map(|pair| {\n\n pair.parsed_field\n\n .alias\n\n .clone()\n\n .unwrap_or_else(|| pair.parsed_field.name.clone())\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/read/utils.rs", "rank": 33, "score": 200290.29224866504 }, { "content": "fn format_chunks(chunks: Vec<dissimilar::Chunk>) -> String {\n\n let mut buf = String::new();\n\n for chunk in chunks {\n\n let formatted = match chunk {\n\n dissimilar::Chunk::Equal(text) => text.into(),\n\n dissimilar::Chunk::Delete(text) => format!(\"\\x1b[41m{}\\x1b[0m\", text),\n\n dissimilar::Chunk::Insert(text) => format!(\"\\x1b[42m{}\\x1b[0m\", text),\n\n };\n\n buf.push_str(&formatted);\n\n }\n\n buf\n\n}\n", "file_path": "migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs", "rank": 34, "score": 200287.34155381302 }, { "content": "pub fn referential_actions(schema: String) -> String {\n\n actions::run(&schema)\n\n}\n\n\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 35, "score": 199641.1646148549 }, { "content": "pub fn native_types(schema: String) -> String {\n\n native::run(&schema)\n\n}\n\n\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 36, "score": 199641.1646148549 }, { "content": "fn fmt_query(buffer: &mut String, coll_name: &str, query: 
&MongoReadQuery) -> std::fmt::Result {\n\n match query {\n\n MongoReadQuery::Find(find) => {\n\n write!(buffer, \"db.{}.find(\", coll_name)?;\n\n\n\n if let Some(ref filter) = find.filter {\n\n fmt_doc(buffer, filter, 1)?;\n\n write!(buffer, \", \")?;\n\n }\n\n\n\n fmt_opts(buffer, &find.options, 1)?;\n\n write!(buffer, \")\")\n\n }\n\n MongoReadQuery::Pipeline(pipeline) => {\n\n write!(buffer, \"db.{}.aggregate(\", coll_name)?;\n\n\n\n let stages: Vec<_> = pipeline\n\n .stages\n\n .iter()\n\n .map(|stage| Bson::Document(stage.clone()))\n\n .collect();\n\n\n\n fmt_list(buffer, &stages, 1)?;\n\n write!(buffer, \")\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 37, "score": 198338.71306745388 }, { "content": "pub fn replace_relation_info_field_names(target: &mut Vec<String>, old_name: &str, new_name: &str) {\n\n target\n\n .iter_mut()\n\n .map(|v| {\n\n if v == old_name {\n\n *v = new_name.to_string()\n\n }\n\n })\n\n .for_each(drop);\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs", "rank": 38, "score": 197848.85572389382 }, { "content": "// @map on enum values\n\nfn merge_changed_enum_values(old_data_model: &Datamodel, new_data_model: &mut Datamodel, warnings: &mut Vec<Warning>) {\n\n let mut changed_enum_values = vec![];\n\n\n\n for enm in new_data_model.enums() {\n\n let old_enum = match old_data_model.find_enum(&enm.name) {\n\n Some(old_enum) => old_enum,\n\n None => continue,\n\n };\n\n\n\n for value in enm.values() {\n\n let old_value =\n\n match old_enum.find_value_db_name(value.database_name.as_ref().unwrap_or(&value.name.to_owned())) {\n\n Some(old_value) => old_value,\n\n None => continue,\n\n };\n\n\n\n if enm.find_value(&old_value.name).is_none() {\n\n let ev = EnumAndValue::new(&enm.name, &value.name);\n\n changed_enum_values.push((ev, old_value.name.clone()))\n\n }\n", "file_path": 
"introspection-engine/connectors/sql-introspection-connector/src/re_introspection.rs", "rank": 39, "score": 197801.22705765345 }, { "content": "/// Produces pair like `(\"count_fieldName\", { \"$sum\": \"$fieldName\" })`.\n\n/// Important: Only valid for field-level count aggregations.\n\nfn count_field_pair(field: &ScalarFieldRef) -> (String, Bson) {\n\n (\n\n format!(\"count_{}\", field.db_name()),\n\n doc! { \"$push\": { \"$cond\": { \"if\": format!(\"${}\", field.db_name()), \"then\": 1, \"else\": 0 }}}.into(),\n\n )\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs", "rank": 40, "score": 196829.61427043105 }, { "content": "fn primary_key_mapped_name(ctx: &mut Context<'_>) -> Option<StringId> {\n\n let mapped_name = match ctx.visit_optional_arg(\"map\").map(|name| name.as_str()) {\n\n Some(Ok(\"\")) => {\n\n ctx.push_attribute_validation_error(\"The `map` argument cannot be an empty string.\");\n\n None\n\n }\n\n Some(Ok(name)) => Some(ctx.interner.intern(name)),\n\n Some(Err(err)) => {\n\n ctx.push_error(err);\n\n None\n\n }\n\n None => None,\n\n };\n\n\n\n mapped_name\n\n}\n", "file_path": "libs/datamodel/parser-database/src/attributes/id.rs", "rank": 41, "score": 196694.76094922321 }, { "content": "fn default_attribute_mapped_name(ctx: &mut Context<'_>) -> Option<StringId> {\n\n match ctx.visit_optional_arg(\"map\").map(|name| name.as_str()) {\n\n Some(Ok(\"\")) => {\n\n ctx.push_attribute_validation_error(\"The `map` argument cannot be an empty string.\");\n\n None\n\n }\n\n Some(Ok(name)) => Some(ctx.interner.intern(name)),\n\n Some(Err(err)) => {\n\n ctx.push_error(err);\n\n None\n\n }\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes/default.rs", "rank": 42, "score": 196694.76094922321 }, { "content": "/// This is the same command as get_config()\n\n///\n\n/// Params is a JSON string with the following shape:\n\n///\n\n/// ```ignore\n\n/// 
interface GetConfigParams {\n\n/// prismaSchema: string\n\n/// ignoreEnvVarErrors?: bool\n\n/// env?: { [key: string]: string }\n\n/// datasourceOverrides?: { [key: string]: string }\n\n/// }\n\n/// ```\n\n/// Params example:\n\n///\n\n/// ```ignore\n\n/// {\n\n/// \"prismaSchema\": <the prisma schema>,\n\n/// \"env\": {\n\n/// \"DBURL\": \"postgresql://example.com/mydb\"\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// The response is a JSON string with the following shape:\n\n///\n\n/// ```ignore\n\n/// type GetConfigSuccessResponse = any // same as QE getConfig\n\n///\n\n/// interface GetConfigErrorResponse {\n\n/// error: {\n\n/// error_code?: string\n\n/// message: string\n\n/// }\n\n/// }\n\n///\n\n/// type GetConfigResponse = GetConfigErrorResponse | GetConfigSuccessResponse\n\n///\n\n/// ```\n\npub fn get_config(get_config_params: String) -> String {\n\n get_config::get_config(&get_config_params)\n\n}\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 43, "score": 195621.0497523977 }, { "content": "#[test]\n\nfn string() {\n\n let res = introspect(|db| async move {\n\n db.create_collection(\"A\", None).await?;\n\n let collection = db.collection(\"A\");\n\n\n\n let docs = vec![\n\n doc! {\"first\": \"Musti\", \"second\": \"Naukio\", \"third\": \"MeowMeow\"},\n\n doc! {\"first\": \"MeowMeow\", \"second\": null, \"third\": \"Lol\"},\n\n doc! 
{\"first\": \"Lol\", \"second\": \"Bar\"},\n\n ];\n\n\n\n collection.insert_many(docs, None).await.unwrap();\n\n\n\n Ok(())\n\n });\n\n\n\n let expected = expect![[r#\"\n\n model A {\n\n id String @id @default(auto()) @map(\"_id\") @db.ObjectId\n\n first String\n\n second String?\n\n third String?\n\n }\n\n \"#]];\n\n\n\n expected.assert_eq(res.datamodel());\n\n}\n\n\n", "file_path": "introspection-engine/connectors/mongodb-introspection-connector/tests/types/mod.rs", "rank": 44, "score": 194405.02655615145 }, { "content": "/// Transforms a document to a `Record`, fields ordered as defined in `fields`.\n\nfn document_to_record(mut doc: Document, fields: &[String], meta_mapping: &OutputMetaMapping) -> crate::Result<Record> {\n\n let mut values: Vec<PrismaValue> = Vec::with_capacity(fields.len());\n\n\n\n for field in fields {\n\n let bson = doc.remove(field).unwrap_or(Bson::Null);\n\n let mapping = meta_mapping.get(field).expect(\"Incorrect meta type mapping.\");\n\n let val = value_from_bson(bson, mapping).decorate_with_field_name(field)?;\n\n\n\n values.push(val);\n\n }\n\n\n\n Ok(Record::new(values))\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/root_queries/mod.rs", "rank": 45, "score": 194238.30576128326 }, { "content": "fn stringify_nodes(graph: &QueryGraph, nodes: Vec<NodeRef>, seen_nodes: &mut Vec<NodeRef>) -> Vec<String> {\n\n let mut rendered_nodes = vec![];\n\n\n\n for node in nodes {\n\n if seen_nodes.contains(&node) {\n\n continue;\n\n }\n\n\n\n seen_nodes.push(node);\n\n let mut node_child_info = vec![];\n\n\n\n let children: Vec<NodeRef> = graph\n\n .outgoing_edges(&node)\n\n .iter()\n\n .map(|child_edge| {\n\n let child_node = graph.edge_target(child_edge);\n\n node_child_info.push(format!(\n\n \"Child (edge {}): Node {} - {}\",\n\n child_edge.id(),\n\n child_node.id(),\n", "file_path": "query-engine/core/src/query_graph/formatters.rs", "rank": 46, "score": 192971.01341808937 }, { "content": "fn 
convert_composite_object(cf: &CompositeFieldRef, pairs: Vec<(String, PrismaValue)>) -> crate::Result<Bson> {\n\n let mut doc = Document::new();\n\n\n\n for (field, value) in pairs {\n\n let field = cf\n\n .typ\n\n .find_field(&field) // Todo: This is assuming a lot by only checking the prisma names, not DB names.\n\n .expect(\"Writing unavailable composite field.\");\n\n\n\n let converted = (field, value).into_bson()?;\n\n\n\n doc.insert(field.db_name(), converted);\n\n }\n\n\n\n Ok(Bson::Document(doc))\n\n}\n\n\n\nimpl IntoBson for (&ScalarFieldRef, PrismaValue) {\n\n fn into_bson(self) -> crate::Result<Bson> {\n\n let (sf, value) = self;\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/value.rs", "rank": 47, "score": 190281.21060636418 }, { "content": "/// `INSERT` a new record to the database. Resulting an `INSERT` ast and an\n\n/// optional `RecordProjection` if available from the arguments or model.\n\npub fn create_record(model: &ModelRef, mut args: WriteArgs, trace_id: Option<String>) -> Insert<'static> {\n\n let fields: Vec<_> = model\n\n .fields()\n\n .scalar()\n\n .into_iter()\n\n .filter(|field| args.has_arg_for(&field.db_name()))\n\n .collect();\n\n\n\n let insert = fields\n\n .into_iter()\n\n .fold(Insert::single_into(model.as_table()), |insert, field| {\n\n let db_name = field.db_name();\n\n let value = args.take_field_value(db_name).unwrap();\n\n let value: PrismaValue = value\n\n .try_into()\n\n .expect(\"Create calls can only use PrismaValue write expressions (right now).\");\n\n\n\n insert.value(db_name.to_owned(), field.value(value))\n\n });\n\n\n\n Insert::from(insert)\n\n .returning(ModelProjection::from(model.primary_identifier()).as_columns())\n\n .append_trace(&Span::current())\n\n .add_trace_id(trace_id)\n\n}\n\n\n\n/// `INSERT` new records into the database based on the given write arguments,\n\n/// where each `WriteArg` in the Vec is one row.\n\n/// Requires `affected_fields` to be non-empty to produce valid SQL.\n", 
"file_path": "query-engine/connectors/sql-query-connector/src/query_builder/write.rs", "rank": 48, "score": 189003.17081079158 }, { "content": "#[napi]\n\npub fn dmmf(datamodel_string: String) -> napi::Result<String> {\n\n let datamodel = datamodel::parse_datamodel(&datamodel_string)\n\n .map_err(|errors| ApiError::conversion(errors, &datamodel_string))?;\n\n\n\n let config = datamodel::parse_configuration(&datamodel_string)\n\n .map_err(|errors| ApiError::conversion(errors, &datamodel_string))?;\n\n let datasource = config.subject.datasources.first();\n\n\n\n let capabilities = datasource\n\n .map(|ds| ds.capabilities())\n\n .unwrap_or_else(ConnectorCapabilities::empty);\n\n\n\n let referential_integrity = datasource.map(|ds| ds.referential_integrity()).unwrap_or_default();\n\n\n\n let internal_data_model = InternalDataModelBuilder::from(&datamodel.subject).build(\"\".into());\n\n\n\n let query_schema: QuerySchemaRef = Arc::new(schema_builder::build(\n\n internal_data_model,\n\n true,\n\n capabilities,\n\n config.subject.preview_features().iter().collect(),\n\n referential_integrity,\n\n ));\n\n\n\n let dmmf = dmmf::render_dmmf(&datamodel.subject, query_schema);\n\n\n\n Ok(serde_json::to_string(&dmmf)?)\n\n}\n\n\n", "file_path": "query-engine/query-engine-node-api/src/functions.rs", "rank": 49, "score": 188790.0360550444 }, { "content": "fn indent_string(indent: usize) -> String {\n\n \" \".repeat(indent)\n\n}\n", "file_path": "query-engine/core/src/interpreter/formatters.rs", "rank": 50, "score": 188539.10534795403 }, { "content": "fn validate_invalid_default_enum_expr(bad_value: &ast::Expression, ctx: &mut Context<'_>) {\n\n ctx.push_attribute_validation_error(&format!(\"Expected an enum value, but found `{bad_value}`.\"))\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes/default.rs", "rank": 51, "score": 188506.43926357848 }, { "content": "/// Parses a Prisma V2 datamodel document into an internal AST representation.\n\npub fn 
parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> SchemaAst {\n\n let datamodel_result = PrismaDatamodelParser::parse(Rule::schema, datamodel_string);\n\n\n\n match datamodel_result {\n\n Ok(mut datamodel_wrapped) => {\n\n let datamodel = datamodel_wrapped.next().unwrap();\n\n let mut top_level_definitions: Vec<Top> = vec![];\n\n for current in datamodel.relevant_children() {\n\n match current.as_rule() {\n\n Rule::model_declaration => {\n\n let keyword = current.clone().into_inner().find(|pair| matches!(pair.as_rule(), Rule::TYPE_KEYWORD | Rule::MODEL_KEYWORD) ).expect(\"Expected model or type keyword\");\n\n\n\n match keyword.as_rule() {\n\n Rule::TYPE_KEYWORD => {\n\n top_level_definitions.push(Top::CompositeType(parse_composite_type(&current, diagnostics)))\n\n }\n\n Rule::MODEL_KEYWORD => {\n\n top_level_definitions.push(Top::Model(parse_model(&current, diagnostics)))\n\n }\n\n _ => unreachable!(),\n", "file_path": "libs/datamodel/schema-ast/src/parser/parse_schema.rs", "rank": 52, "score": 188502.4667215444 }, { "content": "pub fn run() -> String {\n\n serde_json::to_string(&GENERATOR.active_features()).unwrap()\n\n}\n", "file_path": "prisma-fmt/src/preview.rs", "rank": 53, "score": 188435.91729942686 }, { "content": "fn plug<F: Fn(&str) -> String>(f: F) {\n\n let mut datamodel_string = String::new();\n\n\n\n io::stdin()\n\n .read_to_string(&mut datamodel_string)\n\n .expect(\"Unable to read from stdin.\");\n\n\n\n print!(\"{}\", f(&datamodel_string))\n\n}\n", "file_path": "prisma-fmt/src/main.rs", "rank": 54, "score": 188212.751286467 }, { "content": "fn insert_name(top_id: TopId, top: &ast::Top, namespace: &mut HashMap<StringId, TopId>, ctx: &mut Context<'_>) {\n\n let name = ctx.interner.intern(top.name());\n\n if let Some(existing) = namespace.insert(name, top_id) {\n\n ctx.push_error(duplicate_top_error(&ctx.ast[existing], top));\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/names.rs", "rank": 55, "score": 
187281.42509569132 }, { "content": "pub fn preview_features() -> String {\n\n preview::run()\n\n}\n\n\n", "file_path": "prisma-fmt/src/lib.rs", "rank": 56, "score": 185858.65189902688 }, { "content": "pub fn string_combination() -> String {\n\n let schema = indoc! {\n\n r#\"model TestModel {\n\n #id(id, Int, @id)\n\n fieldA String\n\n fieldB String\n\n fieldC String\n\n fieldD String\n\n }\"#\n\n };\n\n\n\n schema.to_owned()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/schemas/basic.rs", "rank": 57, "score": 184546.30325466226 }, { "content": "fn validate_dbgenerated_args(args: &[ast::Argument], mut accept: impl FnMut(), ctx: &mut Context<'_>) {\n\n let mut bail = || {\n\n // let's not mention what we don't want to see.\n\n ctx.push_attribute_validation_error(\"`dbgenerated()` takes a single String argument\")\n\n };\n\n\n\n if args.len() > 1 {\n\n bail()\n\n }\n\n\n\n match args.get(0).map(|arg| &arg.value) {\n\n Some(ast::Expression::StringValue(val, _)) if val.is_empty() => {\n\n ctx.push_attribute_validation_error(\n\n \"dbgenerated() takes either no argument, or a single nonempty string argument.\",\n\n );\n\n }\n\n None | Some(ast::Expression::StringValue(_, _)) => accept(),\n\n _ => bail(),\n\n }\n\n}\n\n\n\nconst FN_AUTOINCREMENT: &str = \"autoincrement\";\n\nconst FN_CUID: &str = \"cuid\";\n\nconst FN_DBGENERATED: &str = \"dbgenerated\";\n\nconst FN_NOW: &str = \"now\";\n\nconst FN_UUID: &str = \"uuid\";\n\nconst FN_AUTO: &str = \"auto\";\n\n\n\nconst KNOWN_FUNCTIONS: &[&str] = &[FN_AUTOINCREMENT, FN_CUID, FN_DBGENERATED, FN_NOW, FN_UUID, FN_AUTO];\n", "file_path": "libs/datamodel/parser-database/src/attributes/default.rs", "rank": 58, "score": 183841.77795438463 }, { "content": "fn validate_auto_args(args: &[ast::Argument], mut accept: impl FnMut(), ctx: &mut Context<'_>) {\n\n if !args.is_empty() {\n\n ctx.push_attribute_validation_error(\"`auto()` takes no arguments\");\n\n } else {\n\n accept()\n\n }\n\n}\n\n\n", 
"file_path": "libs/datamodel/parser-database/src/attributes/default.rs", "rank": 59, "score": 183841.77795438463 }, { "content": "pub fn string_combination_unique() -> String {\n\n let schema = indoc! {\n\n r#\"model TestModel {\n\n #id(id, Int, @id)\n\n fieldA String\n\n fieldB String\n\n fieldC String\n\n fieldD String\n\n\n\n @@unique([fieldA, fieldB, fieldC, fieldD])\n\n }\"#\n\n };\n\n\n\n schema.to_owned()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/schemas/basic.rs", "rank": 60, "score": 182871.67323948577 }, { "content": "fn sanitize_string(s: &str) -> String {\n\n let needs_sanitation = RE_START.is_match(s) || RE.is_match(s);\n\n\n\n if needs_sanitation {\n\n let start_cleaned: String = RE_START.replace_all(s, \"\").parse().unwrap();\n\n let sanitized: String = RE.replace_all(start_cleaned.as_str(), \"_\").parse().unwrap();\n\n\n\n sanitized\n\n } else {\n\n s.to_owned()\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 61, "score": 182802.46756308127 }, { "content": "pub fn unquote_string(val: &str) -> String {\n\n val.trim_start_matches('\\'')\n\n .trim_end_matches('\\'')\n\n .trim_start_matches('\\\\')\n\n .trim_start_matches('\"')\n\n .trim_end_matches('\"')\n\n .trim_end_matches('\\\\')\n\n .into()\n\n}\n\n\n", "file_path": "libs/sql-schema-describer/src/lib.rs", "rank": 62, "score": 181998.41326754552 }, { "content": "fn json_filters(ctx: &mut BuilderContext) -> impl Iterator<Item = InputField> {\n\n // TODO: also add json-specific \"keys\" filters\n\n // TODO: add json_type filter\n\n let path_type = if ctx.capabilities.contains(ConnectorCapability::JsonFilteringJsonPath) {\n\n InputType::string()\n\n } else if ctx.capabilities.contains(ConnectorCapability::JsonFilteringArrayPath) {\n\n InputType::list(InputType::string())\n\n } else {\n\n unreachable!()\n\n };\n\n\n\n vec![\n\n input_field(filters::PATH, vec![path_type], 
None).optional(),\n\n input_field(filters::STRING_CONTAINS, InputType::string(), None).optional(),\n\n input_field(filters::STRING_STARTS_WITH, InputType::string(), None).optional(),\n\n input_field(filters::STRING_ENDS_WITH, InputType::string(), None).optional(),\n\n input_field(filters::ARRAY_CONTAINS, InputType::json(), None)\n\n .optional()\n\n .nullable(),\n\n input_field(filters::ARRAY_STARTS_WITH, InputType::json(), None)\n\n .optional()\n\n .nullable(),\n\n input_field(filters::ARRAY_ENDS_WITH, InputType::json(), None)\n\n .optional()\n\n .nullable(),\n\n ]\n\n .into_iter()\n\n}\n\n\n", "file_path": "query-engine/schema-builder/src/input_types/fields/field_filter_types.rs", "rank": 63, "score": 181650.51001562437 }, { "content": "/// Reformats a reserved string as \"Renamed{}\"\n\nfn reformat_reserved_string(s: &str) -> String {\n\n if is_reserved_type_name(s) {\n\n format!(\"Renamed{}\", s)\n\n } else {\n\n s.to_owned()\n\n }\n\n}\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 64, "score": 181022.39276664457 }, { "content": "fn type_to_string(scalar: &ScalarType) -> String {\n\n scalar.to_string()\n\n}\n\n\n", "file_path": "query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs", "rank": 65, "score": 181015.7235799444 }, { "content": "fn new_shadow_database_name() -> String {\n\n format!(\"prisma_migrate_shadow_db_{}\", uuid::Uuid::new_v4())\n\n}\n\n\n\n/// Try to reset the database to an empty state. 
This should only be used\n\n/// when we don't have the permissions to do a full reset.\n\n#[tracing::instrument(skip(flavour))]\n\nasync fn best_effort_reset(flavour: &mut (dyn SqlFlavour + Send + Sync)) -> ConnectorResult<()> {\n\n best_effort_reset_impl(flavour)\n\n .await\n\n .map_err(|err| err.into_soft_reset_failed_error())\n\n}\n\n\n\nasync fn best_effort_reset_impl(flavour: &mut (dyn SqlFlavour + Send + Sync)) -> ConnectorResult<()> {\n\n tracing::info!(\"Attempting best_effort_reset\");\n\n\n\n let source_schema = flavour.describe_schema().await?;\n\n let target_schema = SqlSchema::default();\n\n let mut steps = Vec::new();\n\n\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/lib.rs", "rank": 66, "score": 180668.79254840885 }, { "content": "fn parse_base64_string(s: &str) -> PrismaResult<String> {\n\n match base64::decode(s) {\n\n Ok(bytes) => String::from_utf8(bytes).map_err(|e| {\n\n trace!(\"Error decoding {} from Base64 (invalid UTF-8): {:?}\", s, e);\n\n\n\n PrismaError::ConfigurationError(\"Invalid Base64\".into())\n\n }),\n\n Err(e) => {\n\n trace!(\"Decoding Base64 failed (might not be encoded): {:?}\", e);\n\n Ok(String::from(s))\n\n }\n\n }\n\n}\n\n\n", "file_path": "query-engine/query-engine/src/opt.rs", "rank": 67, "score": 180087.37283021986 }, { "content": "fn escape_string_literal(s: &str) -> String {\n\n s.replace('\\'', r#\"''\"#)\n\n}\n\n\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs", "rank": 68, "score": 179286.45343872055 }, { "content": "/// Returns whether the elements of the two slices match, regardless of ordering.\n\npub fn columns_match(a_cols: &[String], b_cols: &[String]) -> bool {\n\n a_cols.len() == b_cols.len() && a_cols.iter().all(|a_col| b_cols.iter().any(|b_col| a_col == b_col))\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs", "rank": 69, "score": 179272.16247219802 }, { 
"content": "fn fresh_db_name() -> String {\n\n /// An atomic counter to get a unique identifier for each test database.\n\n static DATABASE_ID: AtomicUsize = AtomicUsize::new(0);\n\n const PREFIX: &str = \"test_database_\";\n\n\n\n let id = DATABASE_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed);\n\n let mut out = String::with_capacity(PREFIX.len() + 4);\n\n out.push_str(PREFIX);\n\n out.write_fmt(format_args!(\"{:04}\", id)).unwrap();\n\n out\n\n}\n\n\n", "file_path": "migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs", "rank": 70, "score": 178674.38018705917 }, { "content": "fn validate_invalid_scalar_default(scalar_type: ScalarType, value: &ast::Expression, ctx: &mut Context<'_>) {\n\n ctx.push_attribute_validation_error(&format!(\n\n \"Expected a {scalar_type} value, but found `{bad_value}`.\",\n\n scalar_type = scalar_type.as_str(),\n\n bad_value = value\n\n ));\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes/default.rs", "rank": 71, "score": 178466.91371634637 }, { "content": "fn sanitize_string(s: &str) -> Option<String> {\n\n static RE_START: Lazy<Regex> = Lazy::new(|| Regex::new(\"^[^a-zA-Z]+\").unwrap());\n\n static RE: Lazy<Regex> = Lazy::new(|| Regex::new(\"[^_a-zA-Z0-9]\").unwrap());\n\n\n\n let needs_sanitation = RE_START.is_match(s) || RE.is_match(s);\n\n\n\n if needs_sanitation {\n\n let start_cleaned: String = RE_START.replace_all(s, \"\").parse().unwrap();\n\n let sanitized: String = RE.replace_all(start_cleaned.as_str(), \"_\").parse().unwrap();\n\n\n\n Some(sanitized)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs", "rank": 72, "score": 178240.11434522382 }, { "content": "fn parse_string_literal(token: &Token<'_>) -> String {\n\n let current = token.first_relevant_child();\n\n assert!(current.as_rule() == Rule::string_content);\n\n\n\n // this will overallocate a bit for strings with 
escaped characters, but it\n\n // shouldn't make a dramatic difference.\n\n let mut out = String::with_capacity(current.as_str().len());\n\n\n\n for pair in current.into_inner() {\n\n match pair.as_rule() {\n\n Rule::string_raw => {\n\n out.push_str(pair.as_str());\n\n }\n\n Rule::string_escape => {\n\n let escaped = pair.into_inner().next().unwrap();\n\n assert!(escaped.as_rule() == Rule::string_escaped_predefined);\n\n\n\n let unescaped = match escaped.as_str() {\n\n \"n\" => \"\\n\",\n\n \"r\" => \"\\r\",\n", "file_path": "libs/datamodel/schema-ast/src/parser/parse_expression.rs", "rank": 73, "score": 178240.11434522382 }, { "content": "/// Convert a PrismaValue into Bson, with a special case for `_count` aggregation filter.\n\n///\n\n/// When converting the value of a `_count` aggregation filter for a field that's _not_ numerical,\n\n/// we force the `TypeIdentifier` to be `Int` to prevent panics.\n\nfn into_bson_coerce_count(sf: &ScalarFieldRef, value: PrismaValue, is_count_aggregation: bool) -> crate::Result<Bson> {\n\n if is_count_aggregation && !sf.is_numeric() {\n\n (&TypeIdentifier::Int, value).into_bson()\n\n } else {\n\n (sf, value).into_bson()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub(crate) struct FilterPrefix {\n\n parts: Vec<String>,\n\n /// Whether the `target` should be rendered by the `render_with` method\n\n ignore_target: bool,\n\n}\n\n\n\nimpl FilterPrefix {\n\n pub fn append_cloned<T>(&self, elem: T) -> Self\n\n where\n\n T: Into<String>,\n\n {\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/filter.rs", "rank": 74, "score": 176703.1168593484 }, { "content": "pub fn invalid_connection_string_description(error_details: &str) -> String {\n\n let docs = r#\"https://www.prisma.io/docs/reference/database-reference/connection-urls\"#;\n\n\n\n let details = formatdoc! {r#\"\n\n {} in database URL. Please refer to the documentation in {} for constructing a correct\n\n connection string. 
In some cases, certain characters must be escaped. Please\n\n check the string for any illegal characters.\"#, error_details, docs};\n\n\n\n details.replace('\\n', \" \")\n\n}\n\n\n", "file_path": "libs/user-facing-errors/src/quaint.rs", "rank": 75, "score": 176453.37036208698 }, { "content": "fn preview_features_guardrail(args: &HashMap<&str, (Span, ValueValidator<'_>)>, diagnostics: &mut Diagnostics) {\n\n let arg = args.get(PREVIEW_FEATURES_KEY);\n\n\n\n if let Some(val) = arg {\n\n let span = val.0;\n\n if let Ok(features) = val.1.as_array().to_str_vec() {\n\n if features.is_empty() {\n\n return;\n\n }\n\n }\n\n let msg = \"Preview features are only supported in the generator block. Please move this field to the generator block.\";\n\n diagnostics.push_error(DatamodelError::new(std::borrow::Cow::Borrowed(msg), span));\n\n }\n\n}\n", "file_path": "libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs", "rank": 76, "score": 176236.14116292688 }, { "content": "fn convert_op_class(raw: crate::value_validator::OperatorClass<'_>, ctx: &mut Context<'_>) -> OperatorClassStore {\n\n match raw {\n\n crate::value_validator::OperatorClass::Constant(class) => OperatorClassStore::from(class),\n\n crate::value_validator::OperatorClass::Raw(s) => OperatorClassStore::raw(ctx.interner.intern(s)),\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/attributes.rs", "rank": 77, "score": 176178.456134061 }, { "content": "fn parse_base64_string(s: &str) -> Result<String, ConnectorError> {\n\n match base64::decode(s) {\n\n Ok(bytes) => match String::from_utf8(bytes) {\n\n Ok(s) => Ok(s),\n\n Err(e) => Err(ConnectorError::user_facing(SchemaParserError {\n\n full_error: e.to_string(),\n\n })),\n\n },\n\n Err(_) => Ok(String::from(s)),\n\n }\n\n}\n", "file_path": "migration-engine/cli/src/commands.rs", "rank": 78, "score": 176092.40308029405 }, { "content": "fn compound_object_name(alias: Option<&String>, from_fields: &[ScalarFieldRef]) -> String {\n\n 
alias.map(capitalize).unwrap_or_else(|| {\n\n let field_names: Vec<String> = from_fields.iter().map(|field| capitalize(&field.name)).collect();\n\n field_names.join(\"\")\n\n })\n\n}\n\n\n", "file_path": "query-engine/schema-builder/src/input_types/mod.rs", "rank": 79, "score": 176076.41795908648 }, { "content": "/// Lowercases first character.\n\n/// Assumes 1-byte characters!\n\nfn decapitalize(s: &str) -> String {\n\n let first_char = s.chars().next().unwrap();\n\n format!(\"{}{}\", first_char.to_lowercase(), &s[1..])\n\n}\n", "file_path": "libs/datamodel/core/src/common/preview_features.rs", "rank": 80, "score": 175777.71763314932 }, { "content": "fn reformat(input: &str) -> String {\n\n datamodel::reformat(input, 2).unwrap_or_else(|_| input.to_owned())\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/reformat/reformat.rs", "rank": 81, "score": 175777.71763314932 }, { "content": "fn invalid_connection_string_description(error_details: impl Display) -> String {\n\n let docs = r#\"https://www.prisma.io/docs/reference/database-reference/connection-urls\"#;\n\n\n\n format! {r#\"{} in database URL. Please refer to the documentation in {} for constructing a correct connection string. In some cases, certain characters must be escaped. 
Please check the string for any illegal characters.\"#, error_details, docs}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::ConnectorError;\n\n\n\n #[test]\n\n fn connector_error_has_the_expected_size() {\n\n assert_eq!(std::mem::size_of::<ConnectorError>(), std::mem::size_of::<*mut ()>());\n\n }\n\n}\n", "file_path": "migration-engine/connectors/migration-connector/src/error.rs", "rank": 82, "score": 174724.10022086313 }, { "content": "#[cfg(not(debug_assertions))]\n\nfn indent(_: usize) -> String {\n\n String::from(\" \")\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 83, "score": 173519.82170874573 }, { "content": "pub fn trace_parent_to_string(context: &SpanContext) -> String {\n\n let trace_id = context.trace_id().to_hex();\n\n let span_id = context.span_id().to_hex();\n\n\n\n // see https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n\n format!(\"traceparent=00-{}-{}-01\", trace_id, span_id)\n\n}\n\n\n", "file_path": "query-engine/connectors/sql-query-connector/src/sql_trace.rs", "rank": 84, "score": 173049.47020568667 }, { "content": "fn format_line_number(line_number: usize) -> colored::ColoredString {\n\n if line_number > 0 {\n\n format!(\"{:2} | \", line_number).bold().bright_blue()\n\n } else {\n\n \" | \".bold().bright_blue()\n\n }\n\n}\n", "file_path": "libs/datamodel/diagnostics/src/pretty_print.rs", "rank": 85, "score": 172621.07142829377 }, { "content": "/// Renders an AST to a string.\n\npub fn render_schema_ast_to_string(schema: &ast::SchemaAst) -> String {\n\n let mut writable_string = String::with_capacity(schema.tops.len() * 20);\n\n\n\n render_schema_ast_to(&mut writable_string, schema, 2);\n\n\n\n writable_string\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/lib.rs", "rank": 86, "score": 172465.1606343754 }, { "content": "pub fn string_to_base64(str: &str) -> String {\n\n encode(str.as_bytes())\n\n}\n", "file_path": 
"query-engine/connector-test-kit-rs/query-engine-tests/src/utils/bytes.rs", "rank": 87, "score": 171426.83578635863 }, { "content": "fn render_schema_json(schema: &str) -> String {\n\n let config = parse_configuration(schema);\n\n datamodel::mcf::render_sources_to_json(&config.datasources)\n\n}\n", "file_path": "libs/datamodel/core/tests/config/datasources.rs", "rank": 88, "score": 171355.2753831625 }, { "content": "fn pascal_case(input: &str) -> String {\n\n let mut c = input.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/parser-database/src/walkers/relation.rs", "rank": 89, "score": 171355.2753831625 }, { "content": "fn camel_case(input: &str) -> String {\n\n let mut c = input.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_lowercase().collect::<String>() + c.as_str(),\n\n }\n\n}\n", "file_path": "libs/datamodel/parser-database/src/walkers/relation.rs", "rank": 90, "score": 171355.2753831625 }, { "content": "#[cfg(debug_assertions)]\n\nfn indent(depth: usize) -> String {\n\n \" \".repeat(4 * depth)\n\n}\n\n\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/logger.rs", "rank": 91, "score": 171355.2753831625 }, { "content": "fn sqlite_test_file(db_name: &str) -> String {\n\n static WORKSPACE_ROOT: Lazy<std::path::PathBuf> = Lazy::new(|| {\n\n std::env::var(\"WORKSPACE_ROOT\")\n\n .map(|root| std::path::Path::new(&root).join(\"db\"))\n\n .unwrap_or_else(|_| {\n\n let dir = std::env::temp_dir().join(\"prisma_tests_workspace_root\");\n\n let path = dir.to_string_lossy().into_owned();\n\n\n\n std::fs::create_dir_all(&path).expect(\"failed to create WORKSPACE_ROOT directory\");\n\n\n\n path.into()\n\n })\n\n });\n\n\n\n let file_path = WORKSPACE_ROOT.join(db_name);\n\n\n\n // Truncate the file.\n\n std::fs::File::create(&file_path).expect(\"Failed to create or truncate SQLite database.\");\n\n\n\n 
file_path.to_string_lossy().into_owned()\n\n}\n", "file_path": "libs/test-setup/src/sqlite.rs", "rank": 92, "score": 171355.2753831625 }, { "content": "fn reformat(input: &str) -> String {\n\n datamodel::reformat(input, 2).unwrap_or_else(|_| input.to_owned())\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/reformat/reformat_implicit_relations.rs", "rank": 93, "score": 171355.2753831625 }, { "content": "#[test_connector(capabilities(Enums))]\n\nfn changing_all_values_of_enums_used_in_defaults_works(api: TestApi) {\n\n let dm1 = r#\"\n\n model Cat {\n\n id Int @id\n\n morningMood CatMood @default(HUNGRY)\n\n alternateMorningMood CatMood @default(HUNGRY)\n\n afternoonMood CatMood @default(HAPPY)\n\n eveningMood CatMood @default(HUNGRY)\n\n defaultMood CatMood\n\n }\n\n\n\n enum CatMood {\n\n HAPPY\n\n HUNGRY\n\n }\n\n \"#;\n\n\n\n api.schema_push_w_datasource(dm1).send().assert_green();\n\n\n\n let dm2 = r#\"\n", "file_path": "migration-engine/migration-engine-tests/tests/migrations/enums.rs", "rank": 94, "score": 171294.75126873387 }, { "content": "pub fn doc_comments_to_string(comments: &[String]) -> Option<Comment> {\n\n if comments.is_empty() {\n\n None\n\n } else {\n\n Some(Comment {\n\n text: comments.join(\"\\n\"),\n\n })\n\n }\n\n}\n", "file_path": "libs/datamodel/schema-ast/src/parser/parse_comments.rs", "rank": 95, "score": 170729.13047093732 }, { "content": "/// Convenience date string (UTC, RFC 3339) constructor for the datetime right now,\n\n/// for cases you don't care about the concrete DateTime in the tests.\n\npub fn now() -> String {\n\n Utc::now().to_rfc3339()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/utils/time.rs", "rank": 96, "score": 170632.8162877729 }, { "content": "/// Most basic datamodel containing only a model with ID\n\n/// for the most rudimentary testing.\n\npub fn generic() -> String {\n\n let schema = indoc! 
{\n\n \"model TestModel {\n\n #id(id, Int, @id)\n\n field String?\n\n }\"\n\n };\n\n\n\n schema.to_owned()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/schemas/basic.rs", "rank": 97, "score": 170627.07315753424 }, { "content": "/// User model with some basic fields and unique constraints.\n\npub fn user() -> String {\n\n let schema = indoc! {\n\n \"model User {\n\n #id(id, Int, @id)\n\n first_name String\n\n last_name String\n\n email String @unique\n\n birthday DateTime?\n\n\n\n @@unique([first_name, last_name])\n\n }\"\n\n };\n\n\n\n schema.to_owned()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/schemas/basic.rs", "rank": 98, "score": 170627.07315753424 }, { "content": "/// Basic Test model containing a single json field.\n\npub fn json() -> String {\n\n let schema = indoc! {\n\n \"model TestModel {\n\n #id(id, Int, @id)\n\n json Json\n\n }\"\n\n };\n\n\n\n schema.to_owned()\n\n}\n\n\n", "file_path": "query-engine/connector-test-kit-rs/query-engine-tests/src/schemas/json.rs", "rank": 99, "score": 170627.07315753424 } ]
Rust
sdk/tests/t_nft_contract.rs
fgfm999/trampoline
e7c2f0172087973f8fc0b4884e459fdd5855c235
use std::path::Path; use ckb_hash::blake2b_256; use trampoline_sdk::ckb_types::packed::{CellInput, CellOutput}; use ckb_jsonrpc_types::JsonBytes; use trampoline_sdk::chain::{MockChain, MockChainTxProvider as ChainRpc}; use trampoline_sdk::ckb_types::{ self, bytes::Bytes, core::{TransactionBuilder, TransactionView}, error::Error, packed::*, prelude::*, H256, }; use trampoline_sdk::contract::*; use trampoline_sdk::contract::{builtins::t_nft::*, generator::*}; use trampoline_sdk::contract::{schema::*, ContractSource}; fn _assert_script_error(err: Error, err_code: i8) { let error_string = err.to_string(); assert!( error_string.contains(format!("error code {} ", err_code).as_str()), "error_string: {}, expected_error_code: {}", error_string, err_code ); } fn _generate_always_success_lock( args: Option<ckb_types::packed::Bytes>, ) -> ckb_types::packed::Script { let data: Bytes = ckb_always_success_script::ALWAYS_SUCCESS.to_vec().into(); let data_hash = H256::from(blake2b_256(data.to_vec().as_slice())); ckb_types::packed::Script::default() .as_builder() .args(args.unwrap_or([0u8].pack())) .code_hash(data_hash.pack()) .hash_type(ckb_types::core::ScriptHashType::Data1.into()) .build() } fn gen_nft_contract() -> TrampolineNFTContract { let out_dir = std::env::var_os("OUT_DIR").unwrap(); let path_to_nft_bin = Path::new(&out_dir).join("trampoline-nft"); let bin = ContractSource::load_from_path(path_to_nft_bin).unwrap(); let mut contract = TrampolineNFTContract::default(); contract.code = Some(JsonBytes::from_bytes(bin)); contract } fn _gen_tnft_cell_output(contract: &TrampolineNFTContract) -> CellOutput { let lock = contract .lock .clone() .unwrap_or(_generate_always_success_lock(None).into()); CellOutput::new_builder() .capacity(200_u64.pack()) .type_( Some(ckb_types::packed::Script::from( contract.as_script().unwrap(), )) .pack(), ) .lock(lock.into()) .build() } fn _generate_mock_tx( inputs: Vec<CellInput>, outputs: Vec<CellOutput>, outputs_data: 
Vec<ckb_types::packed::Bytes>, ) -> TransactionView { TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data) .build() } fn genesis_id_from(input: OutPoint) -> GenesisId { let seed_tx_hash = input.tx_hash(); let seed_idx = input.index(); let mut seed = Vec::with_capacity(36); seed.extend_from_slice(seed_tx_hash.as_slice()); seed.extend_from_slice(seed_idx.as_slice()); let hash = blake2b_256(&seed); GenesisId::from_mol(hash.pack()) } type NftArgs = SchemaPrimitiveType<Bytes, ckb_types::packed::Bytes>; type NftField = ContractCellField<NftArgs, TrampolineNFT>; #[test] fn test_success_deploy() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let genesis_seed = genesis_id_from(tx_input_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; t_nft_data.genesis_id = genesis_seed.clone(); NftField::Data(t_nft_data) } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); 
assert!(is_valid); } #[test] fn test_invalid_mismatched_genesis_id() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let _tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let genesis_id_seed_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![2_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let genesis_seed = genesis_id_from(genesis_id_seed_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; t_nft_data.genesis_id = genesis_seed.clone(); NftField::Data(t_nft_data) } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); assert!(!is_valid); } #[test] fn test_invalid_mint_of_pre_existing_tnft() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let _tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let input_tnft_seed = 
chain.deploy_random_cell_with_default_lock(2000, Some(vec![2_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let tnft_input_cell = CellOutput::new_builder() .lock(minter_lock_script.clone().unwrap()) .capacity(150_u64.pack()) .type_(Some(Script::from(tnft_contract.as_script().unwrap())).pack()) .build(); let tnft_input_cell_data = TrampolineNFT { genesis_id: genesis_id_from(input_tnft_seed), cid: Default::default(), }; let _tnft_input_outpoint = chain.deploy_cell_output(tnft_input_cell_data.to_bytes(), tnft_input_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let NftField::ResolvedInputs(inputs) = ctx.load(TransactionField::ResolvedInputs) { if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; let genesis_id = genesis_id_from(inputs.first().unwrap().out_point.clone()); t_nft_data.genesis_id = genesis_id; NftField::Data(t_nft_data) } else { nft } } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); assert!(is_valid); }
use std::path::Path; use ckb_hash::blake2b_256; use trampoline_sdk::ckb_types::packed::{CellInput, CellOutput}; use ckb_jsonrpc_types::JsonBytes; use trampoline_sdk::chain::{MockChain, MockChainTxProvider as ChainRpc}; use trampoline_sdk::ckb_types::{ self, bytes::Bytes, core::{TransactionBuilder, TransactionView}, error::Error, packed::*, prelude::*, H256, }; use trampoline_sdk::contract::*; use trampoline_sdk::contract::{builtins::t_nft::*, generator::*}; use trampoline_sdk::contract::{schema::*, ContractSource}; fn _assert_script_error(err: Error, err_code: i8) { let error_string = err.to_string(); assert!( error_string.contains(format!("error code {} ", err_code).as_str()), "error_string: {}, expected_error_code: {}", error_string, err_code ); } fn _generate_always_success_lock( args: Option<ckb_types::packed::Bytes>, ) -> ckb_types::packed::Script { let data: Bytes = ckb_always_success_script::ALWAYS_SUCCESS.to_vec().into(); let data_hash = H256::from(blake2b_256(data.to_vec().as_slice())); ckb_types::packed::Script::default() .as_builder() .args(args.unwrap_or([0u8].pack())) .code_hash(data_hash.pack()) .hash_type(ckb_types::core::ScriptHashType::Data1.into()) .build() } fn gen_nft_contract() -> TrampolineNFTContract { let out_dir = std::env::var_os("OUT_DIR").unwrap(); let path_to_nft_bin = Path::new(&out_dir).join("trampoline-nft"); let bin = ContractSource::load_from_path(path_to_nft_bin).unwrap(); let mut contract = TrampolineNFTContract::default(); contract.code = Some(JsonBytes::from_bytes(bin)); contract } fn _gen_tnft_cell_output(contract: &TrampolineNFTContract) -> CellOutput { let lock = contract .lock .clone() .unwrap_or(_generate_always_success_lock(None).into()); CellOutput::new_builder() .capacity(200_u64.pack()) .type_( Some(ckb_types::packed::Script::from( contract.as_script().unwrap(), )) .pack(), ) .lock(lock.into()) .build() } fn _generate_mock_tx( inputs: Vec<CellInput>, outputs: Vec<CellOutput>, outputs_data: Vec<ckb_types::packed::Byte
fn genesis_id_from(input: OutPoint) -> GenesisId { let seed_tx_hash = input.tx_hash(); let seed_idx = input.index(); let mut seed = Vec::with_capacity(36); seed.extend_from_slice(seed_tx_hash.as_slice()); seed.extend_from_slice(seed_idx.as_slice()); let hash = blake2b_256(&seed); GenesisId::from_mol(hash.pack()) } type NftArgs = SchemaPrimitiveType<Bytes, ckb_types::packed::Bytes>; type NftField = ContractCellField<NftArgs, TrampolineNFT>; #[test] fn test_success_deploy() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let genesis_seed = genesis_id_from(tx_input_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; t_nft_data.genesis_id = genesis_seed.clone(); NftField::Data(t_nft_data) } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); assert!(is_valid); } #[test] fn test_invalid_mismatched_genesis_id() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let 
minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let _tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let genesis_id_seed_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![2_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let genesis_seed = genesis_id_from(genesis_id_seed_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; t_nft_data.genesis_id = genesis_seed.clone(); NftField::Data(t_nft_data) } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); assert!(!is_valid); } #[test] fn test_invalid_mint_of_pre_existing_tnft() { let mut tnft_contract = gen_nft_contract(); let mut chain = MockChain::default(); let minter_lock_cell = chain.get_default_script_outpoint(); let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into()); let _tx_input_cell = chain.deploy_random_cell_with_default_lock(2000, Some(vec![1_u8].into())); let input_tnft_seed = chain.deploy_random_cell_with_default_lock(2000, Some(vec![2_u8].into())); let tnft_code_cell = tnft_contract.as_code_cell(); let tnft_code_cell_outpoint = chain.create_cell(tnft_code_cell.0, 
tnft_code_cell.1); tnft_contract.source = Some(ContractSource::Chain(tnft_code_cell_outpoint.into())); let tnft_input_cell = CellOutput::new_builder() .lock(minter_lock_script.clone().unwrap()) .capacity(150_u64.pack()) .type_(Some(Script::from(tnft_contract.as_script().unwrap())).pack()) .build(); let tnft_input_cell_data = TrampolineNFT { genesis_id: genesis_id_from(input_tnft_seed), cid: Default::default(), }; let _tnft_input_outpoint = chain.deploy_cell_output(tnft_input_cell_data.to_bytes(), tnft_input_cell); tnft_contract.add_input_rule(move |_tx| -> CellQuery { CellQuery { _query: QueryStatement::Single(CellQueryAttribute::LockHash( minter_lock_script .clone() .unwrap() .calc_script_hash() .into(), )), _limit: 1, } }); tnft_contract.add_output_rule(ContractField::Data, move |ctx| -> NftField { let nft: NftField = ctx.load(ContractField::Data); if let NftField::ResolvedInputs(inputs) = ctx.load(TransactionField::ResolvedInputs) { if let ContractCellField::Data(nft_data) = nft { let mut t_nft_data = nft_data; let genesis_id = genesis_id_from(inputs.first().unwrap().out_point.clone()); t_nft_data.genesis_id = genesis_id; NftField::Data(t_nft_data) } else { nft } } else { nft } }); let chain_rpc = ChainRpc::new(chain); let generator = Generator::new() .chain_service(&chain_rpc) .query_service(&chain_rpc) .pipeline(vec![&tnft_contract]); let new_mint_tx = generator.generate(); let is_valid = chain_rpc.verify_tx(new_mint_tx.tx.into()); assert!(is_valid); }
s>, ) -> TransactionView { TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data) .build() }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn test_contract_pack_and_unpack_data() {\n\n let mut sudt_contract = gen_sudt_contract(None, None);\n\n\n\n sudt_contract.set_args(OwnerLockHash::from_mol(Byte32::default()));\n\n sudt_contract.set_data(SudtAmount::from_mol(1200_u128.pack()));\n\n\n\n let uint128_data: u128 = sudt_contract.read_data().to_mol().unpack();\n\n assert_eq!(uint128_data, 1200_u128);\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 2, "score": 122859.36566707902 }, { "content": "pub fn parse_hex(mut input: &str) -> Result<Vec<u8>> {\n\n if input.starts_with(\"0x\") || input.starts_with(\"0X\") {\n\n input = &input[2..];\n\n }\n\n if input.len() % 2 != 0 {\n\n return Err(anyhow!(\"Invalid hex string lenth: {}\", input.len()));\n\n }\n\n let mut bytes = vec![0u8; input.len() / 2];\n\n hex_decode(input.as_bytes(), &mut bytes)\n\n .map_err(|err| anyhow!(format!(\"parse hex string failed: {:?}\", err)))?;\n\n Ok(bytes)\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 103992.1961444084 }, { "content": "fn generate_simple_udt_cell(sudt_contract: &SudtContract) -> CellOutput {\n\n let lock = sudt_contract\n\n .lock\n\n .clone()\n\n .unwrap_or(generate_always_success_lock(None).into());\n\n CellOutput::new_builder()\n\n .capacity(100_u64.pack())\n\n .type_(\n\n Some(ckb_types::packed::Script::from(\n\n sudt_contract.as_script().unwrap(),\n\n ))\n\n .pack(),\n\n )\n\n .lock(lock.into())\n\n .build()\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 6, "score": 103206.4340239079 }, { "content": "fn generate_always_success_lock(\n\n args: Option<ckb_types::packed::Bytes>,\n\n) -> ckb_types::packed::Script {\n\n let data: Bytes = ckb_always_success_script::ALWAYS_SUCCESS.to_vec().into();\n\n let data_hash = H256::from(blake2b_256(data.to_vec().as_slice()));\n\n ckb_types::packed::Script::default()\n\n .as_builder()\n\n .args(args.unwrap_or([0u8].pack()))\n\n .code_hash(data_hash.pack())\n\n 
.hash_type(ckb_types::core::ScriptHashType::Data1.into())\n\n .build()\n\n}\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 8, "score": 92301.97519618153 }, { "content": "#[test]\n\nfn test_add_output_rule() {\n\n let mut sudt_contract = gen_sudt_contract(None, None);\n\n\n\n sudt_contract.add_output_rule(\n\n ContractField::Data,\n\n |ctx| -> ContractCellField<OwnerLockHash, SudtAmount> {\n\n let amount: ContractCellField<OwnerLockHash, SudtAmount> =\n\n ctx.load(ContractField::Data);\n\n\n\n if let ContractCellField::Data(amount) = amount {\n\n let amt: u128 = amount.into();\n\n ContractCellField::Data(SudtAmount::from(amt + 17))\n\n } else {\n\n amount\n\n }\n\n },\n\n );\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 9, "score": 92282.91394642844 }, { "content": "pub fn gen_bindings(input: impl Into<PathBuf>, output: impl Into<PathBuf>) -> SchemaResult<()> {\n\n let mut compiler = Compiler::new();\n\n compiler.input_schema_file(input.into().as_path());\n\n compiler.output_dir(output.into().as_path());\n\n compiler.generate_code(Language::Rust);\n\n compiler.run().map_err(SchemaError::Molecule)\n\n}\n", "file_path": "src/schema.rs", "rank": 12, "score": 90009.87554544045 }, { "content": "#[test]\n\nfn test_sudt_data_hash_gen() {\n\n let sudt_contract = gen_sudt_contract(None, None);\n\n\n\n let code_hash = sudt_contract.data_hash().unwrap().pack();\n\n let hash_hex_str = format!(\"0x{}\", hex::encode(&code_hash.raw_data()));\n\n assert_eq!(EXPECTED_SUDT_HASH, hash_hex_str.as_str());\n\n}\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 13, "score": 89524.948385713 }, { "content": "#[test]\n\nfn test_sudt_data_hash_gen_json() {\n\n let sudt_contract = gen_sudt_contract(None, None);\n\n\n\n let json_code_hash = ckb_jsonrpc_types::Byte32::from(sudt_contract.data_hash().unwrap().pack());\n\n\n\n let as_json_hex_str = serde_json::to_string(&json_code_hash).unwrap();\n\n\n\n assert_eq!(\n\n &format!(\"\\\"{}\\\"\", 
EXPECTED_SUDT_HASH),\n\n as_json_hex_str.as_str()\n\n );\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 14, "score": 86948.11989569868 }, { "content": "fn main() {\n\n includedir_codegen::start(\"DAPP_FILES\")\n\n .dir(\"templates/\", Compression::Gzip)\n\n .build(\"templates.rs\")\n\n .unwrap();\n\n}\n", "file_path": "build.rs", "rank": 15, "score": 85280.19472199045 }, { "content": "fn main() {\n\n let bins = [\"simple_udt\", \"trampoline-nft\"];\n\n let out_dir = env::var_os(\"OUT_DIR\").unwrap();\n\n bins.into_iter().for_each(|bin| {\n\n let dest_path = Path::new(&out_dir).join(bin);\n\n let dest_bytes = fs::read(format!(\"./binaries/{}\", bin)).unwrap();\n\n assert!(!dest_bytes.is_empty());\n\n fs::write(&dest_path, dest_bytes)\n\n .unwrap_or_else(|_| panic!(\"Unable to write {} to output during build\", bin));\n\n });\n\n println!(\"cargo:rerun-if-changed=contract/builtins/\");\n\n}\n", "file_path": "sdk/build.rs", "rank": 16, "score": 82038.4090670749 }, { "content": "fn gen_sudt_contract(\n\n minter_lock: Option<ckb_types::packed::Script>,\n\n initial_supply: Option<u128>,\n\n) -> SudtContract {\n\n let out_dir = std::env::var_os(\"OUT_DIR\").unwrap();\n\n\n\n let path_to_sudt_bin = Path::new(&out_dir).join(\"simple_udt\");\n\n\n\n let lock = {\n\n if let Some(lock_script) = minter_lock {\n\n Some(JsonBytes::from_bytes(\n\n lock_script.calc_script_hash().as_bytes(),\n\n ))\n\n } else {\n\n Some(JsonBytes::from_bytes(Byte32::default().as_bytes()))\n\n }\n\n };\n\n\n\n let init_supply = {\n\n if let Some(supply) = initial_supply {\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 17, "score": 76267.4599380387 }, { "content": "#[test]\n\nfn test_sudt_issuance_tx_with_contract_pipeline() {\n\n let mut chain = MockChain::default();\n\n\n\n // Create always success lock cell and add to chain\n\n let minter_lock_code_cell_data: Bytes =\n\n ckb_always_success_script::ALWAYS_SUCCESS.to_vec().into();\n\n let minter_lock_cell = 
chain.deploy_cell_with_data(minter_lock_code_cell_data);\n\n let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into());\n\n let minter_lock_hash = minter_lock_script.clone().unwrap().calc_script_hash();\n\n chain.create_cell(\n\n CellOutput::new_builder()\n\n .capacity(2000_u64.pack())\n\n .lock(minter_lock_script.clone().unwrap())\n\n .build(),\n\n Default::default(),\n\n );\n\n\n\n // Deploy SUDT to chain\n\n let mut sudt_contract = gen_sudt_contract(minter_lock_script, Some(1500));\n\n let sudt_code_cell = sudt_contract.as_code_cell();\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 18, "score": 71011.5349116699 }, { "content": "pub fn hex_encode(src: &[u8], dst: &mut [u8]) -> Result<()> {\n\n let len = src.len().checked_mul(2).unwrap();\n\n if dst.len() < len {\n\n return Err(anyhow!(\n\n \"Invalid length in dst {}, expected: {}\",\n\n dst.len(),\n\n len\n\n ));\n\n }\n\n\n\n hex::encode_to_slice(src, dst)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 68269.25487713772 }, { "content": "pub fn hex_decode(src: &[u8], dst: &mut [u8]) -> Result<()> {\n\n if src.is_empty() {\n\n return Err(anyhow!(\"Invalid length in dst {}\", dst.len()));\n\n }\n\n let len = dst.len().checked_mul(2).unwrap();\n\n if src.len() < len || ((src.len() & 1) != 0) {\n\n return Err(anyhow!(\n\n \"Invalid length in dst {}, expected: {}\",\n\n dst.len(),\n\n len\n\n ));\n\n }\n\n hex::decode_to_slice(src, dst)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/utils.rs", "rank": 20, "score": 68269.25487713772 }, { "content": "fn generate_mock_tx(\n\n outputs: Vec<CellOutput>,\n\n outputs_data: Vec<ckb_types::packed::Bytes>,\n\n) -> TransactionView {\n\n TransactionBuilder::default()\n\n .outputs(outputs)\n\n .outputs_data(outputs_data)\n\n .build()\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 22, "score": 68180.20298860488 }, { "content": "#[test]\n\nfn test_failed_issuance_tx_no_permissions() {\n\n let mut chain = 
MockChain::default();\n\n\n\n // Create always success lock cell and add to chain\n\n let minter_lock_code_cell_data: Bytes =\n\n ckb_always_success_script::ALWAYS_SUCCESS.to_vec().into();\n\n let minter_lock_cell = chain.deploy_cell_with_data(minter_lock_code_cell_data);\n\n let minter_lock_script = chain.build_script(&minter_lock_cell, vec![1_u8].into());\n\n let non_minter_lock = chain.build_script(&minter_lock_cell, vec![200_u8].into());\n\n let non_minter_lock_hash = non_minter_lock.clone().unwrap().calc_script_hash();\n\n\n\n chain.create_cell(\n\n CellOutput::new_builder()\n\n .capacity(2000_u64.pack())\n\n .lock(non_minter_lock.unwrap())\n\n .build(),\n\n Default::default(),\n\n );\n\n\n\n // Deploy SUDT to chain\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 24, "score": 64430.13472450059 }, { "content": "#[test]\n\nfn test_update_sudt_with_rule_pipeline() {\n\n // Load SUDT contract\n\n let mut sudt_contract = gen_sudt_contract(None, None);\n\n // Create SUDT Cell Output\n\n let sudt_cell = generate_simple_udt_cell(&sudt_contract);\n\n // Mock Transaction with a single output\n\n let transaction = generate_mock_tx(vec![sudt_cell], vec![2000_u128.to_le_bytes().pack()]);\n\n\n\n // Add output rule to sudt contract to increase balance by 17\n\n sudt_contract.add_output_rule(\n\n ContractField::Data,\n\n |ctx| -> ContractCellField<OwnerLockHash, SudtAmount> {\n\n let amount: ContractCellField<OwnerLockHash, SudtAmount> =\n\n ctx.load(ContractField::Data);\n\n if let ContractCellField::Data(amount) = amount {\n\n let amt: u128 = amount.into();\n\n ContractCellField::Data(SudtAmount::from(amt + 17))\n\n } else {\n\n amount\n\n }\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 25, "score": 64430.13472450059 }, { "content": "fn find_ancestor(curr_path: &mut PathBuf, target: &str) -> Option<PathBuf> {\n\n let target_path = curr_path.join(target);\n\n if target_path.exists() {\n\n Some(target_path)\n\n } else if curr_path.pop() {\n\n 
find_ancestor(curr_path, target)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/project.rs", "rank": 26, "score": 63904.02839103803 }, { "content": "pub trait JsonByteConversion {\n\n fn to_json_bytes(&self) -> JsonBytes;\n\n fn from_json_bytes(bytes: JsonBytes) -> Self;\n\n}\n\n\n", "file_path": "sdk/src/contract/schema.rs", "rank": 29, "score": 48745.17672132572 }, { "content": "pub trait BytesConversion: MolConversion {\n\n fn from_bytes(bytes: Bytes) -> Self;\n\n\n\n fn to_bytes(&self) -> Bytes;\n\n}\n\n\n\n// TO DO: Think about the tradeoffs of deriving these traits?\n\n// This is a wrapper type for schema primitive types that works\n\n// for all primitives that have conversion trait implemented.\n\n// Saves from having to implement mol conversion traits etc\n\n#[derive(Clone, Debug, Default, Eq, PartialEq, Hash)]\n\npub struct SchemaPrimitiveType<T, M> {\n\n pub inner: T,\n\n _entity_type: std::marker::PhantomData<M>,\n\n}\n\n\n\nimpl<T, M> SchemaPrimitiveType<T, M>\n\nwhere\n\n M: Entity + Unpack<T>,\n\n T: Pack<M>,\n", "file_path": "sdk/src/contract/schema.rs", "rank": 30, "score": 46922.433676893066 }, { "content": "#[test]\n\nfn test_success() {\n\n assert!(true);\n\n}", "file_path": "templates/generators/src/lib.rs", "rank": 31, "score": 45706.521752963374 }, { "content": "#[derive(Debug, Clone, Default)]\n\nstruct InnerOwnerLockHash([u8; 32]);\n\n\n", "file_path": "sdk/src/contract/builtins/sudt/mod.rs", "rank": 32, "score": 43661.07291379749 }, { "content": "pub fn random_hash() -> Byte32 {\n\n let mut rng = thread_rng();\n\n let mut buf = [0u8; 32];\n\n rng.fill(&mut buf);\n\n buf.pack()\n\n}\n\n\n", "file_path": "sdk/src/chain/mock_chain/mod.rs", "rank": 33, "score": 38207.197668612374 }, { "content": "pub fn random_out_point() -> OutPoint {\n\n OutPoint::new_builder().tx_hash(random_hash()).build()\n\n}\n\n\n\npub type CellOutputWithData = (CellOutput, Bytes);\n\n\n\npub struct MockChain {\n\n pub cells: HashMap<OutPoint, 
CellOutputWithData>,\n\n pub outpoint_txs: HashMap<OutPoint, TransactionInfo>,\n\n pub headers: HashMap<Byte32, HeaderView>,\n\n pub epoches: HashMap<Byte32, EpochExt>,\n\n pub cells_by_data_hash: HashMap<Byte32, OutPoint>,\n\n pub cells_by_lock_hash: HashMap<Byte32, Vec<OutPoint>>,\n\n pub cells_by_type_hash: HashMap<Byte32, Vec<OutPoint>>,\n\n pub debug: bool,\n\n messages: Arc<Mutex<Vec<Message>>>,\n\n}\n\n\n\nimpl Default for MockChain {\n\n fn default() -> Self {\n", "file_path": "sdk/src/chain/mock_chain/mod.rs", "rank": 34, "score": 38207.197668612374 }, { "content": "// Adapted from HexParser in ckb-cli/utils/arg_parser\n\npub fn hex_string(src: &[u8]) -> String {\n\n let mut buffer = vec![0; src.len() * 2];\n\n hex_encode(src, &mut buffer)\n\n .map(|_| unsafe { String::from_utf8_unchecked(buffer) })\n\n .expect(\"hex_string\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 35, "score": 37684.12266155357 }, { "content": "use includedir_codegen::Compression;\n", "file_path": "build.rs", "rank": 36, "score": 35783.61145777151 }, { "content": "use std::path::Path;\n\nuse std::{env, fs};\n", "file_path": "sdk/build.rs", "rank": 37, "score": 33971.751741291504 }, { "content": "{\n\n // The lock script of the cell containing contract code\n\n pub fn lock(mut self, lock: Script) -> Self {\n\n self.lock = Some(lock);\n\n self\n\n }\n\n\n\n // The type script of the cell containing contract code\n\n pub fn type_(mut self, type_: Script) -> Self {\n\n self.type_ = Some(type_);\n\n self\n\n }\n\n\n\n pub fn data_hash(&self) -> Option<H256> {\n\n if let Some(data) = &self.code {\n\n let byte_slice = data.as_bytes();\n\n\n\n let raw_hash = blake2b_256(&byte_slice);\n\n H256::from_slice(&raw_hash).ok()\n\n } else {\n", "file_path": "sdk/src/contract/mod.rs", "rank": 38, "score": 26873.737847144 }, { "content": " None\n\n }\n\n }\n\n\n\n // Returns a script structure which can be used as a lock or type script on other cells.\n\n // This is an easy way to let other 
cells use this contract\n\n pub fn as_script(&self) -> Option<ckb_jsonrpc_types::Script> {\n\n self.data_hash().map(|data_hash| {\n\n Script::from(\n\n packed::ScriptBuilder::default()\n\n .args(self.args.to_bytes().pack())\n\n .code_hash(data_hash.pack())\n\n .hash_type(ckb_types::core::ScriptHashType::Data1.into())\n\n .build(),\n\n )\n\n })\n\n }\n\n\n\n // Return a CellOutputWithData which is the code cell storing this contract's logic\n\n pub fn as_code_cell(&self) -> CellOutputWithData {\n", "file_path": "sdk/src/contract/mod.rs", "rank": 39, "score": 26872.747122619745 }, { "content": " let data: Bytes = self.code.clone().unwrap_or_default().into_bytes();\n\n let type_script = self.type_.clone().unwrap_or_default();\n\n let type_script = {\n\n if self.type_.is_some() {\n\n Some(ckb_types::packed::Script::from(type_script))\n\n } else {\n\n None\n\n }\n\n };\n\n\n\n let cell_output = CellOutputBuilder::default()\n\n .capacity((data.len() as u64).pack())\n\n .lock(self.lock.clone().unwrap_or_default().into())\n\n .type_(type_script.pack())\n\n .build();\n\n (cell_output, data)\n\n }\n\n\n\n pub fn script_hash(&self) -> Option<ckb_jsonrpc_types::Byte32> {\n\n let script: ckb_types::packed::Script = self.as_script().unwrap().into();\n", "file_path": "sdk/src/contract/mod.rs", "rank": 40, "score": 26869.877141169396 }, { "content": " let supply = supply.to_le_bytes();\n\n let mut bytes_buf = [0u8; 16];\n\n bytes_buf.copy_from_slice(&supply);\n\n Some(JsonBytes::from_vec(bytes_buf.to_vec()))\n\n } else {\n\n let supply = 0_u128.to_le_bytes();\n\n let mut bytes_buf = [0u8; 16];\n\n bytes_buf.copy_from_slice(&supply);\n\n Some(JsonBytes::from_vec(bytes_buf.to_vec()))\n\n }\n\n };\n\n\n\n let sudt_src = ContractSource::load_from_path(path_to_sudt_bin).unwrap();\n\n\n\n SudtContract {\n\n args: OwnerLockHash::from_json_bytes(lock.unwrap()),\n\n data: SudtAmount::from_json_bytes(init_supply.unwrap()),\n\n source: Some(ContractSource::Immediate(sudt_src.clone())),\n\n 
lock: None,\n\n type_: None,\n\n code: Some(JsonBytes::from_bytes(sudt_src)),\n\n output_rules: vec![],\n\n input_rules: vec![],\n\n }\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 42, "score": 26869.410250733737 }, { "content": " let sudt_code_cell_outpoint = chain.create_cell(sudt_code_cell.0, sudt_code_cell.1);\n\n sudt_contract.source = Some(ContractSource::Chain(sudt_code_cell_outpoint.into()));\n\n // Create Mint SUDT transaction, using as input a cell locked with the minter's lock script\n\n // let tx = TransactionBuilder::default()\n\n // .cell_dep(\n\n // sudt_contract\n\n // .as_cell_dep(sudt_code_cell_outpoint.into())\n\n // .into(),\n\n // )\n\n // .cell_dep(chain.find_cell_dep_for_script(&minter_lock_script.unwrap()))\n\n // .output(generate_simple_udt_cell(&sudt_contract))\n\n // .outputs_data(vec![0_u128.to_le_bytes().pack()])\n\n // .build();\n\n\n\n // Add rule to sudt output generation to increase the amount field.\n\n sudt_contract.add_output_rule(\n\n ContractField::Data,\n\n |ctx| -> ContractCellField<OwnerLockHash, SudtAmount> {\n\n let amount: ContractCellField<OwnerLockHash, SudtAmount> =\n\n ctx.load(ContractField::Data);\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 43, "score": 26868.82780565467 }, { "content": " .set_inputs(\n\n total_inputs\n\n .into_iter()\n\n .collect::<Vec<crate::ckb_types::packed::CellInput>>(),\n\n )\n\n .set_outputs_data(\n\n total_outputs_data\n\n .into_iter()\n\n .collect::<Vec<crate::ckb_types::packed::Bytes>>(),\n\n )\n\n .build();\n\n let mut idx = 0;\n\n let outputs = tx.clone().outputs().into_iter().filter_map(|output| {\n\n let self_script_hash: ckb_types::packed::Byte32 = self.script_hash().unwrap().into();\n\n\n\n if let Some(type_) = output.type_().to_opt() {\n\n if type_.calc_script_hash() == self_script_hash {\n\n return Some((idx, tx.output_with_data(idx).unwrap()));\n\n }\n\n }\n", "file_path": "sdk/src/contract/mod.rs", "rank": 44, "score": 26867.92160812189 }, { 
"content": "#[cfg(not(feature = \"script\"))]\n\n#[derive(Default)]\n\n#[cfg(not(feature = \"script\"))]\n\npub struct Contract<A, D> {\n\n pub source: Option<ContractSource>,\n\n pub data: D,\n\n pub args: A,\n\n pub lock: Option<Script>,\n\n pub type_: Option<Script>,\n\n pub code: Option<JsonBytes>,\n\n #[allow(clippy::type_complexity)]\n\n pub output_rules: Vec<OutputRule<A, D>>,\n\n pub input_rules: Vec<Box<dyn Fn(TransactionView) -> CellQuery>>,\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\nimpl<A, D> Contract<A, D>\n\nwhere\n\n D: JsonByteConversion + MolConversion + BytesConversion + Clone,\n\n A: JsonByteConversion + MolConversion + BytesConversion + Clone,\n", "file_path": "sdk/src/contract/mod.rs", "rank": 45, "score": 26867.750482915253 }, { "content": " let arg_size = self.args.to_mol().as_builder().expected_length() as u64;\n\n let data_size = self.data.to_mol().as_builder().expected_length() as u64;\n\n println!(\"DATA SIZE EXPECTED: {:?}\", data_size);\n\n let mut data = Vec::with_capacity(data_size as usize);\n\n (0..data_size as usize).into_iter().for_each(|_| {\n\n data.push(0u8);\n\n });\n\n let mut tx = TransactionBuilder::default()\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity((data_size + arg_size).pack())\n\n .type_(Some(ckb_types::packed::Script::from(self.as_script().unwrap())).pack())\n\n .build(),\n\n )\n\n .output_data(data.pack());\n\n\n\n if let Some(ContractSource::Chain(outp)) = self.source.clone() {\n\n tx = tx.cell_dep(self.as_cell_dep(outp).into());\n\n }\n\n\n", "file_path": "sdk/src/contract/mod.rs", "rank": 46, "score": 26867.543382098203 }, { "content": " let mut sudt_contract = gen_sudt_contract(minter_lock_script, Some(1500));\n\n let sudt_code_cell = sudt_contract.as_code_cell();\n\n let sudt_code_cell_outpoint = chain.create_cell(sudt_code_cell.0, sudt_code_cell.1);\n\n\n\n // Create Mint SUDT transaction, using as input a cell locked with a different user's lock script\n\n // Should fail because the user 
does not have mint permissions\n\n sudt_contract.source = Some(ContractSource::Chain(sudt_code_cell_outpoint.into()));\n\n //let fail_tx = TransactionBuilder::default().build();\n\n\n\n // Add rule to sudt output generation to increase the amount field.\n\n sudt_contract.add_output_rule(\n\n ContractField::Data,\n\n |ctx| -> ContractCellField<OwnerLockHash, SudtAmount> {\n\n let amount: ContractCellField<OwnerLockHash, SudtAmount> =\n\n ctx.load(ContractField::Data);\n\n if let ContractCellField::Data(amount) = amount {\n\n let amt: u128 = amount.into();\n\n ContractCellField::Data(SudtAmount::from(amt + 2000))\n\n } else {\n\n amount\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 47, "score": 26867.195626338 }, { "content": " ctx = ctx.clone().tx(updated_tx);\n\n (output.0, d.to_bytes())\n\n },\n\n ContractCellField::LockScript(_) => todo!(),\n\n ContractCellField::TypeScript(_) => todo!(),\n\n ContractCellField::Capacity(_) => todo!(),\n\n _ => {\n\n panic!(\"Error: Contract-level rule attempted transaction-level update.\")\n\n }\n\n }\n\n });\n\n println!(\"Output bytes of processed output: {:?}\", processed.1.pack());\n\n processed\n\n })\n\n .collect::<Vec<OutputWithData>>();\n\n\n\n let final_inner_tx = tx\n\n .as_advanced_builder()\n\n .set_outputs(\n\n outputs\n", "file_path": "sdk/src/contract/mod.rs", "rank": 48, "score": 26866.96769463945 }, { "content": " ContractCellField::Args(_) => todo!(),\n\n ContractCellField::Data(d) => {\n\n if rule.scope != ContractField::Data.into() {\n\n panic!(\"Error, mismatch of output rule scope and returned field\");\n\n }\n\n let updated_tx = ctx.get_tx();\n\n let inner_tx_view = updated_tx.tx.clone();\n\n let updated_outputs_data = inner_tx_view.outputs_with_data_iter()\n\n .enumerate().map(|(i, output)| {\n\n if i == ctx.idx {\n\n (output.0, d.to_bytes())\n\n } else {\n\n output\n\n }\n\n }).collect::<Vec<CellOutputWithData>>();\n\n let updated_inner_tx = inner_tx_view.as_advanced_builder()\n\n 
.set_outputs(updated_outputs_data.iter().map(|o| o.0.clone()).collect::<Vec<_>>())\n\n .set_outputs_data(updated_outputs_data.iter().map(|o| o.1.pack()).collect::<Vec<_>>())\n\n .build();\n\n let updated_tx = updated_tx.tx(updated_inner_tx);\n", "file_path": "sdk/src/contract/mod.rs", "rank": 49, "score": 26866.81910724377 }, { "content": " }\n\n }\n\n pub fn exec(&self, ctx: &RuleContext) -> ContractCellField<A, D> {\n\n self.rule.as_ref()(ctx.clone()) //call((ctx,))\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\n\n\npub enum ContractCellField<A, D> {\n\n Args(A),\n\n Data(D),\n\n LockScript(ckb_types::packed::Script),\n\n TypeScript(ckb_types::packed::Script),\n\n Capacity(Uint64),\n\n Inputs(Vec<CellInput>),\n\n ResolvedInputs(Vec<CellMeta>),\n\n Outputs(Vec<CellOutputWithData>),\n\n CellDeps(Vec<ckb_types::packed::CellDep>),\n\n}\n", "file_path": "sdk/src/contract/mod.rs", "rank": 50, "score": 26866.803403202044 }, { "content": " Some(script.calc_script_hash().into())\n\n }\n\n\n\n pub fn as_cell_dep(&self, out_point: OutPoint) -> CellDep {\n\n CellDep {\n\n out_point,\n\n dep_type: DepType::Code,\n\n }\n\n }\n\n\n\n // Set data of a cell that will *reference* (i.e., use) this contract\n\n pub fn set_raw_data(&mut self, data: impl Into<JsonBytes>) {\n\n self.data = D::from_json_bytes(data.into());\n\n }\n\n\n\n pub fn set_data(&mut self, data: D) {\n\n self.data = data;\n\n }\n\n\n\n // Set args of a cell that will *reference* (i.e., use) this contract\n", "file_path": "sdk/src/contract/mod.rs", "rank": 51, "score": 26866.228119283598 }, { "content": " D: JsonByteConversion + MolConversion + BytesConversion + Clone + Default,\n\n A: JsonByteConversion + MolConversion + BytesConversion + Clone,\n\n {\n\n match scope.into() {\n\n RuleScope::ContractField(field) => match field {\n\n ContractField::Args => todo!(),\n\n ContractField::Data => match self.curr_field {\n\n TransactionField::Outputs => {\n\n let data_reader = self.inner.outputs_data();\n\n let 
data_reader = data_reader.as_reader();\n\n let data = data_reader.get(self.idx);\n\n if let Some(data) = data {\n\n ContractCellField::Data(D::from_bytes(data.raw_data().to_vec().into()))\n\n } else {\n\n ContractCellField::Data(D::default())\n\n }\n\n }\n\n _ => ContractCellField::Data(D::default()),\n\n },\n\n ContractField::LockScript => todo!(),\n", "file_path": "sdk/src/contract/mod.rs", "rank": 52, "score": 26866.223871530736 }, { "content": " pub fn set_raw_args(&mut self, args: impl Into<JsonBytes>) {\n\n self.args = A::from_json_bytes(args.into());\n\n }\n\n\n\n pub fn set_args(&mut self, args: A) {\n\n self.args = args;\n\n }\n\n\n\n pub fn read_data(&self) -> D {\n\n self.data.clone()\n\n }\n\n\n\n pub fn read_args(&self) -> A {\n\n self.args.clone()\n\n }\n\n\n\n pub fn read_raw_data(&self, data: Bytes) -> D {\n\n D::from_bytes(data)\n\n }\n\n\n", "file_path": "sdk/src/contract/mod.rs", "rank": 53, "score": 26865.495889256516 }, { "content": " pub fn read_raw_args(&self, args: Bytes) -> A {\n\n A::from_bytes(args)\n\n }\n\n\n\n pub fn add_output_rule<F>(&mut self, scope: impl Into<RuleScope>, transform_func: F)\n\n where\n\n F: Fn(RuleContext) -> ContractCellField<A, D> + 'static,\n\n {\n\n self.output_rules\n\n .push(OutputRule::new(scope.into(), transform_func));\n\n }\n\n\n\n pub fn add_input_rule<F>(&mut self, query_func: F)\n\n where\n\n F: Fn(TransactionView) -> CellQuery + 'static,\n\n {\n\n self.input_rules.push(Box::new(query_func))\n\n }\n\n\n\n pub fn tx_template(&self) -> TransactionView {\n", "file_path": "sdk/src/contract/mod.rs", "rank": 54, "score": 26865.341314599023 }, { "content": " .iter()\n\n .map(|out| out.0.clone())\n\n .collect::<Vec<CellOutput>>(),\n\n )\n\n .set_outputs_data(\n\n outputs\n\n .iter()\n\n .map(|out| out.1.clone().pack())\n\n .collect::<Vec<ckb_types::packed::Bytes>>(),\n\n )\n\n .build();\n\n tx_meta.tx(final_inner_tx)\n\n }\n\n}\n", "file_path": "sdk/src/contract/mod.rs", "rank": 55, "score": 26865.334238152485 
}, { "content": "use std::prelude::v1::*;\n\npub mod builtins;\n\npub mod schema;\n\nuse self::generator::CellMetaTransaction;\n\nuse self::schema::*;\n\n\n\nuse crate::ckb_types::packed::{CellInput, CellOutput, CellOutputBuilder, Uint64};\n\nuse crate::ckb_types::{bytes::Bytes, packed, prelude::*};\n\n\n\n#[cfg(not(feature = \"script\"))]\n\npub mod generator;\n\n#[cfg(not(feature = \"script\"))]\n\nuse self::generator::{CellQuery, GeneratorMiddleware};\n\n#[cfg(not(feature = \"script\"))]\n\nuse crate::chain::CellOutputWithData;\n\n#[cfg(not(feature = \"script\"))]\n\nuse crate::ckb_types::core::TransactionView;\n\n#[cfg(not(feature = \"script\"))]\n\nuse crate::ckb_types::{core::TransactionBuilder, H256};\n\n#[cfg(not(feature = \"script\"))]\n", "file_path": "sdk/src/contract/mod.rs", "rank": 56, "score": 26865.313798837946 }, { "content": " bytes.into_bytes().pack()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"script\")]\n\nimpl From<ckb_standalone_types::packed::Bytes> for JsonBytes {\n\n fn from(bytes: ckb_standalone_types::packed::Bytes) -> Self {\n\n Self(bytes.unpack())\n\n }\n\n}\n\n\n\nuse crate::ckb_types::{bytes::Bytes, prelude::*};\n\n\n", "file_path": "sdk/src/contract/schema.rs", "rank": 57, "score": 26863.79250266817 }, { "content": "extern crate ckb_always_success_script;\n\nextern crate trampoline_sdk;\n\n\n\nuse trampoline_sdk::chain::{MockChain, MockChainTxProvider as ChainRpc};\n\nuse trampoline_sdk::contract::*;\n\nuse trampoline_sdk::contract::{builtins::sudt::*, generator::*, schema::*};\n\n\n\nuse ckb_types::{\n\n bytes::Bytes,\n\n core::{TransactionBuilder, TransactionView},\n\n packed::{Byte32, CellOutput},\n\n prelude::*,\n\n H256,\n\n};\n\n\n\nuse ckb_hash::blake2b_256;\n\nuse ckb_jsonrpc_types::JsonBytes;\n\n\n\nuse std::path::Path;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n// Generated from ckb-cli util blake2b --binary-path /path/to/builtins/bins/simple_udt\n\nconst EXPECTED_SUDT_HASH: &str =\n\n 
\"0xe1e354d6d643ad42724d40967e334984534e0367405c5ae42a9d7d63d77df419\";\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 59, "score": 26863.626761124488 }, { "content": " ContractField::TypeScript => todo!(),\n\n ContractField::Capacity => todo!(),\n\n },\n\n RuleScope::TransactionField(field) => match field {\n\n TransactionField::Inputs => ContractCellField::Inputs(\n\n self.inner.inputs().into_iter().collect::<Vec<CellInput>>(),\n\n ),\n\n TransactionField::Outputs => ContractCellField::Outputs(\n\n self.inner\n\n .outputs_with_data_iter()\n\n .collect::<Vec<CellOutputWithData>>(),\n\n ),\n\n TransactionField::Dependencies => ContractCellField::CellDeps(\n\n self.inner\n\n .cell_deps_iter()\n\n .collect::<Vec<crate::ckb_types::packed::CellDep>>(),\n\n ),\n\n TransactionField::ResolvedInputs => {\n\n ContractCellField::ResolvedInputs(self.inner.inputs.clone())\n\n }\n", "file_path": "sdk/src/contract/mod.rs", "rank": 60, "score": 26863.55218724968 }, { "content": "#[cfg(not(feature = \"script\"))]\n\npub use ckb_jsonrpc_types::JsonBytes;\n\n\n\nuse std::marker::PhantomData;\n\nuse std::prelude::v1::*;\n\n#[cfg(feature = \"script\")]\n\npub struct JsonBytes(crate::ckb_types::bytes::Bytes);\n\n#[cfg(feature = \"script\")]\n\nimpl JsonBytes {\n\n pub fn from_bytes(bytes: Bytes) -> Self {\n\n Self(bytes)\n\n }\n\n pub fn into_bytes(self) -> Bytes {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(feature = \"script\")]\n\nimpl From<JsonBytes> for ckb_standalone_types::packed::Bytes {\n\n fn from(bytes: JsonBytes) -> Self {\n", "file_path": "sdk/src/contract/schema.rs", "rank": 62, "score": 26863.385075449587 }, { "content": "#[cfg(not(feature = \"script\"))]\n\nimpl ContractSource {\n\n pub fn load_from_path(path: PathBuf) -> std::io::Result<Bytes> {\n\n let file = fs::read(path)?;\n\n println!(\"SUDT CODE SIZE: {}\", file.len());\n\n Ok(Bytes::from(file))\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\n#[derive(Clone, PartialEq)]\n\npub enum ContractField {\n\n 
Args,\n\n Data,\n\n LockScript,\n\n TypeScript,\n\n Capacity,\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n", "file_path": "sdk/src/contract/mod.rs", "rank": 63, "score": 26862.98559117445 }, { "content": " },\n\n );\n\n\n\n // Add output rule to sudt contract to increase balance by 20\n\n sudt_contract.add_output_rule(\n\n ContractField::Data,\n\n |ctx| -> ContractCellField<OwnerLockHash, SudtAmount> {\n\n let amount: ContractCellField<OwnerLockHash, SudtAmount> =\n\n ctx.load(ContractField::Data);\n\n if let ContractCellField::Data(amount) = amount {\n\n let amt: u128 = amount.into();\n\n ContractCellField::Data(SudtAmount::from(amt + 20))\n\n } else {\n\n amount\n\n }\n\n },\n\n );\n\n\n\n // Pipe transaction into sudt contract\n\n let new_tx = sudt_contract.pipe(transaction.into(), Arc::new(Mutex::new(vec![])));\n\n\n\n // Check that sudt contract updated correctly with a total balance increase of 37 (17 + 20)\n\n let new_tx_amt = new_tx.tx.output_with_data(0).unwrap().1;\n\n println!(\"New tx amt as bytes: {:?}\", new_tx_amt.pack());\n\n let new_tx_amt: u128 = sudt_contract.read_raw_data(new_tx_amt).into();\n\n assert_eq!(new_tx_amt, 2037_u128);\n\n}\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 64, "score": 26862.63219685047 }, { "content": " .as_builder()\n\n .extend(tx_template.inputs())\n\n .build();\n\n let total_outputs_data = tx\n\n .outputs_data()\n\n .as_builder()\n\n .extend(tx_template.outputs_data())\n\n .build();\n\n let tx = tx\n\n .as_advanced_builder()\n\n .set_cell_deps(\n\n total_deps\n\n .into_iter()\n\n .collect::<Vec<crate::ckb_types::packed::CellDep>>(),\n\n )\n\n .set_outputs(\n\n total_outputs\n\n .into_iter()\n\n .collect::<Vec<crate::ckb_types::packed::CellOutput>>(),\n\n )\n", "file_path": "sdk/src/contract/mod.rs", "rank": 65, "score": 26862.324123015165 }, { "content": " tx_meta: CellMetaTransaction,\n\n _query_queue: Arc<Mutex<Vec<CellQuery>>>,\n\n ) -> CellMetaTransaction {\n\n type OutputWithData = (CellOutput, 
Bytes);\n\n\n\n let tx = tx_meta.tx.clone();\n\n let tx_template = self.tx_template();\n\n\n\n let total_deps = tx\n\n .cell_deps()\n\n .as_builder()\n\n .extend(tx_template.cell_deps_iter())\n\n .build();\n\n let total_outputs = tx\n\n .outputs()\n\n .as_builder()\n\n .extend(tx_template.outputs())\n\n .build();\n\n let total_inputs = tx\n\n .inputs()\n", "file_path": "sdk/src/contract/mod.rs", "rank": 66, "score": 26862.04759307997 }, { "content": "#[derive(Clone, PartialEq)]\n\npub enum TransactionField {\n\n ResolvedInputs,\n\n Inputs,\n\n Outputs,\n\n Dependencies,\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\n#[derive(PartialEq)]\n\npub enum RuleScope {\n\n ContractField(ContractField),\n\n TransactionField(TransactionField),\n\n}\n\n#[cfg(not(feature = \"script\"))]\n\nimpl From<ContractField> for RuleScope {\n\n fn from(f: ContractField) -> Self {\n\n Self::ContractField(f)\n\n }\n\n}\n", "file_path": "sdk/src/contract/mod.rs", "rank": 68, "score": 26861.572434865422 }, { "content": "\n\n if output.lock().calc_script_hash() == self_script_hash {\n\n return Some((idx, tx.output_with_data(idx).unwrap()));\n\n }\n\n\n\n idx += 1;\n\n None\n\n });\n\n\n\n let mut ctx = RuleContext::new(tx_meta.clone());\n\n\n\n let outputs = outputs\n\n .into_iter()\n\n .map(|output_with_idx| {\n\n ctx.idx(output_with_idx.0);\n\n let processed = self.output_rules.iter().fold(output_with_idx.1, |output, rule| {\n\n let data = self.read_raw_data(output.1.clone());\n\n println!(\"Data before update {:?}\", data.to_mol());\n\n let updated_field = rule.exec(&ctx);\n\n match updated_field {\n", "file_path": "sdk/src/contract/mod.rs", "rank": 69, "score": 26861.307449135784 }, { "content": " tx.build()\n\n }\n\n}\n\n#[cfg(not(feature = \"script\"))]\n\nimpl<A, D> GeneratorMiddleware for Contract<A, D>\n\nwhere\n\n D: JsonByteConversion + MolConversion + BytesConversion + Clone,\n\n A: JsonByteConversion + MolConversion + BytesConversion + Clone,\n\n{\n\n fn 
update_query_register(\n\n &self,\n\n tx: CellMetaTransaction,\n\n query_register: Arc<Mutex<Vec<CellQuery>>>,\n\n ) {\n\n let queries = self.input_rules.iter().map(|rule| rule(tx.clone().tx));\n\n\n\n query_register.lock().unwrap().extend(queries);\n\n }\n\n fn pipe(\n\n &self,\n", "file_path": "sdk/src/contract/mod.rs", "rank": 70, "score": 26861.169931238128 }, { "content": "impl<T, M> BytesConversion for SchemaPrimitiveType<T, M>\n\nwhere\n\n M: Entity + Unpack<T>,\n\n T: Pack<M>,\n\n{\n\n fn from_bytes(bytes: Bytes) -> Self {\n\n Self {\n\n inner: M::from_compatible_slice(bytes.as_ref())\n\n .expect(\"Unable to build primitive type from bytes\")\n\n .unpack(),\n\n _entity_type: PhantomData::<M>,\n\n }\n\n }\n\n\n\n fn to_bytes(&self) -> Bytes {\n\n self.to_mol().as_bytes()\n\n }\n\n}\n\n\n\nimpl<T, M> JsonByteConversion for SchemaPrimitiveType<T, M>\n", "file_path": "sdk/src/contract/schema.rs", "rank": 71, "score": 26859.950212468062 }, { "content": " if let ContractCellField::Data(amount) = amount {\n\n let amt: u128 = amount.into();\n\n ContractCellField::Data(SudtAmount::from(amt + 2000))\n\n } else {\n\n amount\n\n }\n\n },\n\n );\n\n sudt_contract.add_input_rule(move |_tx| -> CellQuery {\n\n CellQuery {\n\n _query: QueryStatement::Single(CellQueryAttribute::LockHash(\n\n minter_lock_hash.clone().into(),\n\n )),\n\n _limit: 1,\n\n }\n\n });\n\n\n\n // Instantiate chain rpc and tx generator\n\n let chain_rpc = ChainRpc::new(chain);\n\n let generator = Generator::new()\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 73, "score": 26859.680923348205 }, { "content": "where\n\n M: Entity + Unpack<T>,\n\n T: Pack<M>,\n\n{\n\n fn to_json_bytes(&self) -> JsonBytes {\n\n self.to_mol().as_bytes().pack().into()\n\n }\n\n\n\n fn from_json_bytes(bytes: JsonBytes) -> Self {\n\n Self::from_bytes(bytes.into_bytes())\n\n }\n\n}\n\n\n\nimpl<T, M> JsonConversion for SchemaPrimitiveType<T, M>\n\nwhere\n\n M: Entity + Unpack<T>,\n\n T: Pack<M>,\n\n{\n\n type JsonType = 
JsonBytes;\n\n\n\n fn to_json(&self) -> Self::JsonType {\n\n self.to_json_bytes()\n\n }\n\n\n\n fn from_json(json: Self::JsonType) -> Self {\n\n Self::from_json_bytes(json)\n\n }\n\n}\n", "file_path": "sdk/src/contract/schema.rs", "rank": 74, "score": 26858.63963058809 }, { "content": " },\n\n }\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\npub struct OutputRule<A, D> {\n\n pub scope: RuleScope,\n\n pub rule: Box<dyn Fn(RuleContext) -> ContractCellField<A, D>>,\n\n}\n\n\n\n#[cfg(not(feature = \"script\"))]\n\nimpl<A, D> OutputRule<A, D> {\n\n pub fn new<F>(scope: impl Into<RuleScope>, rule: F) -> Self\n\n where\n\n F: 'static + Fn(RuleContext) -> ContractCellField<A, D>,\n\n {\n\n OutputRule {\n\n scope: scope.into(),\n\n rule: Box::new(rule),\n", "file_path": "sdk/src/contract/mod.rs", "rank": 75, "score": 26857.859674353756 }, { "content": "use ckb_hash::blake2b_256;\n\n#[cfg(not(feature = \"script\"))]\n\nuse ckb_jsonrpc_types::{CellDep, DepType, JsonBytes, OutPoint, Script};\n\nuse ckb_types::core::cell::CellMeta;\n\n\n\n#[cfg(not(feature = \"script\"))]\n\nuse std::fs;\n\n#[cfg(not(feature = \"script\"))]\n\nuse std::path::PathBuf;\n\n\n\n#[cfg(not(feature = \"script\"))]\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[cfg(not(feature = \"script\"))]\n\n#[derive(Debug, Clone)]\n\npub enum ContractSource {\n\n LocalPath(PathBuf),\n\n Immediate(Bytes),\n\n Chain(OutPoint),\n\n}\n", "file_path": "sdk/src/contract/mod.rs", "rank": 76, "score": 26857.02809815213 }, { "content": " }\n\n }\n\n pub fn tx(mut self, tx: impl Into<CellMetaTransaction>) -> Self {\n\n self.inner = tx.into();\n\n self\n\n }\n\n\n\n pub fn get_tx(&self) -> CellMetaTransaction {\n\n self.inner.clone()\n\n }\n\n pub fn idx(&mut self, idx: usize) {\n\n self.idx = idx;\n\n }\n\n\n\n pub fn curr_field(&mut self, field: TransactionField) {\n\n self.curr_field = field;\n\n }\n\n\n\n pub fn load<A, D>(&self, scope: impl Into<RuleScope>) -> ContractCellField<A, D>\n\n where\n", "file_path": 
"sdk/src/contract/mod.rs", "rank": 77, "score": 26856.60400628433 }, { "content": " }\n\n },\n\n );\n\n\n\n sudt_contract.add_input_rule(move |_tx| -> CellQuery {\n\n CellQuery {\n\n _query: QueryStatement::Single(CellQueryAttribute::LockHash(\n\n non_minter_lock_hash.clone().into(),\n\n )),\n\n _limit: 1,\n\n }\n\n });\n\n\n\n // Instantiate chain rpc and tx generator\n\n let chain_rpc = ChainRpc::new(chain);\n\n let generator = Generator::new()\n\n .chain_service(&chain_rpc)\n\n .query_service(&chain_rpc)\n\n .pipeline(vec![&sudt_contract]);\n\n\n\n let new_fail_tx = generator.generate(); //generator.pipe(fail_tx, Arc::new(Mutex::new(vec![])));\n\n // Test that failure transaction failed\n\n let is_valid = chain_rpc.verify_tx(new_fail_tx.tx.into());\n\n assert!(!is_valid);\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 78, "score": 26856.525168373482 }, { "content": " .chain_service(&chain_rpc)\n\n .query_service(&chain_rpc)\n\n .pipeline(vec![&sudt_contract]);\n\n\n\n // Generate transaction\n\n let new_tx = generator.generate(); //generator.pipe(tx, Arc::new(Mutex::new(vec![])));\n\n\n\n // Test that success transaction succeeded & has correct sudt amount minted\n\n let new_tx_amt = new_tx.tx.output_with_data(0).unwrap().1;\n\n let new_tx_amt: u128 = sudt_contract.read_raw_data(new_tx_amt).to_mol().unpack();\n\n assert_eq!(new_tx_amt, 2000_u128);\n\n\n\n let is_valid = chain_rpc.verify_tx(new_tx.tx.into());\n\n assert!(is_valid);\n\n}\n\n\n", "file_path": "sdk/tests/sudt_contract.rs", "rank": 79, "score": 26856.178794479885 }, { "content": "}\n\n\n\nimpl<T, M> MolConversion for SchemaPrimitiveType<T, M>\n\nwhere\n\n M: Entity + Unpack<T>,\n\n T: Pack<M>,\n\n{\n\n type MolType = M;\n\n fn to_mol(&self) -> Self::MolType {\n\n self.inner.pack()\n\n }\n\n\n\n fn from_mol(entity: Self::MolType) -> Self {\n\n Self {\n\n inner: entity.unpack(),\n\n _entity_type: std::marker::PhantomData::<M>,\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"sdk/src/contract/schema.rs", "rank": 80, "score": 26856.080401623203 }, { "content": "{\n\n pub fn byte_size(&self) -> usize {\n\n self.to_mol().as_builder().expected_length()\n\n }\n\n}\n\nimpl<T, M> SchemaPrimitiveType<T, M> {\n\n pub fn new(inner: T) -> Self {\n\n Self {\n\n inner,\n\n _entity_type: std::marker::PhantomData::<M>,\n\n }\n\n }\n\n\n\n pub fn from(native_type: T) -> Self {\n\n SchemaPrimitiveType::new(native_type)\n\n }\n\n\n\n pub fn into(self) -> T {\n\n self.inner\n\n }\n", "file_path": "sdk/src/contract/schema.rs", "rank": 81, "score": 26854.732876748738 }, { "content": "#[cfg(not(feature = \"script\"))]\n\nimpl From<TransactionField> for RuleScope {\n\n fn from(f: TransactionField) -> Self {\n\n Self::TransactionField(f)\n\n }\n\n}\n\n#[cfg(not(feature = \"script\"))]\n\n#[derive(Clone)]\n\npub struct RuleContext {\n\n inner: CellMetaTransaction,\n\n pub idx: usize,\n\n pub curr_field: TransactionField,\n\n}\n\n#[cfg(not(feature = \"script\"))]\n\nimpl RuleContext {\n\n pub fn new(tx: impl Into<CellMetaTransaction>) -> Self {\n\n Self {\n\n inner: tx.into(),\n\n idx: 0,\n\n curr_field: TransactionField::Outputs,\n", "file_path": "sdk/src/contract/mod.rs", "rank": 82, "score": 26854.391783090337 }, { "content": " }\n\n\n\n pub fn output_pts_iter(&self) -> impl Iterator<Item = crate::ckb_types::packed::OutPoint> {\n\n self.tx.output_pts_iter()\n\n }\n\n\n\n pub fn input_pts_iter(&self) -> impl Iterator<Item = crate::ckb_types::packed::OutPoint> {\n\n self.tx.input_pts_iter()\n\n }\n\n\n\n pub fn outputs_with_data_iter(&self) -> impl Iterator<Item = CellOutputWithData> {\n\n self.tx.outputs_with_data_iter()\n\n }\n\n\n\n pub fn outputs_capacity(\n\n &self,\n\n ) -> Result<crate::ckb_types::core::Capacity, ckb_types::core::CapacityError> {\n\n self.tx.outputs_capacity()\n\n }\n\n pub fn fake_hash(mut self, hash: crate::ckb_types::packed::Byte32) -> Self {\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 84, "score": 
25683.750220246802 }, { "content": "use ckb_jsonrpc_types::{Byte32, Capacity, OutPoint, Script, TransactionView as JsonTransaction};\n\nuse ckb_types::packed::CellDepBuilder;\n\nuse std::prelude::v1::*;\n\n\n\nuse crate::ckb_types::{\n\n core::{cell::CellMeta, TransactionBuilder, TransactionView},\n\n packed::CellInputBuilder,\n\n prelude::*,\n\n};\n\n\n\nuse std::collections::HashSet;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse crate::chain::CellOutputWithData;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct CellMetaTransaction {\n\n pub tx: TransactionView,\n\n pub inputs: Vec<CellMeta>,\n\n}\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 85, "score": 25683.052768529127 }, { "content": " }\n\n\n\n pub fn as_advanced_builder(&self) -> TransactionBuilder {\n\n self.tx.as_advanced_builder()\n\n }\n\n\n\n pub fn cell_deps(&self) -> crate::ckb_types::packed::CellDepVec {\n\n self.tx.cell_deps()\n\n }\n\n\n\n pub fn inputs(&self) -> crate::ckb_types::packed::CellInputVec {\n\n self.tx.inputs()\n\n }\n\n\n\n pub fn outputs(&self) -> crate::ckb_types::packed::CellOutputVec {\n\n self.tx.outputs()\n\n }\n\n\n\n pub fn outputs_data(&self) -> crate::ckb_types::packed::BytesVec {\n\n self.tx.outputs_data()\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 86, "score": 25682.039905883033 }, { "content": " println!(\"RESOLVED INPUTS IN GENERATOR PIPE: {:?}\", inputs);\n\n let inner_tx = tx\n\n .as_advanced_builder()\n\n .set_inputs(\n\n inputs\n\n .iter()\n\n .map(|inp| {\n\n CellInputBuilder::default()\n\n .previous_output(inp.out_point.clone())\n\n .build()\n\n })\n\n .collect::<Vec<_>>(),\n\n )\n\n .build();\n\n let tx = tx.tx(inner_tx).with_inputs(inputs);\n\n let tx = self.middleware.iter().fold(tx, |tx, middleware| {\n\n middleware.pipe(tx, query_register.clone())\n\n });\n\n #[allow(clippy::mutable_key_type)]\n\n let mut queries = HashSet::new();\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 87, "score": 25679.753970293114 }, { 
"content": " }\n\n\n\n pub fn witnesses(&self) -> crate::ckb_types::packed::BytesVec {\n\n self.tx.witnesses()\n\n }\n\n\n\n pub fn output(&self, idx: usize) -> Option<crate::ckb_types::packed::CellOutput> {\n\n self.tx.output(idx)\n\n }\n\n\n\n pub fn output_with_data(&self, idx: usize) -> Option<CellOutputWithData> {\n\n self.tx.output_with_data(idx)\n\n }\n\n\n\n pub fn output_pts(&self) -> Vec<crate::ckb_types::packed::OutPoint> {\n\n self.tx.output_pts()\n\n }\n\n\n\n pub fn cell_deps_iter(&self) -> impl Iterator<Item = crate::ckb_types::packed::CellDep> {\n\n self.tx.cell_deps_iter()\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 88, "score": 25678.931844764917 }, { "content": " tx.inputs.iter().for_each(|cell| {\n\n if let Some(script) = cell.cell_output.type_().to_opt() {\n\n let query = CellQuery {\n\n _query: QueryStatement::Single(CellQueryAttribute::DataHash(\n\n script.code_hash().into(),\n\n )),\n\n _limit: 1,\n\n };\n\n queries.insert(query);\n\n }\n\n queries.insert(CellQuery {\n\n _query: QueryStatement::Single(CellQueryAttribute::DataHash(\n\n cell.cell_output.lock().code_hash().into(),\n\n )),\n\n _limit: 1,\n\n });\n\n });\n\n let deps = queries.into_iter().flat_map(|q| {\n\n self.query(q).unwrap().into_iter().map(|cell_dep_meta| {\n\n CellDepBuilder::default()\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 89, "score": 25678.060724058138 }, { "content": " self.tx = self.tx.fake_hash(hash);\n\n self\n\n }\n\n\n\n /// Sets a fake witness hash.\n\n pub fn fake_witness_hash(mut self, witness_hash: crate::ckb_types::packed::Byte32) -> Self {\n\n self.tx = self.tx.fake_witness_hash(witness_hash);\n\n self\n\n }\n\n}\n\n\n\n// Note: Uses ckb_jsonrpc_types\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 90, "score": 25676.10814959406 }, { "content": " tx: Some(TransactionBuilder::default().build().into()),\n\n query_queue: Arc::new(Mutex::new(vec![])),\n\n }\n\n }\n\n\n\n pub fn pipeline(mut self, pipes: 
Vec<&'a dyn GeneratorMiddleware>) -> Self {\n\n self.middleware = pipes;\n\n self\n\n }\n\n\n\n pub fn chain_service(mut self, chain_service: &'b dyn TransactionProvider) -> Self {\n\n self.chain_service = Some(chain_service);\n\n self\n\n }\n\n\n\n pub fn query_service(mut self, query_service: &'b dyn QueryProvider) -> Self {\n\n self.query_service = Some(query_service);\n\n self\n\n }\n\n\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 91, "score": 25675.26463754432 }, { "content": "\n\nimpl From<TransactionView> for CellMetaTransaction {\n\n fn from(tx: TransactionView) -> Self {\n\n Self { tx, inputs: vec![] }\n\n }\n\n}\n\n\n\nimpl CellMetaTransaction {\n\n pub fn tx(self, tx: TransactionView) -> Self {\n\n Self {\n\n tx,\n\n inputs: self.inputs,\n\n }\n\n }\n\n\n\n pub fn with_inputs(self, inputs: Vec<CellMeta>) -> Self {\n\n Self {\n\n tx: self.tx,\n\n inputs,\n\n }\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 92, "score": 25675.255666992554 }, { "content": " .out_point(cell_dep_meta.out_point)\n\n .build()\n\n })\n\n });\n\n\n\n let inner_tx = tx\n\n .as_advanced_builder()\n\n .cell_deps(tx.cell_deps().as_builder().extend(deps).build())\n\n .build();\n\n // TO DO: Resolve cell deps of inputs\n\n // Will have to accommodate some cells being deptype of depgroup\n\n\n\n println!(\n\n \"FINAL TX GENERATED: {:#?}\",\n\n ckb_jsonrpc_types::TransactionView::from(tx.clone().tx)\n\n );\n\n tx.tx(inner_tx)\n\n }\n\n}\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 93, "score": 25674.31491778896 }, { "content": " }\n\n}\n\n\n\nimpl GeneratorMiddleware for Generator<'_, '_> {\n\n fn update_query_register(\n\n &self,\n\n tx: CellMetaTransaction,\n\n query_register: Arc<Mutex<Vec<CellQuery>>>,\n\n ) {\n\n self.middleware\n\n .iter()\n\n .for_each(|m| m.update_query_register(tx.clone(), query_register.clone()));\n\n }\n\n fn pipe(\n\n &self,\n\n tx: CellMetaTransaction,\n\n query_register: Arc<Mutex<Vec<CellQuery>>>,\n\n ) -> 
CellMetaTransaction {\n\n self.update_query_register(tx.clone(), query_register.clone());\n\n let inputs = self.resolve_queries(query_register.clone());\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 94, "score": 25672.32147188762 }, { "content": " pub fn query(&self, query: CellQuery) -> Option<Vec<CellMeta>> {\n\n let res = self.query_service.unwrap().query_cell_meta(query.clone());\n\n println!(\n\n \"Res in generator.query for cell_query {:?} is {:?}\",\n\n query, res\n\n );\n\n res\n\n }\n\n\n\n pub fn generate(&self) -> CellMetaTransaction {\n\n self.pipe(self.tx.as_ref().unwrap().clone(), self.query_queue.clone())\n\n }\n\n\n\n pub fn resolve_queries(&self, query_register: Arc<Mutex<Vec<CellQuery>>>) -> Vec<CellMeta> {\n\n query_register\n\n .lock()\n\n .unwrap()\n\n .iter()\n\n .flat_map(|query| self.query(query.to_owned()).unwrap())\n\n .collect::<Vec<_>>()\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 95, "score": 25671.430326952282 }, { "content": " MinCapacity(Capacity),\n\n MaxCapacity(Capacity),\n\n DataHash(Byte32),\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum QueryStatement {\n\n Single(CellQueryAttribute),\n\n FilterFrom(CellQueryAttribute, CellQueryAttribute),\n\n Any(Vec<CellQueryAttribute>),\n\n All(Vec<CellQueryAttribute>),\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct CellQuery {\n\n pub _query: QueryStatement,\n\n pub _limit: u64,\n\n}\n\n\n", "file_path": "sdk/src/contract/generator/mod.rs", "rank": 96, "score": 25671.22669557523 }, { "content": "#[allow(clippy::needless_lifetimes, clippy::derivable_impls)]\n\npub mod sudt;\n\n#[allow(clippy::needless_lifetimes, clippy::derivable_impls)]\n\npub mod t_nft;\n", "file_path": "sdk/src/contract/builtins/mod.rs", "rank": 97, "score": 25666.665342748507 }, { "content": "use std::prelude::v1::*;\n\n\n\nuse crate::ckb_types::packed::{Byte32, Uint128};\n\n#[cfg(not(feature = \"script\"))]\n\nuse 
crate::contract::Contract;\n\n\n\nuse crate::contract::schema::SchemaPrimitiveType;\n\n\n\n#[derive(Debug, Clone, Default)]\n", "file_path": "sdk/src/contract/builtins/sudt/mod.rs", "rank": 98, "score": 24601.264220305166 }, { "content": "use std::prelude::v1::*;\n\n\n\npub mod mol_defs;\n\nuse crate::ckb_types::{bytes::Bytes, prelude::*};\n\n\n\nuse mol_defs::{Byte32, Byte32Reader, NFTBuilder, NFT};\n\n\n\n#[cfg(not(feature = \"script\"))]\n\nuse crate::contract::Contract;\n\nuse crate::{\n\n contract::schema::SchemaPrimitiveType,\n\n contract::schema::{BytesConversion, JsonByteConversion, JsonBytes, MolConversion},\n\n impl_entity_unpack, impl_pack_for_fixed_byte_array, impl_primitive_reader_unpack,\n\n};\n\n\n\n#[cfg(not(feature = \"script\"))]\n", "file_path": "sdk/src/contract/builtins/t_nft/mod.rs", "rank": 99, "score": 24601.146876889907 } ]
Rust
libs/editor/libs/paths/src/paths.rs
Ryan1729IsParanoidAboutWriteAccess/rote
fbe20e4627c1b63d2af7141dcffd424563b8fe9f
use std::path::{Path, PathBuf}; pub fn find_in<'path, I: Iterator<Item = &'path Path>>( paths: I, needle: &str, ) -> Vec<PathBuf> { if needle.is_empty() { return Vec::new(); } let len = { if let (_, Some(l)) = paths.size_hint() { l } else { 128 } }; let mut output: Vec<PathBuf> = Vec::with_capacity(len); for path in paths { let mut contains_needle = false; macro_rules! cmp_match_indices { ($p1: expr, $p2: expr, $path_cmp: expr) => {{ use std::cmp::Ordering::*; let mut p1_iter = $p1.match_indices(needle); let mut p2_iter = $p2.match_indices(needle); let mut backup_ordering = Equal; let mut p1_needle_count = 0; let mut p2_needle_count = 0; loop { match (p1_iter.next(), p2_iter.next()) { (Some((p1_i, _)), Some((p2_i, _))) => { contains_needle = true; backup_ordering = p1_i.cmp(&p2_i); p1_needle_count += 1; p2_needle_count += 1; } (Some(_), None) => { contains_needle = true; p1_needle_count += 1; break; } (None, Some(_)) => { p2_needle_count += 1; break; } (None, None) => { break; } } } p1_needle_count .cmp(&p2_needle_count) .then_with(|| backup_ordering) .then_with(|| $path_cmp) }}; } let i = if output.is_empty() { contains_needle = if let Some(s) = path.to_str() { s.match_indices(needle).count() > 0 } else { let s = path.to_string_lossy(); s.match_indices(needle).count() > 0 }; 0 } else { output .binary_search_by(|p| { match (path.to_str(), p.to_str()) { (Some(s1), Some(s2)) => cmp_match_indices!(s1, s2, p.as_path().cmp(path)), (Some(s1), None) => { let s2 = p.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } (None, Some(s2)) => { let s1 = path.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } (None, None) => { let s1 = path.to_string_lossy(); let s2 = p.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } } }) .unwrap_or_else(|i| i) }; if contains_needle { output.insert(i, path.to_path_buf()); } } output } #[cfg(test)] mod tests { use super::*; #[test] fn find_in_paths_works_on_this_small_example() { let 
needle = "b"; let searched_paths = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\bartog.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\unrelated.txt"), ]; let expected = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\bartog.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), ]; assert_eq!( find_in(searched_paths.iter().map(|p| p.as_path()), needle), expected ); } #[test] fn find_in_paths_sorts_things_with_multiple_needles_higher() { let needle = "b"; let searched_paths = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\basketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beebasketball.txt"), ]; let expected = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\beebasketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\basketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), ]; assert_eq!( find_in(searched_paths.iter().map(|p| p.as_path()), needle), expected ); } }
use std::path::{Path, PathBuf}; pub fn find_in<'path, I: Iterator<Item = &'path Path>>( paths: I, needle: &str, ) -> Vec<PathBuf> { if needle.is_empty() { return Vec::new(); } let len = { if let (_, Some(l)) = paths.size_hint() { l } else { 128 } }; let mut output: Vec<PathBuf> = Vec::with_capacity(len); for path in paths { let mut contains_needle = false; macro_rules! cmp_match_indices { ($p1: expr, $p2: expr, $path_cmp: expr) => {{ use std::cmp::Ordering::*; let mut p1_iter = $p1.match_indices(needle); let mut p2_iter = $p2.match_indices(needle); let mut backup_ordering = Equal; let mut p1_needle_count = 0; let mut p2_needle_count = 0; loop { match (p1_iter.next(), p2_iter.next()) { (Some((p1_i, _)), Some((p2_i, _))) => { contains_needle = true; backup_ordering = p1_i.cmp(&p2_i); p1_needle_count += 1; p2_needle_count += 1; } (Some(_), None) => { contains_needle = true; p1_needle_count += 1; break; } (None, Some(_)) => { p2_needle_count += 1; break; } (None, None) => { break; } } } p1_needle_count .cmp(&p2_needle_count) .then_with(|| backup_ordering) .then_with(|| $path_cmp) }}; } let i = if output.is_empty() { contains_needle = if let Some(s) = path.to_str() { s.match_indices(needle).count() > 0 } else { let s = path.to_string_lossy(); s.match_indices(needle).count() > 0 }; 0 } else { output .binary_search_by(|p| { match (path.to_str(), p.to_str()) { (Some(s1), Some(s2)) => cmp_match_indices!(s1, s2, p.as_path().cmp(path)), (Some(s1), None) => { let s2 = p.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } (None, Some(s2)) => { let s1 = path.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } (None, None) => { let s1 = path.to_string_lossy(); let s2 = p.to_string_lossy(); cmp_match_indices!(s1, s2, p.as_path().cmp(path)) } } }) .unwrap_or_else(|i| i) }; if contains_needle { output.insert(i, path.to_path_buf()); } } output } #[cfg(test)] mod tests { use super::*; #[test] fn find_in_paths_works_on_this_small_example() { let 
needle = "b"; let searched_paths = vec![ PathBuf::from("C:\\Users\\rya
rom("C:\\Users\\ryan1729\\Documents\\beans.txt"), ]; assert_eq!( find_in(searched_paths.iter().map(|p| p.as_path()), needle), expected ); } #[test] fn find_in_paths_sorts_things_with_multiple_needles_higher() { let needle = "b"; let searched_paths = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\basketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beebasketball.txt"), ]; let expected = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\beebasketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\basketball.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), ]; assert_eq!( find_in(searched_paths.iter().map(|p| p.as_path()), needle), expected ); } }
n1729\\Documents\\bartog.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\beans.txt"), PathBuf::from("C:\\Users\\ryan1729\\Documents\\unrelated.txt"), ]; let expected = vec![ PathBuf::from("C:\\Users\\ryan1729\\Documents\\bartog.txt"), PathBuf::f
function_block-random_span
[]
Rust
src/mesh.rs
ArvinSKushwaha/dynamics_engine
a1189653d3dc5c9b4dfd4eee3eee75b7b6f3b9ed
use crate::object::{Ray, RayCollision}; use crate::{Float, Mat3, Point3, UnitVec3, Vec3, EPSILON}; use na::{point, vector}; use nalgebra as na; use std::io::{BufRead, BufReader}; #[derive(Debug, Clone)] pub struct MeshOptions { pub smooth_normals: bool, } #[derive(Debug, Clone)] pub struct PolygonMesh { pub vertex_list: Vec<Point3>, pub vertices_to_faces: Vec<Vec<usize>>, pub faces_to_vertices: Vec<Vec<usize>>, pub face_normals: Vec<UnitVec3>, pub vertex_normals: Vec<UnitVec3>, pub mesh_options: MeshOptions, pub smooth_transform_matrices: Option<Vec<Mat3>>, } impl PolygonMesh { pub fn get_mesh(&self) -> &PolygonMesh { self } pub fn get_vertex_count(&self) -> usize { self.vertex_list.len() } pub fn get_face_count(&self) -> usize { self.face_normals.len() } pub fn from_many(x: &[PolygonMesh]) -> PolygonMesh { x.iter() .fold( PolygonMesh { vertex_list: vec![], vertices_to_faces: vec![], faces_to_vertices: vec![], face_normals: vec![], vertex_normals: vec![], mesh_options: MeshOptions { smooth_normals: false, }, smooth_transform_matrices: None, }, |a: PolygonMesh, b: &PolygonMesh| { let mut a = a.clone(); let vertex_count = a.get_vertex_count(); let face_count = a.get_face_count(); a.vertex_list.extend(b.vertex_list.iter()); a.face_normals.extend(b.face_normals.iter()); a.vertex_normals.extend(b.vertex_normals.iter()); a.faces_to_vertices.extend(b.faces_to_vertices.clone()); a.vertices_to_faces.extend(b.faces_to_vertices.clone()); for i in face_count..a.faces_to_vertices.len() { a.faces_to_vertices[i] = a.faces_to_vertices[i] .iter() .map(|v| *v + vertex_count) .collect(); } for i in vertex_count..a.vertices_to_faces.len() { a.vertices_to_faces[i] = a.vertices_to_faces[i] .iter() .map(|v| *v + face_count) .collect(); } a }, ) .clone() } } pub fn read_obj(path: &str, smoothing: bool) -> Result<PolygonMesh, &str> { let f = match std::fs::File::open(path) { Ok(t) => t, Err(_) => return Err("OBJ File opening failed"), }; let mut smooth_transform_matrices = vec![]; let mut 
mesh = PolygonMesh { vertex_list: vec![], vertices_to_faces: vec![], faces_to_vertices: vec![], face_normals: vec![], vertex_normals: vec![], mesh_options: MeshOptions { smooth_normals: smoothing, }, smooth_transform_matrices: None, }; for line in BufReader::new(f).lines() { let data: String = match line { Ok(t) => t.trim().to_string(), Err(_) => return Err("OBJ File reading failed"), }; if data.len() == 0 || data.starts_with("#") { continue; } let mut iter = data.split_whitespace(); match iter.next() { Some("v") => { mesh.vertex_list.push(point![ iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.) ]); mesh.vertices_to_faces.push(vec![]); } Some("f") => { let mut face_vertices: Vec<usize> = Vec::with_capacity(3); loop { if let Some(t) = iter.next() { match t.split('/').collect::<Vec<&str>>()[0].parse::<usize>() { Ok(v_num) => { face_vertices.push(v_num - 1); } Err(_) => { return Err("Integer parsing failed in face generation"); } } } else { break; } } assert!(face_vertices.len() >= 3); let pts = [ mesh.vertex_list[face_vertices[2]], mesh.vertex_list[face_vertices[1]], mesh.vertex_list[face_vertices[0]], ]; let norm: Vec3 = (pts[1] - pts[0]).cross(&(pts[2] - pts[1])); let unit_normal; if let Some(unit_norm) = na::Unit::try_new(norm, 1.0e-7) { unit_normal = unit_norm; } else { std::panic::panic_any("Yikes, the normals didn't compute right?!"); } for i in 0..face_vertices.len() - 2 { mesh.vertices_to_faces[face_vertices[0]].push(mesh.faces_to_vertices.len()); mesh.vertices_to_faces[face_vertices[i + 1]].push(mesh.faces_to_vertices.len()); mesh.vertices_to_faces[face_vertices[i + 2]].push(mesh.faces_to_vertices.len()); if smoothing { smooth_transform_matrices.push( match na::Matrix3::from_columns(&[ mesh.vertex_list[face_vertices[0]] .coords .add_scalar(EPSILON), mesh.vertex_list[face_vertices[i + 1]] .coords .add_scalar(EPSILON), 
mesh.vertex_list[face_vertices[i + 2]] .coords .add_scalar(EPSILON), ]) .try_inverse() { Some(m) => m, None => return Err("Found a non-invertible matrix. Yikes."), }, ); } mesh.faces_to_vertices.push(vec![ face_vertices[0], face_vertices[i + 1], face_vertices[i + 2], ]); mesh.face_normals.push(unit_normal); } } Some("vt") => continue, Some("vn") => { mesh.vertex_normals.push( na::Unit::try_new( vector![ iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.) ], 1e-7, ) .unwrap(), ); } _ => continue, }; } if mesh.vertex_normals.len() != mesh.vertex_list.len() { mesh.vertex_normals.resize( mesh.vertex_list.len(), na::UnitVector3::new_unchecked(vector![1., 0., 0.]), ); for (i, faces) in mesh.vertices_to_faces.iter().enumerate() { let current_vertex = mesh.vertex_list[i]; let mut normal = vector![0., 0., 0.]; for face in faces { let vertex_index = mesh.faces_to_vertices[*face] .iter() .position(|&r| r == i) .unwrap(); let prev_vertex = mesh.vertex_list[mesh.faces_to_vertices[*face][(vertex_index as isize - 1) .rem_euclid(mesh.faces_to_vertices[*face].len() as isize) as usize]]; let next_vertex = mesh.vertex_list[mesh.faces_to_vertices[*face][(vertex_index as isize + 1) .rem_euclid(mesh.faces_to_vertices[*face].len() as isize) as usize]]; let angle = (std::f64::consts::PI as Float) - (current_vertex - prev_vertex) .angle(&(next_vertex - current_vertex)) .abs(); normal += mesh.face_normals[*face].into_inner() * angle / (2. 
* (std::f64::consts::PI as Float)); } mesh.vertex_normals[i] = na::Unit::new_normalize(normal); } } for i in 0..mesh.get_face_count() { assert_eq!(mesh.faces_to_vertices[i].len(), 3); } mesh.smooth_transform_matrices = Some(smooth_transform_matrices); assert_eq!(mesh.face_normals.len(), mesh.faces_to_vertices.len()); assert_eq!(mesh.vertex_list.len(), mesh.vertices_to_faces.len()); assert_eq!(mesh.vertex_list.len(), mesh.vertex_normals.len()); return Result::Ok(mesh); } pub fn get_potential_collisions(mesh: &PolygonMesh, ray: &Ray) -> Vec<RayCollision> { let mut probable_faces = vec![]; for i in 0..mesh.get_face_count() { let vertices = ( mesh.vertex_list[mesh.faces_to_vertices[i][0]], mesh.vertex_list[mesh.faces_to_vertices[i][1]], mesh.vertex_list[mesh.faces_to_vertices[i][2]], ); let normals = ( mesh.vertex_normals[mesh.faces_to_vertices[i][0]], mesh.vertex_normals[mesh.faces_to_vertices[i][1]], mesh.vertex_normals[mesh.faces_to_vertices[i][2]], ); let (edge1, edge2, h, s, q); let (a, f, u, v); edge1 = vertices.1 - vertices.0; edge2 = vertices.2 - vertices.0; h = ray.direction.cross(&edge2); a = edge1.dot(&h); if a > -EPSILON && a < EPSILON { continue; } f = 1.0 / a; s = ray.origin - vertices.0; u = f * s.dot(&h); if u < 0. || u > 1. { continue; } q = s.cross(&edge1); v = f * ray.direction.dot(&q); if v < 0. || u + v > 1. { continue; } let t = f * edge2.dot(&q); if t > 0. 
{ assert!(!t.is_nan()); let pos = ray.origin + ray.direction.scale(t); probable_faces.push(RayCollision { mesh: unsafe { std::mem::transmute(mesh) }, distance: t, face_normal: mesh.face_normals[i], position: pos, smooth_normal: if mesh.mesh_options.smooth_normals { let matrices = mesh.smooth_transform_matrices.as_ref().unwrap(); let components = matrices[i] * pos.coords.add_scalar(EPSILON); Some(UnitVec3::new_normalize( normals.0.scale(components.x) + normals.1.scale(components.y) + normals.2.scale(components.z), )) } else { None }, }); } } probable_faces.sort_unstable_by(|a, b| a.distance.partial_cmp(&b.distance).unwrap()); probable_faces }
use crate::object::{Ray, RayCollision}; use crate::{Float, Mat3, Point3, UnitVec3, Vec3, EPSILON}; use na::{point, vector}; use nalgebra as na; use std::io::{BufRead, BufReader}; #[derive(Debug, Clone)] pub struct MeshOptions { pub smooth_normals: bool, } #[derive(Debug, Clone)] pub struct PolygonMesh { pub vertex_list: Vec<Point3>, pub vertices_to_faces: Vec<Vec<usize>>, pub faces_to_vertices: Vec<Vec<usize>>, pub face_normals: Vec<UnitVec3>, pub vertex_normals: Vec<UnitVec3>, pub mesh_options: MeshOptions, pub smooth_transform_matrices: Option<Vec<Mat3>>, } impl PolygonMesh { pub fn get_mesh(&self) -> &PolygonMesh { self } pub fn get_vertex_count(&self) -> usize { self.vertex_list.len() } pub fn get_face_count(&self) -> usize { self.face_normals.len() } pub fn from_many(x: &[PolygonMesh]) -> PolygonMesh { x.iter() .fold( PolygonMesh { vertex_list: vec![], vertices_to_faces: vec![], faces_to_vertices: vec![], face_normals: vec![], vertex_normals: vec![], mesh_options: MeshOptions { smooth_normals: false, }, smooth_transform_matrices: None, }, |a: PolygonMesh, b: &PolygonMesh| { let mut a = a.clone(); let vertex_count = a.get_vertex_count(); let face_count = a.get_face_count(); a.vertex_list.extend(b.vertex_list.iter()); a.face_normals.extend(b.face_normals.iter()); a.vertex_normals.extend(b.vertex_normals.iter()); a.faces_to_vertices.extend(b.faces_to_vertices.clone()); a.vertices_to_faces.extend(b.faces_to_vertices.clone()); for i in face_count..a.faces_to_vertices.len() { a.faces_to_vertices[i] = a.faces_to_vertices[i] .iter() .map(|v| *v + vertex_count) .collect(); } for i in vertex_count..a.vertices_to_faces.len() { a.vertices_to_faces[i] = a.vertices_to_faces[i] .iter() .map(|v| *v + face_count) .collect(); } a }, ) .clone() } } pub fn read_obj(path: &str, smoothing: bool) -> Result<PolygonMesh, &str> { let f = match std::fs::File::open(path) { Ok(t) => t, Err(_) => return Err("OBJ File opening failed"), }; let mut smooth_transform_matrices = vec![]; let mut 
mesh = PolygonMesh { vertex_list: vec![], vertices_to_faces: vec![], faces_to_vertices: vec![], face_normals: vec![], vertex_normals: vec![], mesh_options: MeshOptions { smooth_normals: smoothing, }, smooth_transform_matrices: None, }; for line in BufReader::new(f).lines() { let data: String = match line { Ok(t) => t.trim().to_string(), Err(_) => return Err("OBJ File reading failed"), }; if data.len() == 0 || data.starts_with("#") { continue; } let mut iter = data.split_whitespace(); match iter.next() { Some("v") => { mesh.vertex_list.push(point![ iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.) ]); mesh.vertices_to_faces.push(vec![]); } Some("f") => { let mut face_vertices: Vec<usize> = Vec::with_capacity(3); loop { if let Some(t) = iter.next() { match t.split('/').collect::<Vec<&str>>()[0].parse::<usize>() { Ok(v_num) => { face_vertices.push(v_num - 1); } Err(_) => { return Err("Integer parsing failed in face generation"); } } } else { break; } } assert!(face_vertices.len() >= 3); let pts = [ mesh.vertex_list[face_vertices[2]], mesh.vertex_list[face_vertices[1]], mesh.vertex_list[face_vertices[0]], ]; let norm: Vec3 = (pts[1] - pts[0]).cross(&(pts[2] - pts[1])); let unit_normal; if let Some(unit_norm) = na::Unit::try_new(norm, 1.0e-7) { unit_normal = unit_norm; } else {
* (std::f64::consts::PI as Float)); } mesh.vertex_normals[i] = na::Unit::new_normalize(normal); } } for i in 0..mesh.get_face_count() { assert_eq!(mesh.faces_to_vertices[i].len(), 3); } mesh.smooth_transform_matrices = Some(smooth_transform_matrices); assert_eq!(mesh.face_normals.len(), mesh.faces_to_vertices.len()); assert_eq!(mesh.vertex_list.len(), mesh.vertices_to_faces.len()); assert_eq!(mesh.vertex_list.len(), mesh.vertex_normals.len()); return Result::Ok(mesh); } pub fn get_potential_collisions(mesh: &PolygonMesh, ray: &Ray) -> Vec<RayCollision> { let mut probable_faces = vec![]; for i in 0..mesh.get_face_count() { let vertices = ( mesh.vertex_list[mesh.faces_to_vertices[i][0]], mesh.vertex_list[mesh.faces_to_vertices[i][1]], mesh.vertex_list[mesh.faces_to_vertices[i][2]], ); let normals = ( mesh.vertex_normals[mesh.faces_to_vertices[i][0]], mesh.vertex_normals[mesh.faces_to_vertices[i][1]], mesh.vertex_normals[mesh.faces_to_vertices[i][2]], ); let (edge1, edge2, h, s, q); let (a, f, u, v); edge1 = vertices.1 - vertices.0; edge2 = vertices.2 - vertices.0; h = ray.direction.cross(&edge2); a = edge1.dot(&h); if a > -EPSILON && a < EPSILON { continue; } f = 1.0 / a; s = ray.origin - vertices.0; u = f * s.dot(&h); if u < 0. || u > 1. { continue; } q = s.cross(&edge1); v = f * ray.direction.dot(&q); if v < 0. || u + v > 1. { continue; } let t = f * edge2.dot(&q); if t > 0. 
{ assert!(!t.is_nan()); let pos = ray.origin + ray.direction.scale(t); probable_faces.push(RayCollision { mesh: unsafe { std::mem::transmute(mesh) }, distance: t, face_normal: mesh.face_normals[i], position: pos, smooth_normal: if mesh.mesh_options.smooth_normals { let matrices = mesh.smooth_transform_matrices.as_ref().unwrap(); let components = matrices[i] * pos.coords.add_scalar(EPSILON); Some(UnitVec3::new_normalize( normals.0.scale(components.x) + normals.1.scale(components.y) + normals.2.scale(components.z), )) } else { None }, }); } } probable_faces.sort_unstable_by(|a, b| a.distance.partial_cmp(&b.distance).unwrap()); probable_faces }
std::panic::panic_any("Yikes, the normals didn't compute right?!"); } for i in 0..face_vertices.len() - 2 { mesh.vertices_to_faces[face_vertices[0]].push(mesh.faces_to_vertices.len()); mesh.vertices_to_faces[face_vertices[i + 1]].push(mesh.faces_to_vertices.len()); mesh.vertices_to_faces[face_vertices[i + 2]].push(mesh.faces_to_vertices.len()); if smoothing { smooth_transform_matrices.push( match na::Matrix3::from_columns(&[ mesh.vertex_list[face_vertices[0]] .coords .add_scalar(EPSILON), mesh.vertex_list[face_vertices[i + 1]] .coords .add_scalar(EPSILON), mesh.vertex_list[face_vertices[i + 2]] .coords .add_scalar(EPSILON), ]) .try_inverse() { Some(m) => m, None => return Err("Found a non-invertible matrix. Yikes."), }, ); } mesh.faces_to_vertices.push(vec![ face_vertices[0], face_vertices[i + 1], face_vertices[i + 2], ]); mesh.face_normals.push(unit_normal); } } Some("vt") => continue, Some("vn") => { mesh.vertex_normals.push( na::Unit::try_new( vector![ iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.), iter.next().unwrap_or("0").parse::<Float>().unwrap_or(0.) 
], 1e-7, ) .unwrap(), ); } _ => continue, }; } if mesh.vertex_normals.len() != mesh.vertex_list.len() { mesh.vertex_normals.resize( mesh.vertex_list.len(), na::UnitVector3::new_unchecked(vector![1., 0., 0.]), ); for (i, faces) in mesh.vertices_to_faces.iter().enumerate() { let current_vertex = mesh.vertex_list[i]; let mut normal = vector![0., 0., 0.]; for face in faces { let vertex_index = mesh.faces_to_vertices[*face] .iter() .position(|&r| r == i) .unwrap(); let prev_vertex = mesh.vertex_list[mesh.faces_to_vertices[*face][(vertex_index as isize - 1) .rem_euclid(mesh.faces_to_vertices[*face].len() as isize) as usize]]; let next_vertex = mesh.vertex_list[mesh.faces_to_vertices[*face][(vertex_index as isize + 1) .rem_euclid(mesh.faces_to_vertices[*face].len() as isize) as usize]]; let angle = (std::f64::consts::PI as Float) - (current_vertex - prev_vertex) .angle(&(next_vertex - current_vertex)) .abs(); normal += mesh.face_normals[*face].into_inner() * angle / (2.
random
[]
Rust
pallets/root-of-trust/src/lib.rs
NodleCode/PKI
748aaddb8ce3ab7c90cfa3fd2610275f00f5cb7b
#![cfg_attr(not(feature = "std"), no_std)] #[cfg(test)] mod tests; use codec::{Decode, Encode}; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, dispatch::DispatchResult, ensure, traits::{ChangeMembers, Currency, ExistenceRequirement, Get, OnUnbalanced, WithdrawReasons}, Parameter, }; use frame_system::{self as system, ensure_signed}; use sp_runtime::traits::{MaybeDisplay, MaybeSerializeDeserialize, Member}; use sp_std::{fmt::Debug, prelude::Vec}; type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance; type NegativeImbalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::NegativeImbalance; #[derive(Encode, Decode, Default, Clone, PartialEq)] pub struct RootCertificate<AccountId, CertificateId, BlockNumber> { owner: AccountId, key: CertificateId, created: BlockNumber, renewed: BlockNumber, revoked: bool, validity: BlockNumber, child_revocations: Vec<CertificateId>, } pub trait Trait: system::Trait { type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>; type Currency: Currency<Self::AccountId>; type CertificateId: Member + Parameter + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord + Default; type SlotBookingCost: Get<BalanceOf<Self>>; type SlotRenewingCost: Get<BalanceOf<Self>>; type SlotValidity: Get<Self::BlockNumber>; type FundsCollector: OnUnbalanced<NegativeImbalanceOf<Self>>; } decl_event!( pub enum Event<T> where AccountId = <T as system::Trait>::AccountId, CertificateId = <T as Trait>::CertificateId, { SlotTaken(AccountId, CertificateId), SlotRenewed(CertificateId), SlotRevoked(CertificateId), ChildSlotRevoked(CertificateId, CertificateId), } ); decl_error! { pub enum Error for Module<T: Trait> { NotAMember, SlotTaken, NotEnoughFunds, NoLongerValid, NotTheOwner, } } decl_storage! 
{ trait Store for Module<T: Trait> as RootOfTrustModule { Members get(members): Vec<T::AccountId>; Slots get(slots): map hasher(blake2_256) T::CertificateId => RootCertificate<T::AccountId, T::CertificateId, T::BlockNumber>; } } decl_module! { pub struct Module<T: Trait> for enum Call where origin: T::Origin { fn deposit_event() = default; fn book_slot(origin, certificate_id: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; ensure!(Self::is_member(&sender), Error::<T>::NotAMember); ensure!(!<Slots<T>>::contains_key(&certificate_id), Error::<T>::SlotTaken); match T::Currency::withdraw(&sender, T::SlotBookingCost::get(), WithdrawReasons::all(), ExistenceRequirement::AllowDeath) { Ok(imbalance) => T::FundsCollector::on_unbalanced(imbalance), Err(_) => Err(Error::<T>::NotEnoughFunds)?, }; let now = <system::Module<T>>::block_number(); <Slots<T>>::insert(&certificate_id, RootCertificate { owner: sender.clone(), key: certificate_id.clone(), created: now, renewed: now, revoked: false, validity: T::SlotValidity::get(), child_revocations: Vec::new(), }); Self::deposit_event(RawEvent::SlotTaken(sender, certificate_id)); Ok(()) } fn renew_slot(origin, certificate: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&certificate); ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); match T::Currency::withdraw(&sender, T::SlotRenewingCost::get(), WithdrawReasons::all(), ExistenceRequirement::AllowDeath) { Ok(imbalance) => T::FundsCollector::on_unbalanced(imbalance), Err(_) => Err(Error::<T>::NotEnoughFunds)?, }; slot.renewed = <system::Module<T>>::block_number(); <Slots<T>>::insert(&certificate, slot); Self::deposit_event(RawEvent::SlotRenewed(certificate)); Ok(()) } fn revoke_slot(origin, certificate: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&certificate); 
ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); slot.revoked = true; <Slots<T>>::insert(&certificate, slot); Self::deposit_event(RawEvent::SlotRevoked(certificate)); Ok(()) } fn revoke_child(origin, root: T::CertificateId, child: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&root); ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); ensure!(!slot.child_revocations.contains(&child), Error::<T>::NoLongerValid); slot.child_revocations.push(child.clone()); <Slots<T>>::insert(&root, slot); Self::deposit_event(RawEvent::ChildSlotRevoked(root, child)); Ok(()) } } } impl<T: Trait> Module<T> { fn is_member(who: &T::AccountId) -> bool { Self::members().contains(who) } fn is_slot_valid( slot: &RootCertificate<T::AccountId, T::CertificateId, T::BlockNumber>, ) -> bool { let owner_is_member = Self::is_member(&slot.owner); let revoked = slot.revoked; let expired = slot.renewed + slot.validity <= <system::Module<T>>::block_number(); owner_is_member && !revoked && !expired } #[allow(dead_code)] pub fn is_root_certificate_valid(cert: &T::CertificateId) -> bool { let exists = <Slots<T>>::contains_key(cert); let slot = <Slots<T>>::get(cert); exists && Self::is_slot_valid(&slot) } #[allow(dead_code)] pub fn is_child_certificate_valid(root: &T::CertificateId, child: &T::CertificateId) -> bool { let equals = root == child; let root_valid = Self::is_root_certificate_valid(root); let revoked = <Slots<T>>::get(root).child_revocations.contains(child); !equals && root_valid && !revoked } } impl<T: Trait> ChangeMembers<T::AccountId> for Module<T> { fn change_members_sorted( _incoming: &[T::AccountId], _outgoing: &[T::AccountId], new: &[T::AccountId], ) { <Members<T>>::put(new); } }
#![cfg_attr(not(feature = "std"), no_std)] #[cfg(test)] mod tests; use codec::{Decode, Encode}; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, dispatch::DispatchResult, ensure, traits::{ChangeMembers, Currency, ExistenceRequirement, Get, OnUnbalanced, WithdrawReasons}, Parameter, }; use frame_system::{self as system, ensure_signed}; use sp_runtime::traits::{MaybeDisplay, MaybeSerializeDeserialize, Member}; use sp_std::{fmt::Debug, prelude::Vec}; type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance; type NegativeImbalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::NegativeImbalance; #[derive(Encode, Decode, Default, Clone, PartialEq)] pub struct RootCertificate<AccountId, CertificateId, BlockNumber> { owner: AccountId, key: CertificateId, created: BlockNumber, renewed: BlockNumber, revoked: bool, validity: BlockNumber, child_revocations: Vec<CertificateId>, } pub trait Trait: system::Trait { type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>; type Currency: Currency<Self::AccountId>; type CertificateId: Member + Parameter + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord + Default; type SlotBookingCost: Get<BalanceOf<Self>>; type SlotRenewingCost: Get<BalanceOf<Self>>; type SlotValidity: Get<Self::BlockNumber>; type FundsCollector: OnUnbalanced<NegativeImbalanceOf<Self>>; } decl_event!( pub enum Event<T> where AccountId = <T as system::Trait>::AccountId, CertificateId = <T as Trait>::CertificateId, { SlotTaken(AccountId, CertificateId), SlotRenewed(CertificateId), SlotRevoked(CertificateId), ChildSlotRevoked(CertificateId, CertificateId), } ); decl_error! { pub enum Error for Module<T: Trait> { NotAMember, SlotTaken, NotEnoughFunds, NoLongerValid, NotTheOwner, } } decl_storage! 
{ trait Store for Module<T: Trait> as RootOfTrustModule { Members get(members): Vec<T::AccountId>; Slots get(slots): map hasher(blake2_256) T::CertificateId => RootCertificate<T::AccountId, T::CertificateId, T::BlockNumber>; } } decl_module! { pub struct Module<T: Trait> for enum Call where origin: T::Origin { fn deposit_event() = default; fn book_slot(origin, certificate_id: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; ensure!(Self::is_member(&sender), Error::<T>::NotAMember); ensure!(!<Slots<T>>::contains_key(&certificate_id), Error::<T>::SlotTaken); match T::Currency::withdraw(&sender, T::SlotBookingCost::get(), WithdrawReasons::all(), ExistenceRequirement::AllowDeath) { Ok(imbalance) => T::FundsCollector::on_unbalanced(imbalance), Err(_) => Err(Error::<T>::NotEnoughFunds)?, }; let now = <system::Module<T>>::block_number(); <Slots<T>>::insert(&certificate_id, RootCertificate { owner: sender.clone(), key: certificate_id.clone(), created: now, renewed: now, revoked: false, validity: T::SlotValidity::get(), child_revocations: Vec::new(), }); Self::deposit_event(RawEvent::SlotTaken(sender, certificate_id)); Ok(()) } fn renew_slot(origin, certificate: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&certificate); ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); match T::Currency::withdraw(&sender, T::SlotRenewingCost::get(), WithdrawReasons::all(), ExistenceRequirement::AllowDeath) { Ok(imbalance) => T::FundsCollector::on_unbalanced(imbalance), Err(_) => Err(Error::<T>::NotEnoughFunds)?, }; slot.renewed = <system::Module<T>>::block_number(); <Slots<T>>::insert(&certificate, slot); Self::deposit_event(RawEvent::SlotRenewed(certificate)); Ok(()) } fn revoke_slot(origin, certificate: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&certificate); 
ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); slot.revoked = true; <Slots<T>>::insert(&certificate, slot); Self::deposit_event(RawEvent::SlotRevoked(certificate)); Ok(()) } fn revoke_child(origin, root: T::CertificateId, child: T::CertificateId) -> DispatchResult { let sender = ensure_signed(origin)?; let mut slot = <Slots<T>>::get(&root); ensure!(Self::is_slot_valid(&slot), Error::<T>::NoLongerValid); ensure!(slot.owner == sender, Error::<T>::NotTheOwner); ensure!(!slot.child_revocations.contains(&child), Error::<T>::NoLongerValid); slot.child_revocations.push(child.clone()); <Slots<T>>::insert(&root, slot); Self::deposit_event(RawEvent::ChildSlotRevoked(root, child)); Ok(()) } } } impl<T: Trait> Module<T> { fn is_member(who: &T::AccountId) -> bool { Self::members().contains(who) } fn is_slot_valid( slot: &RootCertificate<T::AccountId, T::CertificateId, T::BlockNumber>, ) -> bool { let owner_is_member = Self::is_member(&slot.owner); let revoked = slot.revoked; let expired = slot.renewed + slot.validity <= <system::Module<T>>::block_number(); owner_is_member && !revoked && !expired } #[allow(dead_code)] pub fn is_root_certificate_valid(cert: &T::CertificateId) -> bool { let exists = <Slots<T>>::contains_key(cert); let slot = <Slots<T>>::get(cert); exists && Self::is_slot_valid(&slot) } #[allow(dead_code)]
} impl<T: Trait> ChangeMembers<T::AccountId> for Module<T> { fn change_members_sorted( _incoming: &[T::AccountId], _outgoing: &[T::AccountId], new: &[T::AccountId], ) { <Members<T>>::put(new); } }
pub fn is_child_certificate_valid(root: &T::CertificateId, child: &T::CertificateId) -> bool { let equals = root == child; let root_valid = Self::is_root_certificate_valid(root); let revoked = <Slots<T>>::get(root).child_revocations.contains(child); !equals && root_valid && !revoked }
function_block-full_function
[ { "content": "#[test]\n\nfn child_certificate_not_valid_if_revoked_in_root_certificate() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n let now = <system::Module<Test>>::block_number();\n\n <Slots<Test>>::insert(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n RootCertificate {\n\n owner: ROOT_MANAGER,\n\n key: OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n created: now,\n\n renewed: now,\n\n revoked: false,\n\n validity: SlotValidity::get(),\n\n child_revocations: vec![OFFCHAIN_CERTIFICATE_SIGNER_2],\n\n },\n\n );\n\n\n\n assert_eq!(\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 0, "score": 254229.88999467046 }, { "content": "#[test]\n\nfn root_certificate_not_valid_if_owner_is_no_longer_a_member() {\n\n new_test_ext().execute_with(|| {\n\n let now = <system::Module<Test>>::block_number();\n\n <Slots<Test>>::insert(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n RootCertificate {\n\n owner: ROOT_MANAGER,\n\n key: OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n created: now,\n\n renewed: now,\n\n revoked: false,\n\n validity: SlotValidity::get(),\n\n child_revocations: vec![],\n\n },\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 1, "score": 245343.88922529874 }, { "content": "#[test]\n\nfn child_certificate_still_valid_if_revoked_under_non_parent_certificate() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n let now = <system::Module<Test>>::block_number();\n\n <Slots<Test>>::insert(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_3,\n\n RootCertificate {\n\n owner: ROOT_MANAGER,\n\n key: OFFCHAIN_CERTIFICATE_SIGNER_3,\n\n created: now,\n\n renewed: now,\n\n revoked: false,\n\n validity: SlotValidity::get(),\n", "file_path": 
"pallets/root-of-trust/src/tests.rs", "rank": 2, "score": 236409.86627986457 }, { "content": "#[test]\n\nfn child_certificate_not_valid_if_root_certificate_not_valid() {\n\n new_test_ext().execute_with(|| {\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 3, "score": 232335.87077562453 }, { "content": "#[test]\n\nfn root_certificate_not_valid_if_expired() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n <system::Module<Test>>::set_block_number(SlotValidity::get() + 1);\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 4, "score": 222598.840202339 }, { "content": "#[test]\n\nfn root_certificate_not_valid_if_does_not_exists() {\n\n new_test_ext().execute_with(|| {\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 5, "score": 222578.05602493737 }, { "content": "#[test]\n\nfn root_certificate_not_valid_if_revoked() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n let now = <system::Module<Test>>::block_number();\n\n <Slots<Test>>::insert(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n RootCertificate {\n\n owner: ROOT_MANAGER,\n\n key: OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n created: now,\n\n renewed: now,\n\n revoked: true,\n\n validity: SlotValidity::get(),\n\n child_revocations: vec![],\n\n },\n\n );\n\n\n\n 
assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 6, "score": 222388.26647685163 }, { "content": "#[test]\n\nfn child_certificate_is_valid() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n true\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 7, "score": 218379.65700265593 }, { "content": "#[test]\n\nfn can_not_revoke_child_if_root_not_valid_anymore() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_ok!(TestModule::revoke_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::revoke_child(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n Error::<Test>::NoLongerValid\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 8, "score": 214360.8098939931 }, { "content": "#[test]\n\nfn can_not_revoke_child_if_not_owner() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::revoke_child(\n\n Origin::signed(OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n Error::<Test>::NotTheOwner\n\n );\n\n })\n\n}\n\n\n", "file_path": 
"pallets/root-of-trust/src/tests.rs", "rank": 9, "score": 214255.04925266368 }, { "content": "#[test]\n\nfn can_not_revoke_slot_if_not_owner() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::revoke_slot(\n\n Origin::signed(OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ),\n\n Error::<Test>::NotTheOwner\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 10, "score": 214255.0492526637 }, { "content": "#[test]\n\nfn can_not_revoke_slot_if_not_valid_anymore() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n // Best to way to make it invalid would be to revoke it once already!\n\n assert_ok!(TestModule::revoke_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::revoke_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n Error::<Test>::NoLongerValid\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 11, "score": 209683.52788138165 }, { "content": "#[test]\n\nfn can_not_revoke_child_if_not_valid_anymore() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_ok!(TestModule::revoke_child(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::revoke_child(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n Error::<Test>::NoLongerValid\n\n 
);\n\n })\n\n}\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 12, "score": 209683.52788138165 }, { "content": "#[test]\n\nfn root_certificate_is_valid() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n true\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 14, "score": 193534.7134974704 }, { "content": "#[test]\n\nfn can_not_renew_if_not_owner() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::renew_slot(\n\n Origin::signed(OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ),\n\n Error::<Test>::NotTheOwner\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 16, "score": 185677.5415209374 }, { "content": "#[test]\n\nfn revoke_child_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_ok!(TestModule::revoke_child(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ));\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n true\n\n );\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 17, "score": 185406.5249825985 }, { "content": "#[test]\n\nfn revoke_slot_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n 
OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_ok!(TestModule::revoke_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n false\n\n );\n\n assert_eq!(\n\n <Slots<Test>>::get(&OFFCHAIN_CERTIFICATE_SIGNER_1).revoked,\n\n true\n\n );\n\n assert_eq!(TestModule::is_root_certificate_valid(&ROOT_MANAGER), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 18, "score": 185406.5249825985 }, { "content": "#[test]\n\nfn member_can_buy_slots() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).key,\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n );\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).owner,\n\n ROOT_MANAGER\n\n );\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).created,\n\n <system::Module<Test>>::block_number()\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 19, "score": 181672.19695388377 }, { "content": "#[test]\n\nfn non_member_can_not_buy_slots() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_noop!(\n\n TestModule::book_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n Error::<Test>::NotAMember\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 20, "score": 178035.66511332558 }, { "content": "/// The module's configuration trait.\n\npub trait Trait: system::Trait {\n\n type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;\n\n\n\n /// The currency used to represent the voting power\n\n type Currency: ReservableCurrency<Self::AccountId>;\n\n /// Minimum amount of tokens required to apply\n\n type MinimumApplicationAmount: 
Get<BalanceOf<Self>>;\n\n /// Minimum amount of tokens required to counter an application\n\n type MinimumCounterAmount: Get<BalanceOf<Self>>;\n\n /// Minimum amount of tokens required to challenge a member's application\n\n type MinimumChallengeAmount: Get<BalanceOf<Self>>;\n\n /// How many blocks we need to wait for before validating an application\n\n type FinalizeApplicationPeriod: Get<Self::BlockNumber>;\n\n /// How many blocks we need to wait for before finalizing a challenge\n\n type FinalizeChallengePeriod: Get<Self::BlockNumber>;\n\n /// How do we slash loosing parties when challenges are finalized, application's\n\n /// member will be slashed at the same value\n\n type LoosersSlash: Get<Perbill>;\n\n /// Hook that we call whenever some members are added or removed from the TCR\n\n type ChangeMembers: ChangeMembers<Self::AccountId>;\n", "file_path": "pallets/tcr/src/lib.rs", "rank": 21, "score": 174841.25344994 }, { "content": " pub trait RootOfTrustApi<CertificateId> where\n\n CertificateId: codec::Codec\n\n {\n\n fn is_root_certificate_valid(cert: &CertificateId) -> bool;\n\n fn is_child_certificate_valid(root: &CertificateId, child: &CertificateId) -> bool;\n\n }\n\n}\n", "file_path": "pallets/root-of-trust/rpc/runtime-api/src/lib.rs", "rank": 22, "score": 172259.59421665085 }, { "content": "#[rpc]\n\npub trait RootOfTrustApi<BlockHash, CertificateId>\n\nwhere\n\n CertificateId: Codec,\n\n{\n\n #[rpc(name = \"rootOfTrust_isRootCertificateValid\")]\n\n fn is_root_certificate_valid(&self, cert: CertificateId, at: Option<BlockHash>)\n\n -> Result<bool>;\n\n #[rpc(name = \"rootOfTrust_isChildCertificateValid\")]\n\n fn is_child_certificate_valid(\n\n &self,\n\n root: CertificateId,\n\n child: CertificateId,\n\n at: Option<BlockHash>,\n\n ) -> Result<bool>;\n\n}\n\n\n\npub struct RootOfTrust<C, M> {\n\n client: Arc<C>,\n\n _marker: std::marker::PhantomData<M>,\n\n}\n", "file_path": "pallets/root-of-trust/rpc/src/lib.rs", "rank": 23, "score": 
172259.59421665085 }, { "content": "type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance;\n", "file_path": "pallets/tcr/src/lib.rs", "rank": 24, "score": 165418.29028529054 }, { "content": "#[test]\n\nfn child_invalid_if_equal_root() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 25, "score": 157703.46705775912 }, { "content": "#[test]\n\nfn renew_update_fields() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n assert_ok!(TestModule::renew_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).renewed,\n\n <system::Module<Test>>::block_number()\n\n );\n\n assert_eq!(\n\n BalancesModule::free_balance(ROOT_MANAGER),\n\n MinimumApplicationAmount::get()\n\n ); // Took SlotBookingCost + SlotRenewingCost\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 26, "score": 152231.00514875428 }, { "content": "#[test]\n\nfn can_not_renew_if_invalid() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n <system::Module<Test>>::set_block_number(SlotValidity::get() + 1);\n\n\n\n assert_noop!(\n\n TestModule::renew_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n Error::<Test>::NoLongerValid\n\n 
);\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 27, "score": 152231.0051487543 }, { "content": "#[test]\n\nfn can_not_renew_if_not_enough_funds() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n BalancesModule::make_free_balance_be(&ROOT_MANAGER, 0);\n\n\n\n assert_noop!(\n\n TestModule::renew_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n Error::<Test>::NotEnoughFunds\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 28, "score": 149202.31882253088 }, { "content": "#[test]\n\nfn can_not_buy_slot_twice() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n assert_noop!(\n\n TestModule::book_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n Error::<Test>::SlotTaken\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 29, "score": 149088.9679711959 }, { "content": "#[test]\n\nfn can_not_buy_slot_if_not_enough_funds() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_ok!(TestModule::book_slot(\n\n Origin::signed(ROOT_MANAGER),\n\n OFFCHAIN_CERTIFICATE_SIGNER_1\n\n ));\n\n\n\n BalancesModule::make_free_balance_be(&ROOT_MANAGER, 0);\n\n\n\n assert_noop!(\n\n TestModule::book_slot(Origin::signed(ROOT_MANAGER), OFFCHAIN_CERTIFICATE_SIGNER_2),\n\n Error::<Test>::NotEnoughFunds\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 30, "score": 146202.29189859913 }, { "content": "type TestModule = Module<Test>;\n\n\n\nconst ROOT_MANAGER: u64 = 1;\n\nconst OFFCHAIN_CERTIFICATE_SIGNER_1: u64 = 2;\n\nconst OFFCHAIN_CERTIFICATE_SIGNER_2: u64 = 
3;\n\nconst OFFCHAIN_CERTIFICATE_SIGNER_3: u64 = 4;\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 31, "score": 142493.70187675295 }, { "content": "/// Helper function to generate an authority key for Aura\n\npub fn get_authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {\n\n (get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s))\n\n}\n\n\n\nimpl Alternative {\n\n /// Get an actual chain config from one of the alternatives.\n\n pub(crate) fn load(self) -> Result<ChainSpec, String> {\n\n Ok(match self {\n\n Alternative::Development => ChainSpec::from_genesis(\n\n \"Development\",\n\n \"dev\",\n\n || {\n\n testnet_genesis(\n\n vec![get_authority_keys_from_seed(\"Alice\")],\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n vec![\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice//stash\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob//stash\"),\n", "file_path": "node/src/chain_spec.rs", "rank": 32, "score": 140434.8485233764 }, { "content": "type TcrModule = pallet_tcr::Module<Test>;\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 34, "score": 130199.47496265332 }, { "content": "type BalancesModule = pallet_balances::Module<Test>;\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 35, "score": 130199.47496265332 }, { "content": "fn do_register() {\n\n assert_ok!(TcrModule::apply(\n\n Origin::signed(ROOT_MANAGER),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n <TcrModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 36, "score": 123623.79583898695 }, { "content": "fn allocate_balances() {\n\n let mut total_imbalance = <PositiveImbalanceOf<Test>>::zero();\n\n let 
r_manager = <Test as Trait>::Currency::deposit_creating(\n\n &ROOT_MANAGER,\n\n MinimumApplicationAmount::get() + SlotBookingCost::get() + SlotRenewingCost::get(),\n\n );\n\n total_imbalance.subsume(r_manager);\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 37, "score": 121152.46155549226 }, { "content": "#[test]\n\nfn can_not_vote_if_challenge_does_not_exists() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_noop!(\n\n TestModule::vote(Origin::signed(VOTER_FOR), CANDIDATE, true, 100),\n\n Error::<Test>::ChallengeNotFound\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 38, "score": 120086.32385545847 }, { "content": "#[test]\n\nfn can_challenge_member_application() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n assert_eq!(MEMBERS.with(|m| m.borrow().clone()), vec![CANDIDATE]);\n\n\n\n assert_ok!(TestModule::challenge(\n\n Origin::signed(CHALLENGER_2),\n\n CANDIDATE,\n\n MinimumChallengeAmount::get()\n\n ));\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 39, "score": 119969.61433918717 }, { "content": "#[test]\n\nfn tcr_membership_propagate() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n do_register();\n\n\n\n assert_eq!(TestModule::is_member(&ROOT_MANAGER), true);\n\n assert_eq!(TestModule::is_member(&OFFCHAIN_CERTIFICATE_SIGNER_1), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 40, "score": 118803.82677495551 }, { "content": "#[test]\n\nfn can_not_challenge_non_member_application() {\n\n new_test_ext().execute_with(|| 
{\n\n assert_noop!(\n\n TestModule::challenge(\n\n Origin::signed(CHALLENGER_2),\n\n CANDIDATE,\n\n MinimumChallengeAmount::get()\n\n ),\n\n Error::<Test>::MemberNotFound\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 41, "score": 117700.86601318243 }, { "content": "type PositiveImbalanceOf<T> =\n\n <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::PositiveImbalance;\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 42, "score": 117351.0711120188 }, { "content": "/// Helper function to generate an account ID from seed\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 43, "score": 117181.32622010706 }, { "content": "// This function basically just builds a genesis storage key/value store according to\n\n// our desired mockup.\n\nfn new_test_ext() -> sp_io::TestExternalities {\n\n system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap()\n\n .into()\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 44, "score": 116583.12624651527 }, { "content": "#[test]\n\nfn can_not_challenge_member_applicaton_if_not_enough_funds() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n\n\n assert_noop!(TestModule::challenge(\n\n Origin::signed(CHALLENGER_2),\n\n CANDIDATE,\n\n MinimumChallengeAmount::get() + 1\n\n ), Error::<Test>::NotEnoughFunds);\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 45, "score": 
115536.54159305952 }, { "content": "#[test]\n\nfn can_not_challenge_member_applicaton_if_not_big_enough_deposit() {\n\n new_test_ext().execute_with(|| {\n\n assert_noop!(\n\n TestModule::challenge(\n\n Origin::signed(CHALLENGER_2),\n\n CANDIDATE,\n\n MinimumChallengeAmount::get() - 1\n\n ),\n\n Error::<Test>::DepositTooSmall\n\n );\n\n })\n\n}\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 46, "score": 113469.59376813081 }, { "content": "type TestModule = Module<Test>;\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 47, "score": 112302.41101430071 }, { "content": "#[test]\n\nfn finalize_challenge_if_enough_time_elapsed_drop_and_kill_member() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n assert_eq!(MEMBERS.with(|m| m.borrow().clone()), vec![CANDIDATE]);\n\n\n\n assert_ok!(TestModule::challenge(\n\n Origin::signed(CHALLENGER_2),\n\n CANDIDATE,\n\n MinimumChallengeAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeChallengePeriod::get() + <system::Module<Test>>::block_number());\n\n assert_eq!(MEMBERS.with(|m| m.borrow().clone()), vec![]);\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Members<Test>>::contains_key(CANDIDATE), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 48, "score": 111493.59541675186 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(\n\n config: Configuration<GenesisConfig>,\n\n) -> Result<impl AbstractService, ServiceError> {\n\n let 
inherent_data_providers = InherentDataProviders::new();\n\n\n\n ServiceBuilder::new_light::<Block, RuntimeApi, Executor>(config)?\n\n .with_select_chain(|_config, backend| Ok(LongestChain::new(backend.clone())))?\n\n .with_transaction_pool(|config, client, fetcher| {\n\n let fetcher = fetcher\n\n .ok_or_else(|| \"Trying to start light transaction pool without active fetcher\")?;\n\n\n\n let pool_api = sc_transaction_pool::LightChainApi::new(client.clone(), fetcher.clone());\n\n let pool = sc_transaction_pool::BasicPool::with_revalidation_type(\n\n config,\n\n Arc::new(pool_api),\n\n sc_transaction_pool::RevalidationType::Light,\n\n );\n\n Ok(pool)\n\n })?\n\n .with_import_queue_and_fprb(\n", "file_path": "node/src/service.rs", "rank": 49, "score": 109597.07741292345 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(\n\n config: Configuration<GenesisConfig>,\n\n) -> Result<impl AbstractService, ServiceError> {\n\n let is_authority = config.roles.is_authority();\n\n let force_authoring = config.force_authoring;\n\n let name = config.name.clone();\n\n let disable_grandpa = config.disable_grandpa;\n\n\n\n // sentry nodes announce themselves as authorities to the network\n\n // and should run the same protocols authorities do, but it should\n\n // never actively participate in any consensus process.\n\n let participates_in_consensus = is_authority && !config.sentry_mode;\n\n\n\n let (builder, mut import_setup, inherent_data_providers) = new_full_start!(config);\n\n\n\n let (block_import, grandpa_link) = import_setup.take().expect(\n\n \"Link Half and Block Import are present for Full Services or setup failed before. 
qed\",\n\n );\n\n\n\n let service = builder\n", "file_path": "node/src/service.rs", "rank": 50, "score": 109597.07741292345 }, { "content": " trait Store for Module<T: Trait> as TcrModule {\n\n Applications get(applications): linked_map hasher(blake2_256) T::AccountId => Application<T::AccountId, BalanceOf<T>, T::BlockNumber>;\n\n Challenges get(challenges): linked_map hasher(blake2_256) T::AccountId => Application<T::AccountId, BalanceOf<T>, T::BlockNumber>;\n\n Members get(members): linked_map hasher(blake2_256) T::AccountId => Application<T::AccountId, BalanceOf<T>, T::BlockNumber>;\n\n }\n\n}\n\n\n\ndecl_module! {\n\n /// The module declaration.\n\n pub struct Module<T: Trait> for enum Call where origin: T::Origin {\n\n fn deposit_event() = default;\n\n\n\n pub fn apply(origin, metadata: Vec<u8>, deposit: BalanceOf<T>) -> DispatchResult {\n\n let sender = ensure_signed(origin)?;\n\n ensure!(deposit >= T::MinimumApplicationAmount::get(), Error::<T>::DepositTooSmall);\n\n ensure!(!<Applications<T>>::contains_key(sender.clone()), Error::<T>::ApplicationPending);\n\n ensure!(!<Challenges<T>>::contains_key(sender.clone()), Error::<T>::ApplicationChallenged);\n\n\n\n Self::reserve_for(sender.clone(), deposit)?;\n\n\n", "file_path": "pallets/tcr/src/lib.rs", "rank": 51, "score": 103734.50096909709 }, { "content": "type BalancesModule = pallet_balances::Module<Test>;\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 52, "score": 101433.8839380355 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n NativeVersion {\n\n runtime_version: VERSION,\n\n can_author_with: Default::default(),\n\n }\n\n}\n\n\n\nparameter_types! 
{\n\n pub const BlockHashCount: BlockNumber = 250;\n\n pub const MaximumBlockWeight: Weight = 1_000_000_000;\n\n pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);\n\n pub const MaximumBlockLength: u32 = 5 * 1024 * 1024;\n\n pub const Version: RuntimeVersion = VERSION;\n\n}\n\n\n\nimpl system::Trait for Runtime {\n\n /// The identifier used to distinguish between accounts.\n\n type AccountId = AccountId;\n\n /// The aggregated dispatch type that is available for extrinsics.\n\n type Call = Call;\n", "file_path": "runtime/src/lib.rs", "rank": 53, "score": 100588.8196484652 }, { "content": "/// Helper function to generate a crypto pair from seed\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 54, "score": 97851.86781024824 }, { "content": "const validateAndStoreNewCertificate = (keystore, req, res) => {\n\n const certificate = req.body.certificate;\n\n if (certificate === undefined) {\n\n badRequest(res, 'missing certificate in post body');\n\n return;\n\n }\n\n\n\n // Before burning the certificate into the device (aka saving\n\n // it locally for us) we verify that it is for our own device.\n\n // We trust the issuer but want to make sure it is targeted at\n\n // us.\n\n // We wrap the call in a try catch as a decoding error may happen\n\n // with malicious entries.\n\n let invalidReason = '';\n\n const valid = Certificate.verifyCertificateWithoutIssuerChecks(certificate, (unusedCert, reason) => {\n\n invalidReason = reason;\n\n })\n\n\n\n if (!valid) {\n\n badRequest(res, `Invalid certificate: ${invalidReason}`);\n\n return;\n\n }\n\n\n\n const decoded = Certificate.decodeCertificate(certificate);\n\n const forThisDevice = decoded.payload.deviceAddress == keystore.account.address;\n\n if (!forThisDevice) {\n\n badRequest(res, 
errors.errNotForThisDevice);\n\n return;\n\n }\n\n\n\n keystore.saveCertificate(certificate);\n\n res.status(200).send({ accepted: true });\n", "file_path": "nodejs/packages/firmware/handlers.js", "rank": 55, "score": 90954.11264526294 }, { "content": "\tasync rootAndChildValid(root, child) {\n\n\t\treturn await this.api.rpc.rootOfTrust.isChildCertificateValid(root, child)\n", "file_path": "nodejs/packages/pki/runtime.js", "rank": 56, "score": 90075.17147228691 }, { "content": "#[test]\n\nfn counter_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get()\n\n ));\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), true);\n\n\n\n assert_eq!(\n\n BalancesModule::reserved_balance(CHALLENGER_1),\n\n MinimumCounterAmount::get()\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 57, "score": 89791.27058404488 }, { "content": "#[test]\n\nfn apply_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get()\n\n ));\n\n assert_eq!(\n\n TestModule::applications(CANDIDATE).candidate_deposit,\n\n MinimumApplicationAmount::get()\n\n );\n\n assert_eq!(\n\n BalancesModule::reserved_balance(CANDIDATE),\n\n MinimumApplicationAmount::get()\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 58, "score": 89791.27058404488 }, { "content": "fn allocate_balances() {\n\n let mut total_imbalance = <PositiveImbalanceOf<Test>>::zero();\n\n let r_candidate =\n\n <Test as Trait>::Currency::deposit_creating(&CANDIDATE, MinimumApplicationAmount::get());\n\n let r_challenger_1 =\n\n 
<Test as Trait>::Currency::deposit_creating(&CHALLENGER_1, MinimumCounterAmount::get());\n\n let r_challenger_2 =\n\n <Test as Trait>::Currency::deposit_creating(&CHALLENGER_2, MinimumChallengeAmount::get());\n\n let r_voter_for = <Test as Trait>::Currency::deposit_creating(&VOTER_FOR, 1000);\n\n let r_voter_against = <Test as Trait>::Currency::deposit_creating(&VOTER_AGAINST, 1000);\n\n total_imbalance.subsume(r_candidate);\n\n total_imbalance.subsume(r_challenger_1);\n\n total_imbalance.subsume(r_challenger_2);\n\n total_imbalance.subsume(r_voter_for);\n\n total_imbalance.subsume(r_voter_against);\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 59, "score": 89787.80753945837 }, { "content": "// This function basically just builds a genesis storage key/value store according to\n\n// our desired mockup.\n\nfn new_test_ext() -> sp_io::TestExternalities {\n\n system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap()\n\n .into()\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 60, "score": 88312.57248442828 }, { "content": "#[test]\n\nfn lock_unlock_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_eq!(\n\n BalancesModule::usable_balance(CANDIDATE),\n\n MinimumApplicationAmount::get()\n\n );\n\n\n\n assert_ok!(TestModule::reserve_for(\n\n CANDIDATE,\n\n MinimumApplicationAmount::get() / 2\n\n ));\n\n assert_eq!(\n\n BalancesModule::usable_balance(CANDIDATE),\n\n MinimumApplicationAmount::get() / 2\n\n );\n\n assert_ok!(TestModule::reserve_for(\n\n CANDIDATE,\n\n MinimumApplicationAmount::get() / 2\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 61, "score": 88156.37046599112 }, { "content": "#[test]\n\nfn can_not_reapply_while_challenged() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n 
Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get()\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get()\n\n ),\n\n Error::<Test>::ApplicationChallenged\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 62, "score": 88156.37046599112 }, { "content": "#[test]\n\nfn can_not_apply_twice() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get()\n\n ));\n\n assert_noop!(\n\n TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get()\n\n ),\n\n Error::<Test>::ApplicationPending\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 63, "score": 88156.37046599112 }, { "content": "#[test]\n\nfn does_not_finalize_challenged_application() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), true);\n\n assert_eq!(<Members<Test>>::contains_key(CANDIDATE), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 64, "score": 88156.37046599112 }, { "content": "type PositiveImbalanceOf<T> =\n\n <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::PositiveImbalance;\n\n\n\nconst CANDIDATE: u64 = 1;\n\nconst CHALLENGER_1: u64 = 2;\n\nconst CHALLENGER_2: u64 = 
3;\n\nconst VOTER_FOR: u64 = 4;\n\nconst VOTER_AGAINST: u64 = 5;\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 65, "score": 86732.36733155951 }, { "content": "#[test]\n\nfn can_not_apply_if_deposit_is_too_low() {\n\n new_test_ext().execute_with(|| {\n\n assert_noop!(\n\n TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get() - 1\n\n ),\n\n Error::<Test>::DepositTooSmall\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 66, "score": 86600.35069252276 }, { "content": "#[test]\n\nfn can_not_counter_unexisting_application() {\n\n new_test_ext().execute_with(|| {\n\n assert_noop!(\n\n TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get()\n\n ),\n\n Error::<Test>::ApplicationNotFound\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 67, "score": 86600.35069252276 }, { "content": "#[test]\n\nfn vote_positive_and_negative_works() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::vote(\n\n Origin::signed(VOTER_FOR),\n\n CANDIDATE,\n\n true,\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 68, "score": 86600.35069252276 }, { "content": "#[test]\n\nfn can_not_apply_if_not_enough_tokens() {\n\n new_test_ext().execute_with(|| {\n\n assert_noop!(\n\n TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get()\n\n ),\n\n Error::<Test>::NotEnoughFunds\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 69, "score": 86600.35069252276 }, { "content": "#[test]\n\nfn can_not_deposit_if_not_enough_funds() {\n\n new_test_ext().execute_with(|| {\n\n 
allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n assert_noop!(\n\n TestModule::vote(Origin::signed(VOTER_FOR), CANDIDATE, true, 1001),\n\n Error::<Test>::NotEnoughFunds\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 70, "score": 86600.35069252276 }, { "content": "/// Parse and run command line arguments\n\npub fn run(version: VersionInfo) -> sc_cli::Result<()> {\n\n\tlet opt = sc_cli::from_args::<Cli>(&version);\n\n\n\n\tlet mut config = sc_service::Configuration::from_version(&version);\n\n\n\n\tmatch opt.subcommand {\n\n\t\tSome(subcommand) => {\n\n\t\t\tsubcommand.init(&version)?;\n\n\t\t\tsubcommand.update_config(&mut config, chain_spec::load_spec, &version)?;\n\n\t\t\tsubcommand.run(\n\n\t\t\t\tconfig,\n\n\t\t\t\t|config: _| Ok(new_full_start!(config).0),\n\n\t\t\t)\n\n\t\t},\n\n\t\tNone => {\n\n\t\t\topt.run.init(&version)?;\n\n\t\t\topt.run.update_config(&mut config, chain_spec::load_spec, &version)?;\n\n\t\t\topt.run.run(\n\n\t\t\t\tconfig,\n\n\t\t\t\tservice::new_light,\n\n\t\t\t\tservice::new_full,\n\n\t\t\t\t&version,\n\n\t\t\t)\n\n\t\t},\n\n\t}\n\n}\n", "file_path": "node/src/command.rs", "rank": 72, "score": 85799.03026596786 }, { "content": "#[test]\n\nfn can_not_counter_application_if_not_enough_funds() {\n\n new_test_ext().execute_with(|| {\n\n <Applications<Test>>::insert(\n\n CANDIDATE,\n\n Application {\n\n candidate: CANDIDATE,\n\n candidate_deposit: 0,\n\n metadata: vec![],\n\n challenger: None,\n\n challenger_deposit: None,\n\n votes_for: None,\n\n voters_for: vec![],\n\n votes_against: None,\n\n voters_against: vec![],\n\n created_block: <system::Module<Test>>::block_number(),\n\n challenged_block: <system::Module<Test>>::block_number(),\n\n },\n\n );\n\n\n\n assert_noop!(\n\n 
TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get()\n\n ),\n\n Error::<Test>::NotEnoughFunds\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 73, "score": 85117.63703049454 }, { "content": "#[test]\n\nfn can_not_counter_application_if_deposit_too_low() {\n\n new_test_ext().execute_with(|| {\n\n assert_noop!(\n\n TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get() - 1\n\n ),\n\n Error::<Test>::DepositTooSmall\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 74, "score": 85117.63703049454 }, { "content": "#[test]\n\nfn does_not_finalize_application_if_not_enough_time_elapsed() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number() - 1);\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), true);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Members<Test>>::contains_key(CANDIDATE), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 75, "score": 85117.63703049454 }, { "content": "#[test]\n\nfn does_not_finalize_challenge_if_not_enough_time_elapsed() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeChallengePeriod::get() + 
<system::Module<Test>>::block_number() - 1);\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), true);\n\n assert_eq!(<Members<Test>>::contains_key(CANDIDATE), false);\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 76, "score": 85117.63703049454 }, { "content": "#[test]\n\nfn finalize_application_if_not_challenged_and_enough_time_elapsed() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n <TestModule as sp_runtime::traits::OnFinalize<<Test as system::Trait>::BlockNumber>>::on_finalize(FinalizeApplicationPeriod::get() + <system::Module<Test>>::block_number());\n\n assert_eq!(MEMBERS.with(|m| m.borrow().clone()), vec![CANDIDATE]);\n\n\n\n assert_eq!(<Applications<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Challenges<Test>>::contains_key(CANDIDATE), false);\n\n assert_eq!(<Members<Test>>::contains_key(CANDIDATE), true);\n\n\n\n assert_eq!(BalancesModule::usable_balance(CANDIDATE), MinimumApplicationAmount::get());\n\n })\n\n}\n\n\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 77, "score": 83703.16837833308 }, { "content": "#[test]\n\nfn finalize_challenge_if_enough_time_elapsed_accept() {\n\n new_test_ext().execute_with(|| {\n\n allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::vote(\n\n Origin::signed(VOTER_FOR),\n\n CANDIDATE,\n\n true,\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 78, "score": 83703.16837833308 }, { "content": "#[test]\n\nfn finalize_challenge_if_enough_time_elapsed_drop() {\n\n new_test_ext().execute_with(|| {\n\n 
allocate_balances();\n\n\n\n assert_ok!(TestModule::apply(\n\n Origin::signed(CANDIDATE),\n\n vec![],\n\n MinimumApplicationAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::counter(\n\n Origin::signed(CHALLENGER_1),\n\n CANDIDATE,\n\n MinimumCounterAmount::get(),\n\n ));\n\n\n\n assert_ok!(TestModule::vote(\n\n Origin::signed(VOTER_FOR),\n\n CANDIDATE,\n\n true,\n", "file_path": "pallets/tcr/src/tests.rs", "rank": 79, "score": 83703.16837833308 }, { "content": "pub fn load_spec(id: &str) -> Result<Option<ChainSpec>, String> {\n\n Ok(match Alternative::from(id) {\n\n Some(spec) => Some(spec.load()?),\n\n None => None,\n\n })\n\n}\n", "file_path": "node/src/chain_spec.rs", "rank": 80, "score": 77867.94394026499 }, { "content": "\tstatic decodeCertificate(encodedCertificate) {\n\n\t\tconst buff = Buffer.from(encodedCertificate, 'base64');\n\n\t\tconst json = buff.toString('ascii');\n\n\t\treturn JSON.parse(json);\n", "file_path": "nodejs/packages/pki/certificate.js", "rank": 81, "score": 75464.80213513161 }, { "content": "\tsignAndEncode() {\n\n\t\tconst signed = this.sign();\n\n\n\n\t\treturn Buffer.from(JSON.stringify(signed)).toString('base64');\n", "file_path": "nodejs/packages/pki/certificate.js", "rank": 82, "score": 71115.01696188896 }, { "content": " pub const SlotValidity: u64 = 100000;\n\n}\n\nimpl Trait for Test {\n\n type Event = ();\n\n type Currency = pallet_balances::Module<Self>;\n\n type CertificateId = <Test as system::Trait>::AccountId;\n\n type SlotBookingCost = SlotBookingCost;\n\n type SlotRenewingCost = SlotRenewingCost;\n\n type SlotValidity = SlotValidity;\n\n type FundsCollector = ();\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 83, "score": 67985.33298599478 }, { "content": " );\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).renewed,\n\n <system::Module<Test>>::block_number()\n\n );\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).revoked,\n\n false\n\n );\n\n 
assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).validity,\n\n SlotValidity::get(),\n\n );\n\n assert_eq!(\n\n TestModule::slots(OFFCHAIN_CERTIFICATE_SIGNER_1).child_revocations,\n\n vec![],\n\n );\n\n\n\n assert_eq!(\n\n BalancesModule::free_balance(ROOT_MANAGER),\n\n MinimumApplicationAmount::get() + SlotRenewingCost::get()\n\n ); // Took SlotBookingCost\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 84, "score": 67982.76735844398 }, { "content": "use super::*;\n\n\n\nuse frame_support::{\n\n assert_noop, assert_ok, impl_outer_origin, parameter_types, traits::Imbalance, weights::Weight,\n\n};\n\nuse sp_core::H256;\n\nuse sp_runtime::{\n\n testing::Header,\n\n traits::{BlakeTwo256, IdentityLookup},\n\n Perbill,\n\n};\n\n\n\nimpl_outer_origin! {\n\n pub enum Origin for Test {}\n\n}\n\n\n\n// For testing the module, we construct most of a mock runtime. This means\n\n// first constructing a configuration type (`Test`) which `impl`s each of the\n\n// configuration traits of modules we want to use.\n\n#[derive(Clone, Eq, PartialEq)]\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 85, "score": 67979.6864347333 }, { "content": " child_revocations: vec![OFFCHAIN_CERTIFICATE_SIGNER_2],\n\n },\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n true\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_3),\n\n true\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n true\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 86, "score": 67978.06662278702 }, { "content": " assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n false\n\n );\n\n assert_eq!(\n\n 
<Slots<Test>>::get(&OFFCHAIN_CERTIFICATE_SIGNER_1)\n\n .child_revocations\n\n .contains(&OFFCHAIN_CERTIFICATE_SIGNER_2),\n\n true\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 87, "score": 67978.05954639931 }, { "content": " type MaximumBlockLength = MaximumBlockLength;\n\n type AvailableBlockRatio = AvailableBlockRatio;\n\n type Version = ();\n\n type ModuleToIndex = ();\n\n type AccountData = pallet_balances::AccountData<u64>;\n\n type OnNewAccount = ();\n\n type OnKilledAccount = ();\n\n}\n\nparameter_types! {\n\n pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(33);\n\n}\n\nimpl pallet_balances::Trait for Test {\n\n type Balance = u64;\n\n type Event = ();\n\n type DustRemoval = ();\n\n type AccountStore = system::Module<Test>;\n\n type ExistentialDeposit = ();\n\n}\n\nparameter_types! {\n\n pub const MinimumApplicationAmount: u64 = 100;\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 88, "score": 67976.7834906414 }, { "content": " TestModule::is_root_certificate_valid(&OFFCHAIN_CERTIFICATE_SIGNER_1),\n\n true\n\n );\n\n\n\n assert_eq!(\n\n TestModule::is_child_certificate_valid(\n\n &OFFCHAIN_CERTIFICATE_SIGNER_1,\n\n &OFFCHAIN_CERTIFICATE_SIGNER_2\n\n ),\n\n false\n\n );\n\n })\n\n}\n\n\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 89, "score": 67976.69734479279 }, { "content": "pub struct Test;\n\nparameter_types! 
{\n\n pub const BlockHashCount: u64 = 250;\n\n pub const MaximumBlockWeight: Weight = 1024;\n\n pub const MaximumBlockLength: u32 = 2 * 1024;\n\n pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);\n\n}\n\nimpl system::Trait for Test {\n\n type Origin = Origin;\n\n type Call = ();\n\n type Index = u64;\n\n type BlockNumber = u64;\n\n type Hash = H256;\n\n type Hashing = BlakeTwo256;\n\n type AccountId = u64;\n\n type Lookup = IdentityLookup<Self::AccountId>;\n\n type Header = Header;\n\n type Event = ();\n\n type BlockHashCount = BlockHashCount;\n\n type MaximumBlockWeight = MaximumBlockWeight;\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 90, "score": 67975.89649454749 }, { "content": " pub const MinimumCounterAmount: u64 = 1000;\n\n pub const MinimumChallengeAmount: u64 = 10000;\n\n pub const FinalizeApplicationPeriod: u64 = 100;\n\n pub const FinalizeChallengePeriod: u64 = 101; // Happens later to ease unit tests\n\n pub const LoosersSlash: Perbill = Perbill::from_percent(50);\n\n}\n\nimpl pallet_tcr::Trait for Test {\n\n type Event = ();\n\n type Currency = pallet_balances::Module<Self>;\n\n type MinimumApplicationAmount = MinimumApplicationAmount;\n\n type MinimumCounterAmount = MinimumCounterAmount;\n\n type MinimumChallengeAmount = MinimumChallengeAmount;\n\n type FinalizeApplicationPeriod = FinalizeApplicationPeriod;\n\n type FinalizeChallengePeriod = FinalizeChallengePeriod;\n\n type LoosersSlash = LoosersSlash;\n\n type ChangeMembers = TestModule;\n\n}\n\nparameter_types! 
{\n\n pub const SlotBookingCost: u64 = 1000;\n\n pub const SlotRenewingCost: u64 = 10000;\n", "file_path": "pallets/root-of-trust/src/tests.rs", "rank": 91, "score": 67975.35661927666 }, { "content": "const errors = require('./errors');\n", "file_path": "nodejs/packages/pki/certificate.js", "rank": 92, "score": 64855.58068453936 }, { "content": "const errors = require('./errors');\n", "file_path": "nodejs/packages/pki/test.js", "rank": 93, "score": 62730.759453928804 }, { "content": "const errors = require('./errors');\n", "file_path": "nodejs/packages/firmware/test.js", "rank": 94, "score": 62730.759453928804 }, { "content": "\tasync renewSlot(slotAddress) {\n\n\t\treturn await this.api.tx.rootOfTrust\n\n\t\t\t.renewSlot(slotAddress)\n\n\t\t\t.signAndSend(this.signer)\n", "file_path": "nodejs/packages/pki/runtime.js", "rank": 95, "score": 62234.270842905884 }, { "content": "\tasync revokeChild(root, child) {\n\n\t\treturn await this.api.tx.rootOfTrust\n\n\t\t\t.revokeChild(root, child)\n\n\t\t\t.signAndSend(this.signer)\n", "file_path": "nodejs/packages/pki/runtime.js", "rank": 96, "score": 62107.84712022689 }, { "content": "\tasync revokeSlot(slotAddress) {\n\n\t\treturn await this.api.tx.rootOfTrust\n\n\t\t\t.revokeSlot(slotAddress)\n\n\t\t\t.signAndSend(this.signer)\n", "file_path": "nodejs/packages/pki/runtime.js", "rank": 97, "score": 62107.84712022689 }, { "content": "const createKeystore = () => {\n\n const randomKeystoreName = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);\n\n const keystorePath = `/tmp/${randomKeystoreName}.json`;\n\n const keystore = new Keystore(keystorePath);\n\n\n\n return keystore;\n", "file_path": "nodejs/packages/client/test.js", "rank": 98, "score": 61361.60600183933 }, { "content": "fn main() {\n\n\tgenerate_cargo_keys(ConstantsFlags::SHA_SHORT).expect(ERROR_MSG);\n\n\n\n\tbuild_script_utils::rerun_if_git_head_changed();\n\n}\n", "file_path": "node/build.rs", "rank": 99, "score": 
60164.88961094343 } ]
Rust
src/particle.rs
n3f4s/particule_generator
731ee8d834f5a458b3ee78b7849b60ccb81d659b
use vec3d::Vec3; use point3d::Point3; use drawable::Drawable; use sdl2::render::Canvas; use sdl2::video::Window; use sdl2::surface::Surface; use sdl2::gfx::primitives::DrawRenderer; static PARTICLE_DENSITY : f64 = 1.0; #[derive(Debug, Default, PartialEq, Copy, Clone)] pub struct Particle { position: Point3<i16>, direction: Vec3<i16>, alive: bool, lifetime: u64, max_lifetime: u64, radius: i16, mass: f64 } impl Particle { pub fn new(p: Point3<i16>, d: Vec3<i16>) -> Particle { Particle { position: p, direction: d, alive: true, lifetime: 250, max_lifetime: 250, radius: 5, mass: 5.0 * PARTICLE_DENSITY } } pub fn set_position(&mut self, pos: Point3<i16>) { self.position = pos; } pub fn is_alive(&self) -> bool { self.alive } pub fn get_position(&self) -> Point3<i16> { self.position } pub fn get_direction(&self) -> Vec3<i16> { self.direction } pub fn get_lifetime(&self) -> u64 { self.lifetime } pub fn get_radius(&self) -> i16 { self.radius } pub fn get_mass(&self) -> f64 { self.mass } pub fn copy(&self) -> Particle { Particle { position: self.position, direction: self.direction, alive: self.alive, lifetime: self.lifetime, max_lifetime: self.max_lifetime, radius: self.radius, mass: self.mass } } pub fn update(&mut self) { if self.alive { self.position.apply_vec(self.direction/10.0); self.lifetime -= 1; if self.lifetime == 0 { self.alive = false; } } } pub fn apply_force(&mut self, f: Vec3<i16>) { self.direction += f } pub fn kill(&mut self) { self.alive = false; } fn compute_alpha(&self) -> u8 { ((self.lifetime as f64) / (self.max_lifetime as f64) * (self.lifetime as f64) / (self.max_lifetime as f64) * 255.0) as u8 } fn compute_green(&self) -> u8 { ( ( 1.0 - (((self.lifetime as f64) / (self.max_lifetime as f64) ) * ( (self.lifetime as f64) / (self.max_lifetime as f64))) ) * 255.0 ) as u8 } fn change_radius(&mut self, rad: i16) { self.radius = rad; self.mass = (rad as f64) * PARTICLE_DENSITY; } } impl Drawable for Particle { fn draw_surface(&self, c: &mut Canvas<Surface>) { 
c.filled_circle(self.position.x as i16, self.position.y as i16, self.radius, (255, self.compute_green(), 0, self.compute_alpha())).unwrap(); } fn draw_window(&self, c: &mut Canvas<Window>) { c.filled_circle(self.position.x as i16, self.position.y as i16, self.radius, (255, self.compute_green(), 0, self.compute_alpha())).unwrap(); } } pub struct ParticleBuilder { template: Particle } impl<'a> ParticleBuilder { pub fn new(start_pos: Point3<i16>, start_dir: Vec3<i16>) -> ParticleBuilder { ParticleBuilder { template: Particle::new(start_pos, start_dir) } } pub fn with_radius(&'a mut self, radius: i16) -> &'a mut ParticleBuilder { self.template.change_radius(radius); self } pub fn with_lifetime(&'a mut self, lifetime: u64) -> &'a mut ParticleBuilder { self.template.max_lifetime = lifetime; self.template.lifetime = lifetime; self } pub fn create(&self) -> Particle { self.template.clone() } }
use vec3d::Vec3; use point3d::Point3; use drawable::Drawable; use sdl2::render::Canvas; use sdl2::video::Window; use sdl2::surface::Surface; use sdl2::gfx::primitives::DrawRenderer; static PARTICLE_DENSITY : f64 = 1.0; #[derive(Debug, Default, PartialEq, Copy, Clone)] pub struct Particle { position: Point3<i16>, direction: Vec3<i16>, alive: bool, lifetime: u64, max_lifetime: u64, radius: i16, mass: f64 } impl Particle { pub fn new(p: Point3<i16>, d: Vec3<i16>) -> Particle { Particle { position: p, direction: d, alive: true, li
pub fn set_position(&mut self, pos: Point3<i16>) { self.position = pos; } pub fn is_alive(&self) -> bool { self.alive } pub fn get_position(&self) -> Point3<i16> { self.position } pub fn get_direction(&self) -> Vec3<i16> { self.direction } pub fn get_lifetime(&self) -> u64 { self.lifetime } pub fn get_radius(&self) -> i16 { self.radius } pub fn get_mass(&self) -> f64 { self.mass } pub fn copy(&self) -> Particle { Particle { position: self.position, direction: self.direction, alive: self.alive, lifetime: self.lifetime, max_lifetime: self.max_lifetime, radius: self.radius, mass: self.mass } } pub fn update(&mut self) { if self.alive { self.position.apply_vec(self.direction/10.0); self.lifetime -= 1; if self.lifetime == 0 { self.alive = false; } } } pub fn apply_force(&mut self, f: Vec3<i16>) { self.direction += f } pub fn kill(&mut self) { self.alive = false; } fn compute_alpha(&self) -> u8 { ((self.lifetime as f64) / (self.max_lifetime as f64) * (self.lifetime as f64) / (self.max_lifetime as f64) * 255.0) as u8 } fn compute_green(&self) -> u8 { ( ( 1.0 - (((self.lifetime as f64) / (self.max_lifetime as f64) ) * ( (self.lifetime as f64) / (self.max_lifetime as f64))) ) * 255.0 ) as u8 } fn change_radius(&mut self, rad: i16) { self.radius = rad; self.mass = (rad as f64) * PARTICLE_DENSITY; } } impl Drawable for Particle { fn draw_surface(&self, c: &mut Canvas<Surface>) { c.filled_circle(self.position.x as i16, self.position.y as i16, self.radius, (255, self.compute_green(), 0, self.compute_alpha())).unwrap(); } fn draw_window(&self, c: &mut Canvas<Window>) { c.filled_circle(self.position.x as i16, self.position.y as i16, self.radius, (255, self.compute_green(), 0, self.compute_alpha())).unwrap(); } } pub struct ParticleBuilder { template: Particle } impl<'a> ParticleBuilder { pub fn new(start_pos: Point3<i16>, start_dir: Vec3<i16>) -> ParticleBuilder { ParticleBuilder { template: Particle::new(start_pos, start_dir) } } pub fn with_radius(&'a mut self, radius: i16) -> 
&'a mut ParticleBuilder { self.template.change_radius(radius); self } pub fn with_lifetime(&'a mut self, lifetime: u64) -> &'a mut ParticleBuilder { self.template.max_lifetime = lifetime; self.template.lifetime = lifetime; self } pub fn create(&self) -> Particle { self.template.clone() } }
fetime: 250, max_lifetime: 250, radius: 5, mass: 5.0 * PARTICLE_DENSITY } }
function_block-function_prefixed
[ { "content": "pub fn dot(v1: &Vec3, v2: &Vec3) -> f64 {\n\n v1.x * v2.x + v1.y * v2.y + v1.z * v2.z\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 0, "score": 83669.11190807322 }, { "content": "pub fn unit_vector(v: Vec3) -> Vec3 {\n\n let mut tmp = v;\n\n tmp.make_unit();\n\n tmp\n\n}\n\n\n\nimpl Index<i32> for Vec3 {\n\n type Output = f64;\n\n fn index(&self, index: i32) -> &f64 {\n\n match index {\n\n 0 => &self.x,\n\n 1 => &self.y,\n\n 2 => &self.z,\n\n _ => panic!(\"Tried to access out of bound vector\")\n\n }\n\n }\n\n}\n\n\n\nimpl Div for Vec3 {\n\n type Output = Vec3;\n", "file_path": "src/vec3.rs", "rank": 1, "score": 59718.949180699594 }, { "content": "pub fn cross(v1: &Vec3, v2: &Vec3) -> Vec3 {\n\n Vec3::new(\n\n v1.y * v2.z - v1.z * v2.y,\n\n -(v1.x * v2.z - v1.z * v2.x),\n\n v1.x * v2.y - v1.y * v2.x\n\n )\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 2, "score": 53162.25827323585 }, { "content": "pub fn unit_vector<T: Float>(v: Vec3<T>) -> Vec3<T> {\n\n let mut tmp = v;\n\n tmp.make_unit();\n\n tmp\n\n}\n\n\n\nimpl<T: Num> Index<i32> for Vec3<T> {\n\n type Output = T;\n\n fn index(&self, index: i32) -> &T {\n\n match index {\n\n 0 => &self.x,\n\n 1 => &self.y,\n\n 2 => &self.z,\n\n _ => panic!(\"Tried to access out of bound vector\")\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Num> Div for Vec3<T> {\n\n type Output = Vec3<T>;\n", "file_path": "src/vec3d.rs", "rank": 3, "score": 47975.00988892562 }, { "content": "struct SqrtCache {\n\n cache: Vec<f64>\n\n}\n\n\n\nimpl SqrtCache {\n\n fn new(max: usize) -> SqrtCache {\n\n let mut c = SqrtCache {\n\n cache: Vec::<f64>::with_capacity(max)\n\n };\n\n for (i, v) in c.cache.iter_mut().enumerate() {\n\n let j = i as f64;\n\n *v = j.sqrt();\n\n }\n\n c\n\n }\n\n\n\n fn compute_sqrt(&self, v: f64) -> f64 {\n\n if v.ceil() >= (self.cache.len() as f64) {\n\n v.sqrt()\n\n } else {\n", "file_path": "src/physic_property.rs", "rank": 4, "score": 43918.75381018781 }, { "content": "pub fn dot<T: Num>(v1: &Vec3<T>, v2: 
&Vec3<T>) -> T {\n\n v1.x * v2.x + v1.y * v2.y + v1.z * v2.z\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 5, "score": 43751.80722445828 }, { "content": "pub fn cross<T: Num>(v1: &Vec3<T>, v2: &Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n v1.y * v2.z - v1.z * v2.y,\n\n -(v1.x * v2.z - v1.z * v2.x),\n\n v1.x * v2.y - v1.y * v2.x\n\n )\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 6, "score": 41918.187401140225 }, { "content": "pub trait Drawable {\n\n //fn draw<T: RenderTarget>(&self, &mut canvas: Canvas<T>);\n\n fn draw_window(&self, canvas: &mut Canvas<Window>);\n\n fn draw_surface(&self, canvas: &mut Canvas<Surface>);\n\n}\n", "file_path": "src/drawable.rs", "rank": 7, "score": 41606.720254429616 }, { "content": "struct MyType<T>(T);\n\nimpl<T: Num> Mul<Vec3<T>> for MyType<T> {\n\n type Output = Vec3<T>;\n\n fn mul(self, rhs: Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n rhs.x * self,\n\n rhs.y * self,\n\n rhs.z * self,\n\n )\n\n }\n\n}\n\n\n\nimpl<T: Num> Sub for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn sub(self, rhs: Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n self.x - rhs.x,\n\n self.y - rhs.y,\n\n self.z - rhs.z,\n\n )\n", "file_path": "src/vec3d.rs", "rank": 8, "score": 39367.072509856254 }, { "content": "fn main() {\n\n let args : Vec<String> = env::args().collect();\n\n let width: u32 = args[1].parse().unwrap();\n\n let height: u32 = args[2].parse().unwrap();\n\n let parts_by_frame: u32 = args[3].parse().unwrap();\n\n let bound = Rectangle{\n\n up_left_corner: Point3::new(0.0,0.0,0.0),\n\n height: height as f64,\n\n width: width as f64,\n\n depth: 0.0\n\n };\n\n let mut world = World::new(vec![Box::new(Gravity{}),\n\n Box::new(Wind{}),\n\n Box::new(AirResistance::new()),\n\n Box::new(GravityWell::new(Point3{\n\n x: bound.center().x - 200.0,\n\n y: bound.center().y - 300.0,\n\n z: bound.center().z}, 7.0, 10.0)),\n\n Box::new(GravityWell::new(Point3{\n\n x: bound.center().x + 100.0,\n", "file_path": "src/main.rs", "rank": 9, "score": 34922.73605263276 }, { 
"content": "pub trait PhysicProperty : Send + Sync {\n\n //type DrawableEntity: Drawable;\n\n fn update_particle(&self, p: &Particle) -> Particle;\n\n //fn as_drawable(&self) -> &Self::DrawableEntity;\n\n fn as_drawable(&self) -> Option<&Drawable>;\n\n}\n\n// struct Void {}\n\n// impl Drawable for Void {\n\n// fn draw<T: RenderTarget>(&self, canvas: Canvas<T>) {}\n\n// }\n\n\n\npub struct Gravity {}\n\nimpl PhysicProperty for Gravity {\n\n //type DrawableEntity = Void;\n\n fn update_particle(&self, p: &Particle) -> Particle {\n\n let mut tmp = p.clone();\n\n tmp.apply_force(Vec3::new(0.0, 1.0 * (p.get_mass() as f64), 0.0));\n\n tmp\n\n }\n\n fn as_drawable(&self) -> Option<&Drawable> {\n", "file_path": "src/physic_property.rs", "rank": 10, "score": 33832.26317424466 }, { "content": "#[test]\n\nfn vec3_length() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n assert!(a.length() == 3f64.sqrt());\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 11, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_sub() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a - a == Default::default());\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 12, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_neg() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let a = -a;\n\n let c = Vec3::new(-1.0, -2.0, -3.0);\n\n assert!(a == c);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 13, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_dot() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let b = Vec3::new(2.0, 3.0, 4.0);\n\n assert!(dot(&a, &b) == 20.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 14, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_add() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a + a == Vec3::new(2.0, 4.0, 6.0));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 15, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_length() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n assert!(a.length() == 
3f64.sqrt());\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 16, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_add() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a + a == Vec3::new(2.0, 4.0, 6.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 17, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_access() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a[0] == 1.0);\n\n assert!(a[1] == 2.0);\n\n assert!(a[2] == 3.0);\n\n let result = ::std::panic::catch_unwind(|| a[3]);\n\n assert!(result.is_err());\n\n}\n\n\n\n*/\n", "file_path": "src/vec3d.rs", "rank": 18, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_mul() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a * a == Vec3::new(1.0, 4.0, 9.0));\n\n assert!(a * 2.0 == Vec3::new(2.0, 4.0, 6.0));\n\n assert!(2.0 * a == Vec3::new(2.0, 4.0, 6.0));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 19, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_div() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a / a == Vec3::new(1.0, 1.0, 1.0));\n\n assert!(a / 2.0 == Vec3::new(0.5, 1.0, 1.5));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 20, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_access() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a[0] == 1.0);\n\n assert!(a[1] == 2.0);\n\n assert!(a[2] == 3.0);\n\n let result = ::std::panic::catch_unwind(|| a[3]);\n\n assert!(result.is_err());\n\n}\n\n*/\n", "file_path": "src/vec3.rs", "rank": 21, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_cross() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let b = Vec3::new(2.0, 3.0, 4.0);\n\n assert!(cross(&a, &b) == Vec3::new(-1.0, 2.0, -1.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 22, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_neg() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let a = -a;\n\n let c = Vec3::new(-1.0, -2.0, -3.0);\n\n assert!(a == c);\n\n}\n\n\n", "file_path": 
"src/vec3d.rs", "rank": 23, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_cross() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let b = Vec3::new(2.0, 3.0, 4.0);\n\n assert!(cross(&a, &b) == Vec3::new(-1.0, 2.0, -1.0));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 24, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_dot() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let b = Vec3::new(2.0, 3.0, 4.0);\n\n assert!(dot(&a, &b) == 20.0);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 25, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_div() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a / a == Vec3::new(1.0, 1.0, 1.0));\n\n assert!(a / 2.0 == Vec3::new(0.5, 1.0, 1.5));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 26, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_sub() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a - a == Default::default());\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 27, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_mul() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n assert!(a * a == Vec3::new(1.0, 4.0, 9.0));\n\n assert!(a * 2.0 == Vec3::new(2.0, 4.0, 6.0));\n\n assert!(2.0 * a == Vec3::new(2.0, 4.0, 6.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 28, "score": 33653.16398178722 }, { "content": "#[test]\n\nfn vec3_mul_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a *= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(2.0, 6.0, 12.0);\n\n assert!(a == b);\n\n a *= 2.0;\n\n assert!(a == Vec3::new(4.0, 12.0, 24.0));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 29, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_add_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a += Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(3.0, 5.0, 7.0);\n\n assert!(a == b);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 30, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_make_unit() {\n\n let mut a = 
Vec3::new(1.0, 0.0, 0.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(0.0, 1.0, 0.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(0.0, 0.0, 1.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 31, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_mul_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a *= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(2.0, 6.0, 12.0);\n\n assert!(a == b);\n\n a *= 2.0;\n\n assert!(a == Vec3::new(4.0, 12.0, 24.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 32, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_div_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a /= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(0.5, 2.0/3.0, 3.0/4.0);\n\n assert!(a == b);\n\n a /= 2.0;\n\n assert!(a == Vec3::new(0.25, 1.0/3.0, 3.0/8.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 33, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_sub_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a -= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(-1.0, -1.0, -1.0);\n\n assert!(a == b);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 34, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_sub_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a -= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(-1.0, -1.0, -1.0);\n\n assert!(a == b);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 35, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_add_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a += Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(3.0, 5.0, 7.0);\n\n assert!(a == b);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 36, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_div_assign() {\n\n let mut a = Vec3::new(1.0, 2.0, 3.0);\n\n a /= Vec3::new(2.0, 3.0, 4.0);\n\n let b = Vec3::new(0.5, 2.0/3.0, 
3.0/4.0);\n\n assert!(a == b);\n\n a /= 2.0;\n\n assert!(a == Vec3::new(0.25, 1.0/3.0, 3.0/8.0));\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 37, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_unit_vector() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n let a = unit_vector(a);\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(1.0, 1.0, 1.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 38, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_squared_length() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n assert!(a.squared_length() == 3.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 39, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_unit_vector() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n let a = unit_vector(a);\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(1.0, 1.0, 1.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 40, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_make_unit() {\n\n let mut a = Vec3::new(1.0, 0.0, 0.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(0.0, 1.0, 0.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n let mut a = Vec3::new(0.0, 0.0, 1.0);\n\n a.make_unit();\n\n assert!(a.length() == 1.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 41, "score": 32519.100639746972 }, { "content": "#[test]\n\nfn vec3_squared_length() {\n\n let a = Vec3::new(1.0, 1.0, 1.0);\n\n assert!(a.squared_length() == 3.0);\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 42, "score": 32519.100639746972 }, { "content": "\n\nuse vec3::Vec3;\n\n\n\n#[derive(Debug, Default, PartialEq, Copy, Clone)]\n\npub struct Point3 {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64\n\n}\n\n\n\nimpl Point3 {\n\n pub fn new(x: f64, y: f64, z: f64) -> Point3 {\n\n Point3 {\n\n x: x,\n\n y: y,\n\n z: z\n\n }\n\n }\n\n pub fn apply_vec(&mut self, v: 
Vec3) {\n\n self.x += v.x;\n\n self.y += v.y;\n\n self.z += v.z;\n\n }\n\n}\n", "file_path": "src/point3.rs", "rank": 50, "score": 13.989834857449484 }, { "content": "\n\nuse point3::Point3;\n\nuse drawable::Drawable;\n\n\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\nuse sdl2::surface::Surface;\n\nuse sdl2::gfx::primitives::DrawRenderer;\n\n\n\n#[derive(Debug, Default, PartialEq, Copy, Clone)]\n\npub struct Rectangle {\n\n pub up_left_corner: Point3,\n\n pub height: f64,\n\n pub width: f64,\n\n pub depth: f64\n\n}\n\n\n\nimpl Rectangle {\n\n pub fn is_in_bound(&self, p: &Point3) -> bool {\n", "file_path": "src/rectangle.rs", "rank": 51, "score": 13.306320364621499 }, { "content": "\n\nuse vec3d;\n\npub use vec3d::unit_vector;\n\n\n\npub type Vec3 = vec3d::Vec3<f64>;\n\n\n\n/*\n\nuse std::ops::{Neg, AddAssign, SubAssign, MulAssign, DivAssign, Add, Sub, Mul, Div, Index};\n\n\n\n#[derive(Debug, Default, PartialEq, Copy, Clone)]\n\npub struct Vec3 {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64\n\n}\n\n\n\nimpl Vec3 {\n\n pub fn length(&self) -> f64 {\n\n (self.x * self.x + self.y * self.y + self.z * self.z).sqrt()\n\n }\n", "file_path": "src/vec3.rs", "rank": 52, "score": 12.638534252827547 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Default, PartialEq, Copy, Clone)]\n\npub struct GravityWell {\n\n pub position: Point3,\n\n pub strength: f64,\n\n pub area_of_effect: f64,\n\n}\n\nimpl GravityWell {\n\n pub fn new(p: Point3, s: f64, aoe: f64) -> GravityWell {\n\n GravityWell {\n\n position: p,\n\n strength: s,\n\n area_of_effect: aoe\n\n }\n\n }\n\n}\n\nimpl Drawable for GravityWell {\n\n //fn draw<T: RenderTarget>(&self, c: &mut Canvas<T>) {\n", "file_path": "src/physic_property.rs", "rank": 53, "score": 12.3961329364516 }, { "content": "use particle::Particle;\n\nuse physic_property::PhysicProperty;\n\nuse point3::Point3;\n\nuse rectangle::Rectangle;\n\nuse vec3::Vec3;\n\n\n\nuse rayon::prelude::*;\n\nuse 
rayon::iter::IntoParallelRefMutIterator;\n\n\n\npub struct World {\n\n pub particles: Vec<Particle>,\n\n pub properties: Vec<Box<PhysicProperty>>,\n\n pub boundaries: Rectangle,\n\n\n\n particle_creation_point: Point3,\n\n create_particle_fun: Box<Fn(Point3) -> Particle + Sync + Send>,\n\n\n\n cpt: i16,\n\n iter: usize\n\n\n", "file_path": "src/world.rs", "rank": 54, "score": 10.33679151648744 }, { "content": " pub position: Point3,\n\n pub strength: f64,\n\n pub area_of_effect: f64,\n\n pub layers: u64,\n\n}\n\nimpl BigGravityWell {\n\n pub fn new(p: Point3, s: f64, aoe: f64, l: u64) -> BigGravityWell {\n\n BigGravityWell {\n\n position: p,\n\n strength: s,\n\n area_of_effect: aoe,\n\n layers: l\n\n }\n\n }\n\n}\n\nimpl Drawable for BigGravityWell {\n\n //fn draw<T: RenderTarget>(&self, c: &mut Canvas<T>) {\n\n fn draw_window(&self, c: &mut Canvas<Window>) {\n\n let mut alpha = 55;\n\n let mut aoe = self.area_of_effect;\n", "file_path": "src/physic_property.rs", "rank": 55, "score": 9.902613865952034 }, { "content": "\n\nuse std::ops::{Neg, AddAssign, SubAssign, MulAssign, DivAssign, Add, Sub, Mul, Div};\n\nuse std::marker::Copy;\n\nuse std::clone::Clone;\n\nuse std::fmt::Debug;\n\nuse std::default::Default;\n\n\n\nuse vec3d::Vec3;\n\nuse num;\n\n\n", "file_path": "src/point3d.rs", "rank": 56, "score": 8.268747937963452 }, { "content": " if dir.y >= 0.0 {\n\n p.apply_force(Vec3::new(dir.x,\n\n -1.0 * dir.y * 2.0,\n\n dir.z));\n\n }\n\n p.set_position(Point3::new(x, corner.y + bound.height - rad, z));\n\n }\n\n });\n\n self.cpt += 1;\n\n if self.cpt > 100 {\n\n self.particles.retain(|&x| x.is_alive());\n\n self.cpt = 0;\n\n }\n\n // self.iter += 1;\n\n // if self.iter >= self.properties.len() {\n\n // self.iter = 0;\n\n // }\n\n }\n\n\n\n pub fn create_particle(&mut self) {\n\n self.particles.push((self.create_particle_fun)(self.particle_creation_point).clone());\n\n }\n\n}\n", "file_path": "src/world.rs", "rank": 57, "score": 8.192390050729262 }, { "content": 
"\n\nuse std::ops::{Neg, AddAssign, SubAssign, MulAssign, DivAssign, Add, Sub, Mul, Div, Index};\n\nuse std::marker::Copy;\n\nuse std::clone::Clone;\n\nuse std::fmt::Debug;\n\nuse std::default::Default;\n\n\n\nuse num;\n\nuse num::Float;\n\n\n", "file_path": "src/vec3d.rs", "rank": 58, "score": 8.182198574088606 }, { "content": " ).unwrap();\n\n alpha -= 255/(self.layers+1);\n\n aoe = (aoe * 1.5) + self.area_of_effect;\n\n }\n\n }\n\n}\n\nimpl PhysicProperty for BigGravityWell {\n\n //type DrawableEntity = BigGravityWell;\n\n fn update_particle(&self, p: &Particle) -> Particle {\n\n let dist = ((self.position.x - p.get_position().x) *\n\n (self.position.x - p.get_position().x)) +\n\n ((self.position.y - p.get_position().y) *\n\n (self.position.y - p.get_position().y));\n\n let mut aoe = self.area_of_effect;\n\n\n\n let vec = Vec3{ x: p.get_position().x - self.position.x,\n\n y: p.get_position().y - self.position.y,\n\n z: p.get_position().z - self.position.z};\n\n\n\n let mut tmp = p.clone();\n", "file_path": "src/physic_property.rs", "rank": 59, "score": 7.873234229693692 }, { "content": " unsafe {\n\n self.cache.get_unchecked(v.abs().ceil() as usize).clone()\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct AirResistance {\n\n cache: SqrtCache\n\n}\n\nimpl AirResistance {\n\n pub fn new() -> AirResistance {\n\n AirResistance {\n\n cache: SqrtCache::new(100)\n\n }\n\n }\n\n}\n\nimpl PhysicProperty for AirResistance {\n\n //type DrawableEntity = Void;\n\n fn update_particle(&self, p: &Particle) -> Particle {\n", "file_path": "src/physic_property.rs", "rank": 60, "score": 7.733026289654251 }, { "content": " let density = 1.0; // air density\n\n let drag = 0.020; // drag coeficient (magic number here)\n\n let area = PI * (p.get_radius() as f64);// area affected by the air resistance, compute using radius of sphere\n\n let mut next_point = p.copy();\n\n next_point.update();\n\n let speed = self.cache.compute_sqrt(((next_point.get_position().x - p.get_position().x) *\n\n 
(next_point.get_position().x - p.get_position().x)) +\n\n ((next_point.get_position().y - p.get_position().y) *\n\n (next_point.get_position().y - p.get_position().y)));\n\n let f = ((density * drag * area) / 2.0) * speed;\n\n let unit_v = unit_vector(p.get_direction());\n\n let mut tmp = p.copy(); //p.clone();\n\n tmp.apply_force(-1.0 * f * unit_v);\n\n tmp\n\n }\n\n // fn as_drawable(&self) -> &Self::DrawableEntity {\n\n // Void {}\n\n // }\n\n fn as_drawable(&self) -> Option<&Drawable> {\n\n None\n", "file_path": "src/physic_property.rs", "rank": 61, "score": 7.375734776156779 }, { "content": " 5,\n\n (0, 0, 255, 255)\n\n ).unwrap();\n\n }\n\n}\n\nimpl PhysicProperty for GravityWell {\n\n //type DrawableEntity = GravityWell;\n\n fn update_particle(&self, p: &Particle) -> Particle {\n\n let dist = ((self.position.x - p.get_position().x) *\n\n (self.position.x - p.get_position().x)) +\n\n ((self.position.y - p.get_position().y) *\n\n (self.position.y - p.get_position().y));\n\n let aoe = self.area_of_effect;\n\n let aoe2 = aoe * 2.0;\n\n let aoe3 = aoe * 3.0;\n\n\n\n let vec = Vec3{ x: p.get_position().x - self.position.x,\n\n y: p.get_position().y - self.position.y,\n\n z: p.get_position().z - self.position.z};\n\n\n", "file_path": "src/physic_property.rs", "rank": 62, "score": 7.119780457984337 }, { "content": " pub y: T,\n\n pub z: T\n\n}\n\n\n\n// impl<T: Num> Copy for Vec3<T> {}\n\n\n\nimpl<T: Float> Vec3<T> {\n\n pub fn length(&self) -> T {\n\n (self.x * self.x + self.y * self.y + self.z * self.z).sqrt()\n\n }\n\n\n\n pub fn make_unit(&mut self) -> Self {\n\n *self /= self.length();\n\n *self\n\n }\n\n}\n\n\n\nimpl<T: Num> Vec3<T> {\n\n\n\n pub fn squared_length(&self) -> T {\n", "file_path": "src/vec3d.rs", "rank": 63, "score": 7.057564772346668 }, { "content": " None\n\n }\n\n}\n\n\n\npub struct Wind {}\n\nimpl PhysicProperty for Wind {\n\n // type DrawableEntity = Void;\n\n fn update_particle(&self, p: &Particle) -> Particle {\n\n let mut tmp = 
p.clone();\n\n tmp.apply_force(Vec3::new(-0.25, 0.0, 0.0));\n\n tmp\n\n }\n\n // fn as_drawable(&self) -> &Self::DrawableEntity {\n\n // Void {}\n\n // }\n\n fn as_drawable(&self) -> Option<&Drawable> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/physic_property.rs", "rank": 64, "score": 7.011426218185458 }, { "content": " fn draw_window(&self, c: &mut Canvas<Window>) {\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (self.area_of_effect * 3.0) as i16,\n\n (0, 0, 255, 100)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (self.area_of_effect * 2.0) as i16,\n\n (0, 0, 255, 150)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n self.area_of_effect as i16,\n\n (0, 0, 255, 200)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n 5,\n\n (0, 0, 255, 255)\n", "file_path": "src/physic_property.rs", "rank": 65, "score": 6.944830789392876 }, { "content": " ).unwrap();\n\n }\n\n fn draw_surface(&self, c: &mut Canvas<Surface>) {\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (self.area_of_effect * 3.0) as i16,\n\n (0, 0, 255, 100)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (self.area_of_effect * 2.0) as i16,\n\n (0, 0, 255, 150)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n self.area_of_effect as i16,\n\n (0, 0, 255, 200)\n\n ).unwrap();\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n", "file_path": "src/physic_property.rs", "rank": 66, "score": 6.915665158099346 }, { "content": "}\n\n\n\nimpl World {\n\n pub fn new(pr: Vec<Box<PhysicProperty>>, b: Rectangle, c: Point3, f: Box<Fn(Point3) -> Particle + Sync + Send>) -> World {\n\n World {\n\n particles: vec![],\n\n properties: pr,\n\n boundaries: b,\n\n particle_creation_point: c,\n\n create_particle_fun: f,\n\n cpt: 0,\n\n 
iter: 0\n\n }\n\n }\n\n pub fn update(&mut self) {\n\n // let mut cpt = 0;\n\n // par_iter_mut\n\n let prop = &self.properties;\n\n let bound = &self.boundaries;\n\n // let iter = self.iter;\n", "file_path": "src/world.rs", "rank": 67, "score": 6.837700593635878 }, { "content": " pub y: T,\n\n pub z: T\n\n}\n\n\n\n// impl<T: Num> Copy for Point3<T> {}\n\n\n\nimpl<T: Num> Point3<T> {\n\n pub fn new(x: T, y: T, z: T) -> Point3<T> {\n\n Point3 {\n\n x: x,\n\n y: y,\n\n z: z\n\n }\n\n }\n\n pub fn apply_vec(&mut self, v: Vec3<T>) {\n\n self.x += v.x;\n\n self.y += v.y;\n\n self.z += v.z;\n\n }\n\n}\n", "file_path": "src/point3d.rs", "rank": 68, "score": 6.763986046043549 }, { "content": " let mut tmp = p.clone();\n\n if dist < (aoe * aoe) {\n\n tmp.apply_force(vec * -self.strength * p.get_mass());\n\n } else if dist < (aoe2 * aoe2) {\n\n tmp.apply_force(vec * -(self.strength/2.0));\n\n } else if dist < (aoe3 * aoe3) {\n\n tmp.apply_force(vec * -(self.strength/3.0));\n\n }\n\n tmp\n\n }\n\n // fn as_drawable(&self) -> &Self::DrawableEntity {\n\n // self\n\n // }\n\n fn as_drawable(&self) -> Option<&Drawable> {\n\n Some(self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, PartialEq, Copy, Clone)]\n\npub struct BigGravityWell {\n", "file_path": "src/physic_property.rs", "rank": 69, "score": 6.615405375248061 }, { "content": "use particle::Particle;\n\nuse vec3::Vec3;\n\nuse vec3::unit_vector;\n\nuse point3::Point3;\n\nuse drawable::Drawable;\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\nuse sdl2::surface::Surface;\n\nuse sdl2::gfx::primitives::DrawRenderer;\n\n//use sdl2::render::RenderTarget;\n\nuse std::f64::consts::PI;\n\nuse std::vec::Vec;\n\nuse std;\n\n\n", "file_path": "src/physic_property.rs", "rank": 70, "score": 6.555573404988234 }, { "content": "\n\n pub fn squared_length(&self) -> f64 {\n\n self.x * self.x + self.y * self.y + self.z * self.z\n\n }\n\n\n\n pub fn make_unit(&mut self) -> Self {\n\n *self /= self.length();\n\n *self\n\n }\n\n\n\n pub fn 
new(x: f64, y: f64, z: f64) -> Vec3 {\n\n Vec3{\n\n x: x,\n\n y: y,\n\n z: z\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 71, "score": 6.458040134268844 }, { "content": " for i in 0..self.layers {\n\n let green = std::cmp::min(255, i*12) as u8;\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (aoe) as i16,\n\n (0, green, 255, alpha)\n\n ).unwrap();\n\n alpha += 20;\n\n aoe = (aoe * 1.5) + self.area_of_effect;\n\n }\n\n }\n\n fn draw_surface(&self, c: &mut Canvas<Surface>) {\n\n let mut alpha = 255;\n\n let mut aoe = self.area_of_effect;\n\n for _ in 0..self.layers {\n\n // let green = std::cmp::min(255, i*12) as u8;\n\n c.filled_circle(self.position.x as i16,\n\n self.position.y as i16,\n\n (aoe) as i16,\n\n (0, 0, 255, alpha as u8)\n", "file_path": "src/physic_property.rs", "rank": 72, "score": 5.572144238939343 }, { "content": " // FIXME maybe useless to parallelise\n\n // FIXME or maybe do all physic computation (instead of just one per frame)\n\n self.particles.par_iter_mut().for_each(|p: &mut Particle| {\n\n // *p = prop[iter].update_particle(p);\n\n // for prop in &self.properties {\n\n for prop in prop.iter() {\n\n *p = prop.update_particle(p);\n\n }\n\n p.update();\n\n let pos = p.get_position();\n\n let dir = p.get_direction();\n\n let rad = p.get_radius() as f64;\n\n\n\n let x = pos.x;\n\n let y = pos.y;\n\n let z = pos.z;\n\n let corner = bound.up_left_corner;\n\n if (x - (rad)) < corner.x {\n\n if dir.x <= 0.0 {\n\n p.apply_force(Vec3::new(-1.0 * dir.x * 2.0,\n", "file_path": "src/world.rs", "rank": 73, "score": 5.543012513473858 }, { "content": " }\n\n}\n\n\n\nimpl DivAssign<f64> for Vec3 {\n\n fn div_assign(&mut self, t: f64) {\n\n self.x /= t;\n\n self.y /= t;\n\n self.z /= t;\n\n }\n\n}\n\n\n\nimpl MulAssign for Vec3 {\n\n fn mul_assign(&mut self, rhs: Vec3) {\n\n self.x *= rhs.x;\n\n self.y *= rhs.y;\n\n self.z *= rhs.z;\n\n }\n\n}\n\n\n\nimpl MulAssign<f64> for Vec3 {\n", "file_path": "src/vec3.rs", 
"rank": 74, "score": 5.51340025817144 }, { "content": "use world::World;\n\nuse drawable::Drawable;\n\n\n\nuse std::boxed::Box;\n\nuse std::env;\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse sdl2::image::{INIT_PNG, INIT_JPG};\n\nuse sdl2::event::Event;\n\nuse sdl2::keyboard::Keycode;\n\nuse sdl2::gfx::primitives::DrawRenderer;\n\nuse sdl2::pixels::Color;\n\nuse sdl2::render::SurfaceCanvas;\n\nuse sdl2::surface::Surface;\n\nuse sdl2::pixels::PixelFormatEnum;\n\nuse sdl2::gfx::framerate::FPSManager;\n\nuse sdl2::rect::Rect;\n\n\n\n\n\n// TODO add more flexibility (more complex gravity well, particle mass, ...)\n\n// TODO add collision ???\n\n// FIXME better drawing of physic properties\n\n// TODO other physic property (accelerator tube, ...)\n\n\n", "file_path": "src/main.rs", "rank": 75, "score": 4.850529795823311 }, { "content": "extern crate rand;\n\nextern crate sdl2;\n\nextern crate rayon;\n\nextern crate num;\n\n\n\nmod vec3;\n\nmod vec3d;\n\nmod point3;\n\nmod point3d;\n\nmod particle;\n\nmod physic_property;\n\nmod world;\n\nmod rectangle;\n\nmod drawable;\n\n\n\nuse vec3::Vec3;\n\nuse point3::Point3;\n\nuse particle::ParticleBuilder;\n\nuse physic_property::{Gravity, GravityWell, AirResistance, Wind, BigGravityWell};\n\nuse rectangle::Rectangle;\n", "file_path": "src/main.rs", "rank": 76, "score": 4.799362577418924 }, { "content": "impl Mul<f64> for Vec3 {\n\n type Output = Vec3;\n\n fn mul(self, t: f64) -> Vec3 {\n\n Vec3::new(\n\n self.x * t,\n\n self.y * t,\n\n self.z * t,\n\n )\n\n }\n\n}\n\n\n\nimpl Mul for Vec3 {\n\n type Output = Vec3;\n\n fn mul(self, rhs: Vec3) -> Vec3 {\n\n Vec3::new(\n\n self.x * rhs.x,\n\n self.y * rhs.y,\n\n self.z * rhs.z,\n\n )\n\n }\n", "file_path": "src/vec3.rs", "rank": 77, "score": 4.767406491438376 }, { "content": " fn div(self, rhs: Vec3) -> Vec3 {\n\n Vec3::new(\n\n self.x / rhs.x,\n\n self.y / rhs.y,\n\n self.z / rhs.z,\n\n )\n\n }\n\n}\n\n\n\nimpl Div<f64> for Vec3 {\n\n type Output = Vec3;\n\n fn div(self, t: f64) -> 
Vec3 {\n\n Vec3::new(\n\n self.x / t,\n\n self.y / t,\n\n self.z / t,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 78, "score": 4.41569320658199 }, { "content": " fn mul_assign(&mut self, t: f64) {\n\n self.x *= t;\n\n self.y *= t;\n\n self.z *= t;\n\n }\n\n}\n\n\n\nimpl AddAssign for Vec3 {\n\n fn add_assign(&mut self, rhs: Vec3) {\n\n self.x += rhs.x;\n\n self.y += rhs.y;\n\n self.z += rhs.z;\n\n }\n\n}\n\n\n\nimpl SubAssign for Vec3 {\n\n fn sub_assign(&mut self, rhs: Vec3) {\n\n self.x -= rhs.x;\n\n self.y -= rhs.y;\n\n self.z -= rhs.z;\n", "file_path": "src/vec3.rs", "rank": 79, "score": 3.84727382157458 }, { "content": "}\n\n\n\nimpl Mul<Vec3> for f64 {\n\n type Output = Vec3;\n\n fn mul(self, rhs: Vec3) -> Vec3 {\n\n Vec3::new(\n\n rhs.x * self,\n\n rhs.y * self,\n\n rhs.z * self,\n\n )\n\n }\n\n}\n\n\n\nimpl Sub for Vec3 {\n\n type Output = Vec3;\n\n fn sub(self, rhs: Vec3) -> Vec3 {\n\n Vec3::new(\n\n self.x - rhs.x,\n\n self.y - rhs.y,\n\n self.z - rhs.z,\n", "file_path": "src/vec3.rs", "rank": 80, "score": 3.7886928643780835 }, { "content": " None => {},\n\n Some(d) => d.draw_surface(&mut surface_canvas)\n\n }\n\n }\n\n for p in &world.particles {\n\n if p.is_alive() {\n\n p.draw_surface(&mut surface_canvas);\n\n }\n\n }\n\n world.boundaries.draw_surface(&mut surface_canvas);\n\n let surface = font.render(&fps_counter.get_framerate().to_string())\n\n .blended(Color::RGBA(0, 0, 255, 255)).unwrap();\n\n let surface2 = font.render(&world.particles\n\n .iter()\n\n .filter(|p| p.is_alive())\n\n .count()\n\n .to_string())\n\n .blended(Color::RGBA(0, 0, 255, 255)).unwrap();\n\n let surface3 = font.render(&world.particles.len().to_string())\n\n .blended(Color::RGBA(0, 0, 255, 255)).unwrap();\n", "file_path": "src/main.rs", "rank": 81, "score": 3.77781399607645 }, { "content": " for i in 1..(self.layers+1) {\n\n let j = i as f64;\n\n if dist < (aoe * aoe) {\n\n tmp.apply_force(vec * (-self.strength / j) * p.get_mass());\n\n break\n\n }\n\n aoe = 
(aoe * 1.5) + self.area_of_effect;\n\n }\n\n tmp\n\n }\n\n // fn as_drawable(&self) -> &Self::DrawableEntity {\n\n // self\n\n // }\n\n fn as_drawable(&self) -> Option<&Drawable> {\n\n Some(self)\n\n }\n\n}\n", "file_path": "src/physic_property.rs", "rank": 82, "score": 3.4387471207961493 }, { "content": "\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\nuse sdl2::surface::Surface;\n\n// use sdl2::render::RenderTarget;\n\n\n", "file_path": "src/drawable.rs", "rank": 83, "score": 3.4019993175949885 }, { "content": " }\n\n }\n\n _ => {}\n\n }\n\n }\n\n if cpt == 0 {\n\n world.update();\n\n } else {\n\n canvas.set_draw_color(Color::RGB(0, 0, 0));\n\n canvas.clear();\n\n surface_canvas.set_draw_color(Color::RGB(0, 0, 0));\n\n surface_canvas.clear();\n\n // Point where the particle are created\n\n surface_canvas.filled_circle((bound.center().x) as i16,\n\n bound.center().y as i16,\n\n 1,\n\n (255, 255, 255, 255)\n\n ).unwrap();\n\n for p in &world.properties {\n\n match p.as_drawable() {\n", "file_path": "src/main.rs", "rank": 84, "score": 3.379147227053881 }, { "content": " let texture = texture_creator.create_texture_from_surface(&surface).unwrap();\n\n let texture2 = texture_creator.create_texture_from_surface(&surface2).unwrap();\n\n let texture3 = texture_creator.create_texture_from_surface(&surface3).unwrap();\n\n let texture_creator = canvas.texture_creator();\n\n canvas.copy(&texture_creator.create_texture_from_surface(\n\n surface_canvas.into_surface()\n\n ).unwrap(),\n\n None,\n\n None).unwrap();\n\n canvas.copy(&texture, None, Some(Rect::new(0, 0, 50, 50))).unwrap();\n\n canvas.copy(&texture2, None, Some(Rect::new(0, 55, 50, 50))).unwrap();\n\n canvas.copy(&texture3, None, Some(Rect::new(0, 110, 50, 50))).unwrap();\n\n canvas.present();\n\n for _ in 0..parts_by_frame {\n\n world.create_particle();\n\n }\n\n }\n\n cpt = 1 - cpt;\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 85, "score": 3.067855331551745 }, { "content": " Vec3::new(\n\n 
rng.gen_range(-100.0, 100.0),\n\n rng.gen_range(-200.0, 0.0),\n\n 0.0))\n\n .with_radius(rng.gen_range(1, 10))\n\n .with_lifetime(rng.gen_range(50, 1000))\n\n .create()\n\n }));\n\n // SDL\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let _image_context = sdl2::image::init(INIT_PNG | INIT_JPG).unwrap();\n\n let window = video_subsystem.window(\"Particle generator\", width, height)\n\n .position_centered()\n\n .build()\n\n .unwrap();\n\n\n\n let mut canvas = window.into_canvas().present_vsync().build().unwrap();\n\n\n\n let fps_counter = FPSManager::new();\n", "file_path": "src/main.rs", "rank": 86, "score": 2.9080334156903183 }, { "content": " p.x >= self.up_left_corner.x &&\n\n p.y >= self.up_left_corner.y &&\n\n p.z >= self.up_left_corner.z &&\n\n p.x <= self.up_left_corner.x + self.width &&\n\n p.y <= self.up_left_corner.y + self.height &&\n\n p.z <= self.up_left_corner.z + self.depth\n\n }\n\n\n\n pub fn center(&self) -> Point3 {\n\n Point3 {\n\n x: self.up_left_corner.x + self.width/2.0,\n\n y: self.up_left_corner.y + self.height/2.0,\n\n z: self.up_left_corner.z + self.depth/2.0\n\n }\n\n }\n\n}\n\n\n\nimpl Drawable for Rectangle {\n\n fn draw_surface(&self, c: &mut Canvas<Surface>) {\n\n let r = Rect::new(self.up_left_corner.x as i32,\n", "file_path": "src/rectangle.rs", "rank": 87, "score": 2.518058418476428 }, { "content": " self.x * self.x + self.y * self.y + self.z * self.z\n\n }\n\n\n\n pub fn new(x: T, y: T, z: T) -> Vec3<T> {\n\n Vec3{\n\n x: x,\n\n y: y,\n\n z: z\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 88, "score": 2.4904505044346754 }, { "content": " }\n\n}\n\n\n\nimpl Neg for Vec3 {\n\n type Output = Vec3;\n\n fn neg(self) -> Vec3 {\n\n Vec3::new(-self.x, -self.y, -self.z)\n\n }\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 89, "score": 2.3762183058704767 }, { "content": "}\n\n\n\nimpl<T: Num> DivAssign<T> for Vec3<T> {\n\n fn div_assign(&mut self, t: T) {\n\n 
self.x /= t;\n\n self.y /= t;\n\n self.z /= t;\n\n }\n\n}\n\n\n\nimpl<T: Num> MulAssign for Vec3<T> {\n\n fn mul_assign(&mut self, rhs: Vec3<T>) {\n\n self.x *= rhs.x;\n\n self.y *= rhs.y;\n\n self.z *= rhs.z;\n\n }\n\n}\n\n\n\nimpl<T: Num> MulAssign<T> for Vec3<T> {\n\n fn mul_assign(&mut self, t: T) {\n", "file_path": "src/vec3d.rs", "rank": 90, "score": 2.3577833354650815 }, { "content": " )\n\n }\n\n}\n\n\n\nimpl Add for Vec3 {\n\n type Output = Vec3;\n\n fn add(self, rhs: Vec3) -> Vec3 {\n\n Vec3::new(\n\n self.x + rhs.x,\n\n self.y + rhs.y,\n\n self.z + rhs.z\n\n )\n\n }\n\n}\n\n\n\nimpl DivAssign for Vec3 {\n\n fn div_assign(&mut self, rhs: Vec3) {\n\n self.x /= rhs.x;\n\n self.y /= rhs.y;\n\n self.z /= rhs.z;\n", "file_path": "src/vec3.rs", "rank": 91, "score": 2.3135916751307004 }, { "content": " dir.y,\n\n dir.z));\n\n }\n\n p.set_position(Point3::new(corner.x + rad, y, z));\n\n } else if (corner.x + bound.width) < (x + (rad)) && (dir.x > 0.0) {\n\n if dir.x >= 0.0 {\n\n p.apply_force(Vec3::new(-1.0 * dir.x * 2.0,\n\n dir.y,\n\n dir.z))\n\n }\n\n p.set_position(Point3::new(corner.x + bound.width - rad, y, z));\n\n }\n\n if (y - (rad)) < corner.y {\n\n if dir.y <= 0.0 {\n\n p.apply_force(Vec3::new(dir.x,\n\n -1.0 * dir.y * 2.0,\n\n dir.z));\n\n }\n\n p.set_position(Point3::new(x, corner.y + rad, z));\n\n } else if (corner.y + bound.height) < (y + (rad)) {\n", "file_path": "src/world.rs", "rank": 92, "score": 2.2592764874409608 }, { "content": " self.x *= t;\n\n self.y *= t;\n\n self.z *= t;\n\n }\n\n}\n\n\n\nimpl<T: Num> AddAssign for Vec3<T> {\n\n fn add_assign(&mut self, rhs: Vec3<T>) {\n\n self.x += rhs.x;\n\n self.y += rhs.y;\n\n self.z += rhs.z;\n\n }\n\n}\n\n\n\nimpl<T: Num> SubAssign for Vec3<T> {\n\n fn sub_assign(&mut self, rhs: Vec3<T>) {\n\n self.x -= rhs.x;\n\n self.y -= rhs.y;\n\n self.z -= rhs.z;\n\n }\n\n}\n\n\n\nimpl<T: Num> Neg for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn neg(self) -> Vec3<T> {\n\n Vec3::new(-self.x, -self.y, 
-self.z)\n\n }\n\n}\n\n/*\n", "file_path": "src/vec3d.rs", "rank": 93, "score": 2.2325215047677593 }, { "content": " }\n\n}\n\n\n\nimpl<T: Num> Add for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn add(self, rhs: Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n self.x + rhs.x,\n\n self.y + rhs.y,\n\n self.z + rhs.z\n\n )\n\n }\n\n}\n\n\n\nimpl<T: Num> DivAssign for Vec3<T> {\n\n fn div_assign(&mut self, rhs: Vec3<T>) {\n\n self.x /= rhs.x;\n\n self.y /= rhs.y;\n\n self.z /= rhs.z;\n\n }\n", "file_path": "src/vec3d.rs", "rank": 94, "score": 2.033675730627292 }, { "content": "impl<T: Num> Mul<T> for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn mul(self, t: T) -> Vec3<T> {\n\n Vec3::new(\n\n self.x * t,\n\n self.y * t,\n\n self.z * t,\n\n )\n\n }\n\n}\n\n\n\nimpl<T: Num> Mul for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn mul(self, rhs: Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n self.x * rhs.x,\n\n self.y * rhs.y,\n\n self.z * rhs.z,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 95, "score": 2.033675730627292 }, { "content": " fn div(self, rhs: Vec3<T>) -> Vec3<T> {\n\n Vec3::new(\n\n self.x / rhs.x,\n\n self.y / rhs.y,\n\n self.z / rhs.z,\n\n )\n\n }\n\n}\n\n\n\nimpl<T: Num> Div<T> for Vec3<T> {\n\n type Output = Vec3<T>;\n\n fn div(self, t: T) -> Vec3<T> {\n\n Vec3::new(\n\n self.x / t,\n\n self.y / t,\n\n self.z / t,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/vec3d.rs", "rank": 96, "score": 1.5747826284411688 }, { "content": " y: bound.center().y,\n\n z: bound.center().z}, 7.0, 10.0)),\n\n Box::new(BigGravityWell::new(Point3{\n\n x: bound.center().x - 500.0,\n\n y: bound.center().y,\n\n z: bound.center().z}, 1.0, 1.0, 13)),\n\n // Box::new(GravityWell::new(Point3{\n\n // x: bound.center().x + 130.0,\n\n // y: bound.center().y,\n\n // z: bound.center().z}, 7.0, 10.0)),\n\n Box::new(GravityWell::new(Point3{\n\n x: bound.center().x + 160.0,\n\n y: bound.center().y,\n\n z: bound.center().z}, 7.0, 10.0))],\n\n bound,\n\n bound.center(),\n\n Box::new(move |p: Point3| 
{\n\n // meh\n\n let mut rng = thread_rng();\n\n ParticleBuilder::new(p,\n", "file_path": "src/main.rs", "rank": 97, "score": 1.0391590340526227 }, { "content": " let ttf_context = sdl2::ttf::init().unwrap();\n\n let mut font = ttf_context.load_font(\"/usr/share/wesnoth/fonts/DejaVuSans.ttf\", 12).unwrap();\n\n font.set_style(sdl2::ttf::STYLE_BOLD);\n\n let texture_creator = canvas.texture_creator();\n\n\n\n let mut cpt = 0;\n\n\n\n 'mainloop: loop {\n\n let mut surface_canvas = SurfaceCanvas::from_surface(\n\n Surface::new(1900, 1060, PixelFormatEnum::RGBA4444).unwrap()\n\n ).unwrap();\n\n\n\n for event in sdl_context.event_pump().unwrap().poll_iter() {\n\n match event {\n\n Event::Quit{..} |\n\n Event::KeyDown {keycode: Option::Some(Keycode::Escape), ..} =>\n\n break 'mainloop,\n\n Event::KeyDown {keycode: Option::Some(Keycode::Space), ..} => {\n\n for _ in 0..1 {\n\n world.create_particle();\n", "file_path": "src/main.rs", "rank": 98, "score": 0.8046942466017124 } ]
Rust
src/keychain/mod.rs
mikelodder7/keychain-services.rs
6c68371b199d9d450d185681d80720ed4561b8d0
pub mod item; pub mod key; use self::item::MatchLimit; pub use self::{item::Item, key::Key}; use crate::dictionary::*; use crate::error::Error; use crate::ffi::*; use core_foundation::base::{CFTypeRef, TCFType}; use std::{ffi::CString, os::raw::c_char, os::unix::ffi::OsStrExt, path::Path, ptr}; declare_TCFType! { Keychain, KeychainRef } impl_TCFType!(Keychain, KeychainRef, SecKeychainGetTypeID); impl Keychain { pub fn find_default() -> Result<Keychain, Error> { let mut result: KeychainRef = ptr::null_mut(); let status = unsafe { SecKeychainCopyDefault(&mut result) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Keychain::wrap_under_create_rule(result) }) } } pub fn create(path: &Path, password: Option<&str>) -> Result<Keychain, Error> { let path_cstring = CString::new(path.as_os_str().as_bytes()).unwrap(); let mut result: KeychainRef = ptr::null_mut(); let status = match password { Some(pw) => unsafe { SecKeychainCreate( path_cstring.as_ptr() as *const c_char, pw.len() as u32, pw.as_bytes().as_ptr() as *const c_char, false, ptr::null(), &mut result, ) }, None => unsafe { SecKeychainCreate( path_cstring.as_ptr() as *const c_char, 0, ptr::null(), true, ptr::null(), &mut result, ) }, }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Keychain::wrap_under_create_rule(result) }) } } pub fn delete(self) -> Result<(), Error> { let status = unsafe { SecKeychainDelete(self.as_concrete_TypeRef()) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(()) } } fn find_item(&self, mut attrs: DictionaryBuilder) -> Result<Item, Error> { attrs.add(unsafe { kSecMatchLimit }, &MatchLimit::One.as_CFType()); attrs.add_boolean(unsafe { kSecReturnRef }, true); let mut result: ItemRef = ptr::null_mut(); let status = unsafe { SecItemCopyMatching( Dictionary::from(attrs).as_concrete_TypeRef(), &mut result as &mut CFTypeRef, ) }; if let Some(e) = Error::maybe_from_OSStatus(status) { return Err(e); } 
Ok(unsafe { Item::wrap_under_create_rule(result) }) } fn add_item(&self, mut attrs: DictionaryBuilder) -> Result<Item, Error> { attrs.add(unsafe { kSecUseKeychain }, self); attrs.add_boolean(unsafe { kSecReturnRef }, true); let mut result: ItemRef = ptr::null_mut(); let status = unsafe { SecItemAdd(Dictionary::from(attrs).as_concrete_TypeRef(), &mut result) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Item::wrap_under_create_rule(result) }) } } } impl Default for Keychain { fn default() -> Keychain { Self::find_default().expect("no default keychain available") } }
pub mod item; pub mod key; use self::item::MatchLimit; pub use self::{item::Item, key::Key}; use crate::dictionary::*; use crate::error::Error; use crate::ffi::*; use core_foundation::base::{CFTypeRef, TCFType}; use std::{ffi::CString, os::raw::c_char, os::unix::ffi::OsStrExt, path::Path, ptr}; declare_TCFType! { Keychain, KeychainRef } impl_TCFType!(Keychain, KeychainRef, SecKeychainGetTypeID); impl Keychain { pub fn find_default() -> Result<Keychain, Error> { let mut result: KeychainRef = ptr::null_mut(); let status = unsafe { SecKeychainCopyDefault(&mut result) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Keychain::wrap_under_create_rule(result) }) } } pub fn create(path: &Path, password: Option<&str>) -> Result<Keychain, Error> { let path_cstring = CString::new(path.as_os_str().as_bytes()).unwrap(); let mut result: KeychainRef = ptr::null_mut(); let status = match password { Some(pw) => unsafe { SecKeychainCreate( path_cstring.as_ptr() as *const c_char, pw.len() as u32, pw.as_bytes().as_ptr() as *const c_char, false, ptr::null(), &mut result, ) }, None => unsafe { SecKeychainCreate( path_cstring.as_ptr() as *const c_char, 0, ptr::null(), true, ptr::null(), &mut result, ) }, }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Keychain::wrap_under_create_rule(result) }) } } pub fn delete(self) -> Result<(), Error> { let status = unsafe { SecKeychainDelete(self.as_concrete_TypeRef()) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(()) } } fn find_item(&self, mut attrs: DictionaryBuilder) -> Result<Item, Error> { attrs.add(unsafe { kSecMatchLimit }, &MatchLimit::One.as_CFType()); attrs.add_boolean(unsafe { kSecReturnRef }, true); let mut result: ItemRef = ptr::null_mut(); let status = unsafe { SecItemCopyMatching( Dictionary::from(attrs).as_concrete_TypeRef(), &mut result as &mut CFTypeRef,
unsafe { SecItemAdd(Dictionary::from(attrs).as_concrete_TypeRef(), &mut result) }; if let Some(e) = Error::maybe_from_OSStatus(status) { Err(e) } else { Ok(unsafe { Item::wrap_under_create_rule(result) }) } } } impl Default for Keychain { fn default() -> Keychain { Self::find_default().expect("no default keychain available") } }
) }; if let Some(e) = Error::maybe_from_OSStatus(status) { return Err(e); } Ok(unsafe { Item::wrap_under_create_rule(result) }) } fn add_item(&self, mut attrs: DictionaryBuilder) -> Result<Item, Error> { attrs.add(unsafe { kSecUseKeychain }, self); attrs.add_boolean(unsafe { kSecReturnRef }, true); let mut result: ItemRef = ptr::null_mut(); let status =
random
[ { "content": "#[test]\n\nfn generate_and_use_rsa_keys() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params = KeyPairGenerateParams::new(AttrKeyType::Rsa, 2048).access_control(&acl);\n\n\n\n let keypair = KeyPair::create(generate_params).unwrap();\n\n\n\n let signature = keypair\n\n .private_key\n\n .sign(KeyAlgorithm::RSASignatureMessagePSSSHA256, TEST_MESSAGE)\n\n .unwrap();\n\n\n\n let public_key_bytes = keypair.public_key.to_external_representation().unwrap();\n\n\n\n let res = ring::signature::verify(\n\n &ring::signature::RSA_PSS_2048_8192_SHA256,\n\n untrusted::Input::from(&public_key_bytes),\n\n untrusted::Input::from(TEST_MESSAGE),\n\n untrusted::Input::from(signature.as_ref()),\n\n );\n\n assert!(res.is_ok());\n\n\n\n let res = keypair.public_key.verify(TEST_MESSAGE, &signature);\n\n assert!(res.is_ok());\n\n assert!(res.unwrap());\n\n let res = keypair.public_key.verify(&[0u8, 0u8], &signature);\n\n assert!(res.is_err());\n\n}\n\n\n", "file_path": "tests/core.rs", "rank": 0, "score": 84940.71040941705 }, { "content": "use crate::{attr::*, dictionary::DictionaryBuilder, error::Error, ffi::*, keychain::*};\n\nuse std::str;\n\nuse zeroize::Zeroize;\n\n\n\n/// Generic passwords\n\npub struct GenericPassword(Item);\n\n\n\nimpl GenericPassword {\n\n /// Create a new generic password item in the given keychain.\n\n pub fn create(\n\n keychain: &Keychain,\n\n service: &str,\n\n account: &str,\n\n password: &str,\n\n ) -> Result<Self, Error> {\n\n let mut attrs = DictionaryBuilder::new();\n\n attrs.add_class(item::Class::GenericPassword);\n\n attrs.add_string(AttrKind::Service, service);\n\n attrs.add_string(AttrKind::Account, account);\n\n attrs.add_string(unsafe { kSecValueData }, password);\n", "file_path": "src/keychain/item/password.rs", "rank": 1, "score": 81204.29988983274 }, { "content": "\n\n Ok(GenericPassword(keychain.add_item(attrs)?))\n\n }\n\n\n\n /// Find a 
generic password in the given keychain.\n\n pub fn find(keychain: &Keychain, service: &str, account: &str) -> Result<Self, Error> {\n\n let mut attrs = DictionaryBuilder::new();\n\n attrs.add_class(item::Class::GenericPassword);\n\n attrs.add_string(AttrKind::Service, service);\n\n attrs.add_string(AttrKind::Account, account);\n\n\n\n Ok(GenericPassword(keychain.find_item(attrs)?))\n\n }\n\n\n\n /// Get the account this password is associated with\n\n pub fn account(&self) -> Result<String, Error> {\n\n self.0.attribute(AttrKind::Account)\n\n }\n\n\n\n /// Get the service this password is associated with\n", "file_path": "src/keychain/item/password.rs", "rank": 2, "score": 81201.08107629478 }, { "content": " pub fn service(&self) -> Result<String, Error> {\n\n self.0.attribute(AttrKind::Service)\n\n }\n\n\n\n /// Get the raw password value\n\n pub fn password(&self) -> Result<PasswordData, Error> {\n\n Ok(PasswordData(self.0.data()?))\n\n }\n\n}\n\n\n\n/// Internet passwords\n\npub struct InternetPassword(Item);\n\n\n\nimpl InternetPassword {\n\n /// Create a new Internet password item in the given keychain.\n\n pub fn create(\n\n keychain: &Keychain,\n\n server: &str,\n\n account: &str,\n\n password: &str,\n", "file_path": "src/keychain/item/password.rs", "rank": 3, "score": 81200.99383146789 }, { "content": " ) -> Result<Self, Error> {\n\n let mut attrs = DictionaryBuilder::new();\n\n attrs.add_class(item::Class::InternetPassword);\n\n attrs.add_string(AttrKind::Server, server);\n\n attrs.add_string(AttrKind::Account, account);\n\n attrs.add_string(unsafe { kSecValueData }, password);\n\n\n\n Ok(InternetPassword(keychain.add_item(attrs)?))\n\n }\n\n\n\n /// Find an Internet password in the given keychain.\n\n pub fn find(\n\n keychain: &Keychain,\n\n server: &str,\n\n account: &str,\n\n protocol: Option<AttrProtocol>,\n\n ) -> Result<Self, Error> {\n\n let mut attrs = DictionaryBuilder::new();\n\n attrs.add_class(item::Class::InternetPassword);\n\n 
attrs.add_string(AttrKind::Server, server);\n", "file_path": "src/keychain/item/password.rs", "rank": 4, "score": 81200.96695376946 }, { "content": " attrs.add_string(AttrKind::Account, account);\n\n\n\n if let Some(proto) = protocol {\n\n attrs.add_attr(&proto);\n\n }\n\n\n\n Ok(InternetPassword(keychain.find_item(attrs)?))\n\n }\n\n\n\n /// Get the account this password is associated with\n\n pub fn account(&self) -> Result<String, Error> {\n\n self.0.attribute(AttrKind::Account)\n\n }\n\n\n\n /// Get the service this password is associated with\n\n pub fn server(&self) -> Result<String, Error> {\n\n self.0.attribute(AttrKind::Server)\n\n }\n\n\n\n /// Get the raw password value\n", "file_path": "src/keychain/item/password.rs", "rank": 5, "score": 81196.94883644636 }, { "content": " pub fn password(&self) -> Result<PasswordData, Error> {\n\n Ok(PasswordData(self.0.data()?))\n\n }\n\n}\n\n\n\n/// Wrapper around password data that ensures it is cleared from memory after\n\n/// being used.\n\n#[derive(Clone)]\n\npub struct PasswordData(Vec<u8>);\n\n\n\nimpl PasswordData {\n\n /// Borrow the password as a byte slice\n\n pub fn as_bytes(&self) -> &[u8] {\n\n self.0.as_ref()\n\n }\n\n\n\n /// Borrow the password as a `str` (if valid UTF-8), panicking if the\n\n /// UTF-8 conversion fails.\n\n pub fn as_str(&self) -> &str {\n\n self.try_as_str().expect(\"password contained invalid UTF-8\")\n", "file_path": "src/keychain/item/password.rs", "rank": 6, "score": 81191.85394933693 }, { "content": " }\n\n\n\n /// Borrow the password as a `str` (if valid UTF-8), returning a\n\n /// `Utf8Error` if the UTF-8 conversion failed.\n\n pub fn try_as_str(&self) -> Result<&str, str::Utf8Error> {\n\n str::from_utf8(self.as_bytes())\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for PasswordData {\n\n fn as_ref(&self) -> &[u8] {\n\n self.as_bytes()\n\n }\n\n}\n\n\n\nimpl Drop for PasswordData {\n\n fn drop(&mut self) {\n\n self.0.zeroize();\n\n }\n\n}\n", "file_path": 
"src/keychain/item/password.rs", "rank": 7, "score": 81190.76279846813 }, { "content": "//! Items stored in a keychain (e.g. certificates, keys, passwords)\n\n\n\nmod class;\n\nmod password;\n\nmod query;\n\n\n\npub use self::{class::*, password::*, query::*};\n\nuse crate::{attr::AttrKind, error::*, ffi::*};\n\nuse core_foundation::base::TCFType;\n\nuse std::{mem, os::raw::c_void, ptr, slice};\n\n\n\ndeclare_TCFType! {\n\n /// Items stored in the keychain.\n\n ///\n\n /// Wrapper for the `SecKeychainItem`/`SecKeychainItemRef` types:\n\n /// <https://developer.apple.com/documentation/security/seckeychainitemref>\n\n Item, ItemRef\n\n}\n\n\n\nimpl_TCFType!(Item, ItemRef, SecKeychainItemGetTypeID);\n", "file_path": "src/keychain/item/mod.rs", "rank": 8, "score": 80663.09527655954 }, { "content": " \"SecKeychainItemCopyContent refused to return data\",\n\n ))\n\n } else {\n\n // Copy the data into a vector we've allocated\n\n let result = Vec::from(unsafe { slice::from_raw_parts(result_ptr, length as usize) });\n\n\n\n // Free the original data\n\n Error::maybe_from_OSStatus(unsafe {\n\n SecKeychainItemFreeContent(ptr::null_mut(), result_ptr as *mut c_void)\n\n })\n\n .unwrap();\n\n\n\n Ok(result)\n\n }\n\n }\n\n\n\n /// Get an attribute of this item as a `String`.\n\n // TODO: handle attribute types other than `String`?\n\n pub(crate) fn attribute(&self, attr_kind: AttrKind) -> Result<String, Error> {\n\n let mut attrs = unsafe { self.attributes() }?;\n", "file_path": "src/keychain/item/mod.rs", "rank": 9, "score": 80662.63276572434 }, { "content": " &format!(\"missing attribute {:?}\", attr_kind),\n\n )\n\n })\n\n }\n\n\n\n /// Get the attributes of a keychain item. 
Note that this does not handle\n\n /// deallocating the attribute list so the caller must take care to do so.\n\n unsafe fn attributes(&self) -> Result<SecKeychainAttributeList, Error> {\n\n let mut result: SecKeychainAttributeList = mem::zeroed();\n\n\n\n let status = SecKeychainItemCopyContent(\n\n self.as_concrete_TypeRef(),\n\n ptr::null_mut(),\n\n &mut result,\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n );\n\n\n\n if let Some(e) = Error::maybe_from_OSStatus(status) {\n\n Err(e)\n\n } else {\n\n Ok(result)\n\n }\n\n }\n\n}\n", "file_path": "src/keychain/item/mod.rs", "rank": 10, "score": 80661.28651526109 }, { "content": " /// Get the raw data associated with this keychain item\n\n pub(crate) fn data(&self) -> Result<Vec<u8>, Error> {\n\n let result_ptr: *mut u8 = ptr::null_mut();\n\n let mut length = 0;\n\n\n\n let status = unsafe {\n\n SecKeychainItemCopyContent(\n\n self.as_concrete_TypeRef(),\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n &mut length,\n\n &mut (result_ptr as *mut c_void),\n\n )\n\n };\n\n\n\n if let Some(e) = Error::maybe_from_OSStatus(status) {\n\n Err(e)\n\n } else if result_ptr.is_null() {\n\n Err(Error::new(\n\n ErrorKind::MissingEntitlement,\n", "file_path": "src/keychain/item/mod.rs", "rank": 11, "score": 80661.11427912847 }, { "content": "\n\nimpl Item {\n\n /// Get the class of this item\n\n pub fn class(&self) -> Class {\n\n let mut result = FourCharacterCode::from(b\"NULL\");\n\n\n\n Error::maybe_from_OSStatus(unsafe {\n\n SecKeychainItemCopyContent(\n\n self.as_concrete_TypeRef(),\n\n &mut result,\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n )\n\n })\n\n .unwrap();\n\n\n\n result.into()\n\n }\n\n\n", "file_path": "src/keychain/item/mod.rs", "rank": 12, "score": 80660.28862822922 }, { "content": "\n\n let result = attrs\n\n .iter()\n\n .find(|attr| {\n\n if let Some(kind) = AttrKind::from_tag(attr.tag()) {\n\n kind == attr_kind\n\n } else {\n\n false\n\n }\n\n })\n\n .map(|attr| 
String::from_utf8(attr.data().unwrap().into()).unwrap());\n\n\n\n Error::maybe_from_OSStatus(unsafe {\n\n SecKeychainItemFreeContent(&mut attrs, ptr::null_mut())\n\n })\n\n .unwrap();\n\n\n\n result.ok_or_else(|| {\n\n Error::new(\n\n ErrorKind::NoSuchAttr,\n", "file_path": "src/keychain/item/mod.rs", "rank": 13, "score": 80658.31136059442 }, { "content": " ///\n\n /// Wrapper for `SecItemCopyMatching`. See:\n\n /// <https://developer.apple.com/documentation/security/1398306-secitemcopymatching>\n\n pub fn find(query: item::Query) -> Result<Self, Error> {\n\n let mut params = DictionaryBuilder::from(query);\n\n params.add(unsafe { kSecClass }, &item::Class::Key.as_CFString());\n\n params.add(unsafe { kSecMatchLimit }, &MatchLimit::One.as_CFType());\n\n params.add_boolean(unsafe { kSecReturnRef }, true);\n\n\n\n let mut result: KeyRef = ptr::null_mut();\n\n let status = unsafe {\n\n SecItemCopyMatching(\n\n Dictionary::from(params).as_concrete_TypeRef(),\n\n &mut result as &mut CFTypeRef,\n\n )\n\n };\n\n\n\n // Return an error if the status was unsuccessful\n\n if let Some(e) = Error::maybe_from_OSStatus(status) {\n\n return Err(e);\n", "file_path": "src/keychain/key/mod.rs", "rank": 14, "score": 79598.49561958316 }, { "content": " /// <https://developer.apple.com/documentation/security/1643698-seckeycopyexternalrepresentation>\n\n pub fn to_external_representation(&self) -> Result<Vec<u8>, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let data =\n\n unsafe { SecKeyCopyExternalRepresentation(self.as_concrete_TypeRef(), &mut error) };\n\n\n\n if error.is_null() {\n\n Ok(unsafe { CFData::wrap_under_create_rule(data) }.to_vec())\n\n } else {\n\n Err(error.into())\n\n }\n\n }\n\n\n\n /// Restores a key from an external representation of that key.\n\n ///\n\n /// Wrapper for the `SecKeyCreateWithData` function. 
See:\n\n /// <https://developer.apple.com/documentation/security/1643701-seckeycreatewithdata>\n\n pub fn from_external_representation(params: RestoreKeyParams) -> Result<Self, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let data = unsafe {\n", "file_path": "src/keychain/key/mod.rs", "rank": 15, "score": 79592.37394812534 }, { "content": " CFData::from_buffer(ciphertext.as_ref()).as_concrete_TypeRef(),\n\n &mut error,\n\n )\n\n };\n\n\n\n if error.is_null() {\n\n let bytes = unsafe { CFData::wrap_under_create_rule(plaintext) }.to_vec();\n\n Ok(bytes)\n\n } else {\n\n Err(error.into())\n\n }\n\n }\n\n\n\n /// Delete this key from the keychain\n\n ///\n\n /// Wrapper for `SecItemDelete` function. See:\n\n /// <https://developer.apple.com/documentation/security/1395547-secitemdelete>\n\n pub fn delete(self) -> Result<(), Error> {\n\n let mut query = DictionaryBuilder::new();\n\n let key_class = self.class().unwrap();\n", "file_path": "src/keychain/key/mod.rs", "rank": 16, "score": 79592.01990942872 }, { "content": " if error.is_null() {\n\n Ok(result == 0x1)\n\n } else {\n\n Err(error.into())\n\n }\n\n }\n\n\n\n /// Encrypts a block of data using a public key and specified algorithm\n\n ///\n\n /// Wrapper for the `SecKeyCreateEncryptedData` function. 
See:\n\n /// <https://developer.apple.com/documentation/security/1643957-seckeycreateencrypteddata>\n\n pub fn encrypt(&self, alg: KeyAlgorithm, plaintext: &[u8]) -> Result<Ciphertext, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let ciphertext = unsafe {\n\n SecKeyCreateEncryptedData(\n\n self.as_concrete_TypeRef(),\n\n alg.as_CFString().as_CFTypeRef(),\n\n CFData::from_buffer(plaintext).as_concrete_TypeRef(),\n\n &mut error,\n\n )\n", "file_path": "src/keychain/key/mod.rs", "rank": 17, "score": 79591.79374339324 }, { "content": " };\n\n\n\n if error.is_null() {\n\n let bytes = unsafe { CFData::wrap_under_create_rule(ciphertext) }.to_vec();\n\n Ok(Ciphertext::new(alg, bytes))\n\n } else {\n\n Err(error.into())\n\n }\n\n }\n\n\n\n /// Decrypts a block of data using a private key and specified algorithm\n\n ///\n\n /// Wrapper for the `SecKeyCreateDecryptedData` function. See:\n\n /// <https://developer.apple.com/documentation/security/1644043-seckeycreatedecrypteddata>\n\n pub fn decrypt(&self, ciphertext: Ciphertext) -> Result<Vec<u8>, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let plaintext = unsafe {\n\n SecKeyCreateDecryptedData(\n\n self.as_concrete_TypeRef(),\n\n ciphertext.algorithm().as_CFString().as_CFTypeRef(),\n", "file_path": "src/keychain/key/mod.rs", "rank": 18, "score": 79590.72820026345 }, { "content": "//! 
Keys stored in macOS Keychain Services.\n\n\n\nmod algorithm;\n\nmod operation;\n\nmod pair;\n\n\n\npub use self::{algorithm::*, operation::*, pair::*};\n\nuse crate::{\n\n attr::*,\n\n ciphertext::Ciphertext,\n\n dictionary::{Dictionary, DictionaryBuilder},\n\n error::Error,\n\n ffi::*,\n\n keychain::item::{self, MatchLimit},\n\n signature::Signature,\n\n};\n\nuse core_foundation::{\n\n base::{CFIndexConvertible, CFType, CFTypeRef, FromVoid, TCFType},\n\n data::{CFData, CFDataRef},\n\n error::CFErrorRef,\n", "file_path": "src/keychain/key/mod.rs", "rank": 19, "score": 79590.58127393063 }, { "content": " Err(error.into())\n\n }\n\n }\n\n\n\n /// Verifies the cryptographic signature of the given data using this key.\n\n ///\n\n /// Wrapper for the `SecKeyVerifySignature` function. See:\n\n /// <https://developer.apple.com/documentation/security/1643715-seckeyverifysignature>\n\n pub fn verify(&self, signed_data: &[u8], signature: &Signature) -> Result<bool, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let result = unsafe {\n\n SecKeyVerifySignature(\n\n self.as_concrete_TypeRef(),\n\n signature.algorithm().as_CFString().as_CFTypeRef(),\n\n CFData::from_buffer(signed_data).as_concrete_TypeRef(),\n\n CFData::from_buffer(signature.as_bytes()).as_concrete_TypeRef(),\n\n &mut error,\n\n )\n\n };\n\n\n", "file_path": "src/keychain/key/mod.rs", "rank": 20, "score": 79588.85882785046 }, { "content": "\n\n /// Create a cryptographic signature of the given data using this key.\n\n ///\n\n /// Wrapper for the `SecKeyCreateSignature` function. 
See:\n\n /// <https://developer.apple.com/documentation/security/1643916-seckeycreatesignature>\n\n pub fn sign(&self, alg: KeyAlgorithm, data: &[u8]) -> Result<Signature, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let signature = unsafe {\n\n SecKeyCreateSignature(\n\n self.as_concrete_TypeRef(),\n\n alg.as_CFString().as_CFTypeRef(),\n\n CFData::from_buffer(data).as_concrete_TypeRef(),\n\n &mut error,\n\n )\n\n };\n\n\n\n if error.is_null() {\n\n let bytes = unsafe { CFData::wrap_under_create_rule(signature) }.to_vec();\n\n Ok(Signature::new(alg, bytes))\n\n } else {\n", "file_path": "src/keychain/key/mod.rs", "rank": 21, "score": 79588.73419470133 }, { "content": " query.add(unsafe { kSecClass }, &item::Class::Key.as_CFString());\n\n query.add(unsafe { kSecAttrKeyClass }, &key_class.as_CFString());\n\n if key_class == AttrKeyClass::Public {\n\n query.add(unsafe { kSecAttrKeyType }, &self.key_type().unwrap().as_CFString());\n\n query.add(\n\n unsafe { kSecAttrApplicationTag },\n\n &self.application_tag().unwrap().as_CFType(),\n\n );\n\n } else if key_class == AttrKeyClass::Private {\n\n query.add(\n\n unsafe { kSecAttrApplicationLabel },\n\n &self.application_label().unwrap().as_CFType(),\n\n );\n\n query.add_boolean(unsafe { kSecReturnRef }, true);\n\n }\n\n let status = unsafe { SecItemDelete(Dictionary::from(query).as_concrete_TypeRef()) };\n\n if let Some(e) = Error::maybe_from_OSStatus(status) {\n\n Err(e)\n\n } else {\n\n Ok(())\n", "file_path": "src/keychain/key/mod.rs", "rank": 22, "score": 79587.12098641455 }, { "content": " }\n\n\n\n Ok(unsafe { Key::wrap_under_create_rule(result) })\n\n }\n\n\n\n /// Get the `AttrApplicationLabel` for this `Key`.\n\n pub fn application_label(&self) -> Option<AttrApplicationLabel> {\n\n self.attributes()\n\n .find(AttrKind::ApplicationLabel)\n\n .map(|tag| {\n\n AttrApplicationLabel(unsafe {\n\n CFData::wrap_under_get_rule(tag.as_CFTypeRef() as CFDataRef)\n\n })\n\n })\n\n }\n\n\n\n /// Get the 
`AttrApplicationTag` for this `Key`.\n\n pub fn application_tag(&self) -> Option<AttrApplicationTag> {\n\n self.attributes().find(AttrKind::ApplicationTag).map(|tag| {\n\n AttrApplicationTag(unsafe {\n", "file_path": "src/keychain/key/mod.rs", "rank": 23, "score": 79583.36145390045 }, { "content": " SecKeyCreateWithData(\n\n CFData::from_buffer(params.as_bytes()).as_concrete_TypeRef(),\n\n params.attributes().as_concrete_TypeRef(),\n\n &mut error,\n\n )\n\n };\n\n\n\n if error.is_null() {\n\n Ok(unsafe { Key::wrap_under_create_rule(data) })\n\n } else {\n\n Err(error.into())\n\n }\n\n }\n\n\n\n /// Fetch attributes for this `Key`.\n\n ///\n\n /// Wrapper for `SecKeyCopyAttributes`. See:\n\n /// <https://developer.apple.com/documentation/security/1643699-seckeycopyattributes>\n\n fn attributes(&self) -> Dictionary {\n\n unsafe { Dictionary::wrap_under_get_rule(SecKeyCopyAttributes(self.as_concrete_TypeRef())) }\n", "file_path": "src/keychain/key/mod.rs", "rank": 24, "score": 79582.62446424787 }, { "content": " string::{CFString, CFStringRef},\n\n};\n\nuse std::{\n\n ffi::c_void,\n\n fmt::{self, Debug},\n\n ptr,\n\n};\n\n\n\ndeclare_TCFType! {\n\n /// Object which represents a cryptographic key.\n\n ///\n\n /// Wrapper for the `SecKey`/`SecKeyRef` types:\n\n /// <https://developer.apple.com/documentation/security/seckeyref>\n\n Key, KeyRef\n\n}\n\n\n\nimpl_TCFType!(Key, KeyRef, SecKeyGetTypeID);\n\n\n\nimpl Key {\n\n /// Find a `Key` in the keyring using the given `ItemQuery`.\n", "file_path": "src/keychain/key/mod.rs", "rank": 25, "score": 79580.58201571027 }, { "content": " pub fn key_type(&self) -> Option<AttrKeyType> {\n\n self.attributes()\n\n .find(AttrKind::KeyType)\n\n .map(|keytype| AttrKeyType::from(keytype.as_CFTypeRef() as CFStringRef))\n\n }\n\n\n\n /// Determine whether a key is suitable for an operation using a certain algorithm\n\n ///\n\n /// Wrapper for the `SecKeyIsAlgorithmSupported` function. 
See:\n\n /// <https://developer.apple.com/documentation/security/1644057-seckeyisalgorithmsupported>\n\n pub fn is_supported(&self, operation: KeyOperation, alg: KeyAlgorithm) -> bool {\n\n let res = unsafe {\n\n SecKeyIsAlgorithmSupported(\n\n self.as_concrete_TypeRef(),\n\n operation.to_CFIndex(),\n\n alg.as_CFString().as_CFTypeRef(),\n\n )\n\n };\n\n res == 1\n\n }\n", "file_path": "src/keychain/key/mod.rs", "rank": 26, "score": 79579.33183651761 }, { "content": " CFData::wrap_under_get_rule(tag.as_CFTypeRef() as CFDataRef)\n\n })\n\n })\n\n }\n\n\n\n /// Get the `AttrLabel` for this `Key`.\n\n pub fn label(&self) -> Option<AttrLabel> {\n\n self.attributes().find(AttrKind::Label).map(|label| {\n\n AttrLabel(unsafe { CFString::wrap_under_get_rule(label.as_CFTypeRef() as CFStringRef) })\n\n })\n\n }\n\n\n\n /// Get the `AttrKeyClass` for this `Key`.\n\n pub fn class(&self) -> Option<AttrKeyClass> {\n\n self.attributes()\n\n .find(AttrKind::KeyClass)\n\n .map(|class| AttrKeyClass::from(class.as_CFTypeRef() as CFStringRef))\n\n }\n\n\n\n /// Get the `AttrKeyType` for this `Key`.\n", "file_path": "src/keychain/key/mod.rs", "rank": 27, "score": 79578.17545261976 }, { "content": " }\n\n}\n\n\n\nimpl Debug for Key {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"SecKey {{ application_label: {:?}, application_tag: {:?}, label: {:?} }}\",\n\n self.application_label(),\n\n self.application_tag(),\n\n self.label()\n\n )\n\n }\n\n}\n", "file_path": "src/keychain/key/mod.rs", "rank": 28, "score": 79578.09403309117 }, { "content": " }\n\n }\n\n\n\n /// Export this key as an external representation.\n\n ///\n\n /// If the key is not exportable the operation will fail (e.g. 
if it\n\n /// was generated inside of the Secure Enclave, or if the \"Extractable\"\n\n /// flag is set to NO).\n\n ///\n\n /// The data returned depends on the key type:\n\n ///\n\n /// - RSA: PKCS#1 format\n\n /// - EC: ANSI X9.63 bytestring:\n\n /// - Public key: `04 || X || Y`\n\n /// - Private key: Concatenation of public key with big endian encoding\n\n /// of the secret scalar, i.e. `04 || X || Y || K`\n\n ///\n\n /// All representations use fixed-size integers with leading zeroes.\n\n ///\n\n /// Wrapper for the `SecKeyCopyExternalRepresentation` function. See:\n", "file_path": "src/keychain/key/mod.rs", "rank": 29, "score": 79571.54742829717 }, { "content": "#[test]\n\nfn key_delete() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params = KeyPairGenerateParams::new(AttrKeyType::EcSecPrimeRandom, 256).access_control(&acl)\n\n .permanent(true);\n\n\n\n let keypair = KeyPair::generate(generate_params).unwrap();\n\n let res = keypair.private_key.delete();\n\n println!(\"{:?}\", res);\n\n assert!(res.is_ok());\n\n}\n", "file_path": "tests/interactive.rs", "rank": 30, "score": 64767.56593659517 }, { "content": "#[test]\n\nfn key_query() {\n\n let keypair = generate_keypair(\n\n \"rs.keychain-services.test.integration.query\",\n\n \"keychain-services.rs integration test query key\",\n\n );\n\n\n\n let private_key_query = keychain::item::Query::new()\n\n .key_class(AttrKeyClass::Private)\n\n .key_type(AttrKeyType::EcSecPrimeRandom)\n\n .application_label(keypair.public_key.application_label().unwrap());\n\n\n\n let private_key = Key::find(private_key_query).unwrap();\n\n\n\n assert_eq!(\n\n keypair.private_key.application_label(),\n\n private_key.application_label()\n\n );\n\n}\n\n\n\n/// Passwords\n", "file_path": "tests/interactive.rs", "rank": 31, "score": 64767.56593659517 }, { "content": "/// Create a temporary keychain we can use for testing\n\nfn temp_keychain() -> 
TempKeychain {\n\n let dir = tempfile::tempdir().unwrap();\n\n let keychain =\n\n Keychain::create(&dir.path().join(\"test-keychain\"), Some(TEST_PASSWORD)).unwrap();\n\n\n\n TempKeychain { dir, keychain }\n\n}\n\n\n", "file_path": "tests/interactive.rs", "rank": 32, "score": 64147.723025192834 }, { "content": "#[test]\n\nfn store_and_retrieve_passwords() {\n\n let tmp = temp_keychain();\n\n let service = \"example.com\";\n\n let account = \"example\";\n\n\n\n let keychain_item =\n\n keychain::item::GenericPassword::create(&tmp.keychain, service, account, TEST_PASSWORD)\n\n .unwrap();\n\n\n\n assert_eq!(keychain_item.service().unwrap(), service);\n\n assert_eq!(keychain_item.account().unwrap(), account);\n\n assert_eq!(keychain_item.password().unwrap().as_str(), TEST_PASSWORD);\n\n}\n\n\n\n///\n", "file_path": "tests/interactive.rs", "rank": 33, "score": 63390.55758915597 }, { "content": "#[test]\n\nfn encrypt_and_decrypt_rsa_keys() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params = KeyPairGenerateParams::new(AttrKeyType::Rsa, 2048).access_control(&acl);\n\n\n\n let keypair = KeyPair::create(generate_params).unwrap();\n\n\n\n let ciphertext = keypair\n\n .public_key\n\n .encrypt(KeyAlgorithm::RSAEncryptionOAEPSHA256, TEST_MESSAGE)\n\n .unwrap();\n\n\n\n let res = keypair.private_key.decrypt(ciphertext);\n\n assert!(res.is_ok());\n\n assert_eq!(res.unwrap(), TEST_MESSAGE);\n\n let ciphertext = Ciphertext::new(KeyAlgorithm::RSAEncryptionOAEPSHA256, vec![0u8, 0u8]);\n\n let res = keypair.private_key.decrypt(ciphertext);\n\n assert!(res.is_err());\n\n\n\n assert!(!keypair\n\n .private_key\n\n .is_supported(KeyOperation::Encrypt, KeyAlgorithm::RSAEncryptionOAEPSHA256));\n\n let res = keypair\n\n .private_key\n\n .encrypt(KeyAlgorithm::RSAEncryptionOAEPSHA256, TEST_MESSAGE);\n\n assert!(res.is_err());\n\n}\n\n\n", "file_path": "tests/core.rs", "rank": 34, "score": 
59433.90826095529 }, { "content": "#[test]\n\nfn export_and_import_ecdsa_keys() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params =\n\n KeyPairGenerateParams::new(AttrKeyType::EcSecPrimeRandom, 256).access_control(&acl);\n\n\n\n let keypair = KeyPair::create(generate_params).unwrap();\n\n\n\n let public_key_bytes = keypair.public_key.to_external_representation().unwrap();\n\n\n\n let restore_params = RestoreKeyParams {\n\n key_type: AttrKeyType::EcSecPrimeRandom,\n\n key_data: public_key_bytes.clone(),\n\n key_class: AttrKeyClass::Public,\n\n };\n\n\n\n let res = Key::from_external_representation(restore_params);\n\n\n\n assert!(res.is_ok());\n", "file_path": "tests/core.rs", "rank": 35, "score": 59433.90826095529 }, { "content": "#[test]\n\nfn generate_and_sign_with_create_ecdsa_keys() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params =\n\n KeyPairGenerateParams::new(AttrKeyType::EcSecPrimeRandom, 256).access_control(&acl);\n\n\n\n let keypair = KeyPair::create(generate_params).unwrap();\n\n\n\n let public_key_bytes = keypair.public_key.to_external_representation().unwrap();\n\n\n\n let signature = keypair\n\n .private_key\n\n .sign(KeyAlgorithm::ECDSASignatureMessageX962SHA256, TEST_MESSAGE)\n\n .unwrap();\n\n\n\n ring::signature::verify(\n\n &ring::signature::ECDSA_P256_SHA256_ASN1,\n\n untrusted::Input::from(&public_key_bytes),\n\n untrusted::Input::from(TEST_MESSAGE),\n", "file_path": "tests/core.rs", "rank": 36, "score": 57139.345637599414 }, { "content": "#[test]\n\nfn generate_and_sign_with_generate_ecdsa_keys() {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params =\n\n KeyPairGenerateParams::new(AttrKeyType::EcSecPrimeRandom, 256).access_control(&acl);\n\n\n\n let keypair = 
KeyPair::generate(generate_params).unwrap();\n\n\n\n let public_key_bytes = keypair.public_key.to_external_representation().unwrap();\n\n\n\n let signature = keypair\n\n .private_key\n\n .sign(KeyAlgorithm::ECDSASignatureMessageX962SHA256, TEST_MESSAGE)\n\n .unwrap();\n\n\n\n ring::signature::verify(\n\n &ring::signature::ECDSA_P256_SHA256_ASN1,\n\n untrusted::Input::from(&public_key_bytes),\n\n untrusted::Input::from(TEST_MESSAGE),\n", "file_path": "tests/core.rs", "rank": 37, "score": 57139.345637599414 }, { "content": " MatchLimit::One => {\n\n unsafe { CFString::wrap_under_get_rule(kSecMatchLimitOne) }.as_CFType()\n\n }\n\n MatchLimit::Number(n) => CFNumber::from(n as i64).as_CFType(),\n\n MatchLimit::All => {\n\n unsafe { CFString::wrap_under_get_rule(kSecMatchLimitAll) }.as_CFType()\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Query builder for locating particular keychain items.\n\n///\n\n/// For more information, see \"Search Attribute Keys and Values\":\n\n/// <https://developer.apple.com/documentation/security/keychain_services/keychain_items/search_attribute_keys_and_values>\n\n#[derive(Default, Debug)]\n\npub struct Query(DictionaryBuilder);\n\n\n\nimpl Query {\n\n /// Create a new keychain item query builder\n", "file_path": "src/keychain/item/query.rs", "rank": 46, "score": 53093.128847347594 }, { "content": "//! Query the keychain, looking for particular items\n\n\n\nuse crate::{attr::*, dictionary::DictionaryBuilder, ffi::*};\n\nuse core_foundation::{\n\n base::{CFType, TCFType},\n\n number::CFNumber,\n\n string::CFString,\n\n};\n\n\n\n/// Limit the number of matched items to one or an unlimited number.\n\n///\n\n/// Wrapper for the `kSecMatchLimit` attribute key. See:\n\n/// <https://developer.apple.com/documentation/security/ksecmatchlimit>\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum MatchLimit {\n\n /// Match exactly one item.\n\n ///\n\n /// Wrapper for the `kSecMatchLimitOne` attribute value. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecmatchlimitone>\n\n One,\n", "file_path": "src/keychain/item/query.rs", "rank": 47, "score": 53092.7775557795 }, { "content": " pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n /// Query for keychain items with the provided `SecAttrApplicationLabel`\n\n /// (not to be confused with a `SecAttrLabel`), i.e. the hash/fingerprint\n\n /// of a public key in the keychain.\n\n ///\n\n /// Both the private and public key in a keypair have a\n\n /// `SecAttrApplicationLabel` set to the public key's fingerprint.\n\n ///\n\n /// Wrapper for the `kSecAttrApplicationLabel` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrlabel>\n\n pub fn application_label<L: Into<AttrApplicationLabel>>(mut self, label: L) -> Self {\n\n self.0.add_attr(&label.into());\n\n self\n\n }\n\n\n\n /// Query for keychain items with the provided `SecAttrApplicationTag`.\n\n ///\n", "file_path": "src/keychain/item/query.rs", "rank": 48, "score": 53091.65884241578 }, { "content": "\n\n Some(result)\n\n }\n\n /// Get `CFString` containing the `kSecClass` dictionary value for\n\n /// this particular `SecClass`.\n\n pub fn as_CFString(self) -> CFString {\n\n unsafe {\n\n CFString::wrap_under_get_rule(match self {\n\n Class::GenericPassword => kSecClassGenericPassword,\n\n Class::InternetPassword => kSecClassInternetPassword,\n\n Class::Certificate => kSecClassCertificate,\n\n Class::Key => kSecClassKey,\n\n Class::Identity => kSecClassIdentity,\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl From<FourCharacterCode> for Class {\n\n fn from(tag: FourCharacterCode) -> Self {\n\n Self::from_tag(tag).unwrap_or_else(|| panic!(\"invalid SecItemClass tag: {:?}\", tag))\n\n }\n\n}\n", "file_path": "src/keychain/item/class.rs", "rank": 49, "score": 53091.38589659845 }, { "content": "use crate::ffi::*;\n\nuse core_foundation::{base::TCFType, string::CFString};\n\n\n\n/// Classes of keychain items supported by Keychain 
Services\n\n/// (not to be confused with `SecAttrClass` or `SecType`)\n\n///\n\n/// Wrapper for the `kSecClass` attribute key. See:\n\n/// <https://developer.apple.com/documentation/security/ksecclass>\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum Class {\n\n /// Generic password items.\n\n ///\n\n /// Wrapper for the `kSecClassGenericPassword` attribute value. See:\n\n /// <https://developer.apple.com/documentation/security/ksecclassgenericpassword>\n\n GenericPassword,\n\n\n\n /// Internet passwords.\n\n ///\n\n /// Wrapper for the `kSecClassInternetPassword` attribute value. See:\n\n /// <https://developer.apple.com/documentation/security/ksecclassinternetpassword>\n", "file_path": "src/keychain/item/class.rs", "rank": 50, "score": 53090.52137290072 }, { "content": "\n\nimpl Class {\n\n /// Attempt to look up an attribute kind by its `FourCharacterCode`.\n\n // TODO: cache `FourCharacterCodes`? e.g. as `lazy_static`\n\n pub(crate) fn from_tag(tag: FourCharacterCode) -> Option<Self> {\n\n let result = unsafe {\n\n if tag == FourCharacterCode::from(kSecClassGenericPassword) {\n\n Class::GenericPassword\n\n } else if tag == FourCharacterCode::from(kSecClassInternetPassword) {\n\n Class::InternetPassword\n\n } else if tag == FourCharacterCode::from(kSecClassCertificate) {\n\n Class::Certificate\n\n } else if tag == FourCharacterCode::from(kSecClassKey) {\n\n Class::Key\n\n } else if tag == FourCharacterCode::from(kSecClassIdentity) {\n\n Class::Identity\n\n } else {\n\n return None;\n\n }\n\n };\n", "file_path": "src/keychain/item/class.rs", "rank": 51, "score": 53089.199211039566 }, { "content": " self.0.add_boolean(AttrKind::Sensitive, value);\n\n self\n\n }\n\n\n\n /// Query for keys stored in an external token i.e. the\n\n /// Secure Enclave Processor (SEP).\n\n ///\n\n /// Wrapper for the `kSecAttrTokenID` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrtokenid>\n\n pub fn token_id(mut self, value: AttrTokenId) -> Self {\n\n self.0.add_attr(&value);\n\n self\n\n }\n\n\n\n /// Prompt the user with the given custom message when using keys returned\n\n /// from this query.\n\n ///\n\n /// Wrapper for the `kSecUseOperationPrompt`. See:\n\n /// <https://developer.apple.com/documentation/security/ksecuseoperationprompt>\n\n pub fn use_operation_prompt(mut self, value: &str) -> Self {\n", "file_path": "src/keychain/item/query.rs", "rank": 52, "score": 53088.60718084164 }, { "content": " ///\n\n /// Wrapper for the `kSecAttrKeyType` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrkeytype>\n\n pub fn key_type(mut self, key_type: AttrKeyType) -> Self {\n\n self.0.add_attr(&key_type);\n\n self\n\n }\n\n\n\n /// Query for a particular (human-meaningful) label on keys\n\n ///\n\n /// Wrapper for the `kSecAttrLabel` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrlabel>\n\n pub fn label<L: Into<AttrLabel>>(mut self, label: L) -> Self {\n\n self.0.add_attr(&label.into());\n\n self\n\n }\n\n\n\n /// Query for keys which are or not permanent members of the default keychain.\n\n ///\n\n /// Wrapper for the `kSecAttrIsPermanent` attribute key. See:\n", "file_path": "src/keychain/item/query.rs", "rank": 53, "score": 53088.43457961454 }, { "content": "\n\n /// Match the specified number of items.\n\n ///\n\n /// Equivalent to passing a `CFNumberRef` as the value for\n\n /// `kSecMatchLimit`. See:\n\n /// <https://developer.apple.com/documentation/security/ksecmatchlimit>\n\n Number(usize),\n\n\n\n /// Match an unlimited number of items.\n\n ///\n\n /// Wrapper for the `kSecMatchLimitAll` attribute value. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecmatchlimitall>\n\n All,\n\n}\n\n\n\nimpl MatchLimit {\n\n /// Get `CFType` containing the `kSecMatchLimit` dictionary value for\n\n /// this particular `SecMatchLimit`.\n\n pub fn as_CFType(self) -> CFType {\n\n match self {\n", "file_path": "src/keychain/item/query.rs", "rank": 54, "score": 53086.945728756065 }, { "content": " /// Wrapper for the `kSecAttrApplicationTag` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrapplicationtag>\n\n pub fn application_tag<T>(mut self, tag: T) -> Self\n\n where\n\n T: Into<AttrApplicationTag>,\n\n {\n\n self.0.add_attr(&tag.into());\n\n self\n\n }\n\n\n\n /// Query for keys with the given `SecAttrKeyClass`.\n\n ///\n\n /// Wrapper for the `kSecAttrKeyClass` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrkeyclass>\n\n pub fn key_class(mut self, key_class: AttrKeyClass) -> Self {\n\n self.0.add_attr(&key_class);\n\n self\n\n }\n\n\n\n /// Query for keys with the given `SecAttrKeyType`.\n", "file_path": "src/keychain/item/query.rs", "rank": 55, "score": 53086.61924134268 }, { "content": " /// <https://developer.apple.com/documentation/security/ksecattrispermanent>\n\n pub fn permanent(mut self, value: bool) -> Self {\n\n self.0.add_boolean(AttrKind::Permanent, value);\n\n self\n\n }\n\n\n\n /// Query for keys which are or are not synchronizable.\n\n ///\n\n /// Wrapper for the `kSecAttrSynchronizable` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrsynchronizable>\n\n pub fn synchronizable(mut self, value: bool) -> Self {\n\n self.0.add_boolean(AttrKind::Synchronizable, value);\n\n self\n\n }\n\n\n\n /// Query for keys which are or are not sensitive.\n\n ///\n\n /// Wrapper for the `kSecAttrIsSensitive` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrissensitive>\n\n pub fn sensitive(mut self, value: bool) -> Self {\n", "file_path": "src/keychain/item/query.rs", "rank": 56, "score": 53086.38555325952 }, { "content": " self.0.add_string(unsafe { kSecUseOperationPrompt }, value);\n\n self\n\n }\n\n}\n\n\n\nimpl From<Query> for DictionaryBuilder {\n\n fn from(params: Query) -> DictionaryBuilder {\n\n params.0\n\n }\n\n}\n", "file_path": "src/keychain/item/query.rs", "rank": 57, "score": 53082.472902431255 }, { "content": " InternetPassword,\n\n\n\n /// Certificates.\n\n ///\n\n /// Wrapper for the `kSecClassCertificate` attribute value. See:\n\n /// <https://developer.apple.com/documentation/security/ksecclasscertificate>\n\n Certificate,\n\n\n\n /// Cryptographic keys.\n\n ///\n\n /// Wrapper for the `kSecClassKey` attribute value. See:\n\n /// <https://developer.apple.com/documentation/security/ksecclasskey>\n\n Key,\n\n\n\n /// Identities.\n\n ///\n\n /// Wrapper for the `kSecClassIdentity` attribute value. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecclassidentity>\n\n Identity,\n\n}\n", "file_path": "src/keychain/item/class.rs", "rank": 58, "score": 53078.5792095116 }, { "content": " /// Wrapper for the `SecKeyCreateRandomKey` function see:\n\n /// <https://developer.apple.com/documentation/security/1823694-seckeycreaterandomkey>\n\n pub fn create(params: KeyPairGenerateParams) -> Result<KeyPair, Error> {\n\n let mut error: CFErrorRef = ptr::null_mut();\n\n let private_key_ref: KeyRef = unsafe {\n\n SecKeyCreateRandomKey(Dictionary::from(params).as_concrete_TypeRef(), &mut error)\n\n };\n\n if private_key_ref.is_null() {\n\n Err(error.into())\n\n } else {\n\n let public_key_ref = unsafe { SecKeyCopyPublicKey(private_key_ref) };\n\n assert!(!public_key_ref.is_null());\n\n assert!(!private_key_ref.is_null());\n\n\n\n Ok(unsafe {\n\n KeyPair {\n\n public_key: Key::wrap_under_create_rule(public_key_ref),\n\n private_key: Key::wrap_under_create_rule(private_key_ref),\n\n }\n\n })\n", "file_path": "src/keychain/key/pair.rs", "rank": 59, "score": 52030.87999768306 }, { "content": " }\n\n }\n\n\n\n /// Generate a public/private `KeyPair` using the given\n\n /// `GeneratePairParams`.\n\n ///\n\n /// Wrapper for the `SecKeyGeneratePair` function. 
See:\n\n /// <https://developer.apple.com/documentation/security/1395339-seckeygeneratepair>\n\n pub fn generate(params: KeyPairGenerateParams) -> Result<KeyPair, Error> {\n\n let mut public_key_ref: KeyRef = ptr::null_mut();\n\n let mut private_key_ref: KeyRef = ptr::null_mut();\n\n\n\n let status = unsafe {\n\n SecKeyGeneratePair(\n\n Dictionary::from(params).as_concrete_TypeRef(),\n\n &mut public_key_ref,\n\n &mut private_key_ref,\n\n )\n\n };\n\n\n", "file_path": "src/keychain/key/pair.rs", "rank": 60, "score": 52029.32230723814 }, { "content": "use super::*;\n\nuse crate::{access::AccessControl, dictionary::*, error::Error};\n\nuse core_foundation::base::TCFType;\n\nuse std::ptr;\n\n\n\n/// Public key pairs (i.e. public and private key) stored in the keychain.\n\n#[derive(Debug)]\n\npub struct KeyPair {\n\n /// Public key\n\n pub public_key: Key,\n\n\n\n /// Private key\n\n pub private_key: Key,\n\n}\n\n\n\nimpl KeyPair {\n\n /// An asymmetric cryptographic key pair is composed of a public and a private key that are generated together.\n\n /// The public key can be distributed freely, but keep the private key secret.\n\n /// One or both may be stored in a keychain for safekeeping.\n\n ///\n", "file_path": "src/keychain/key/pair.rs", "rank": 61, "score": 52029.18012949342 }, { "content": " /// - Public key: `04 || X || Y`\n\n /// - Private key: Concatenation of public key with big endian encoding\n\n /// of the secret scalar, i.e. 
`04 || X || Y || K`\n\n ///\n\n /// All representations use fixed-size integers with leading zeroes.\n\n pub key_data: Vec<u8>,\n\n /// The type of key algorithm\n\n pub key_type: AttrKeyType,\n\n}\n\n\n\nimpl RestoreKeyParams {\n\n /// Return the attributes that will be used to restore the key\n\n pub fn attributes(&self) -> Dictionary {\n\n let mut result = DictionaryBuilder::new();\n\n result.add_attr(&self.key_type);\n\n result.add(AttrKind::KeyClass, &self.key_class.as_CFString());\n\n result.add_number(AttrKind::KeySizeInBits, (self.key_data.len() * 8) as i64);\n\n result.into()\n\n }\n\n\n\n /// Return the `key_data` as a slice\n\n pub fn as_bytes(&self) -> &[u8] {\n\n self.key_data.as_slice()\n\n }\n\n}\n", "file_path": "src/keychain/key/pair.rs", "rank": 62, "score": 52028.21480476488 }, { "content": "\n\n /// Store this key in an external token i.e. Secure Enclave Processor (SEP).\n\n ///\n\n /// Wrapper for the `kSecAttrTokenID` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrtokenid>\n\n pub fn token_id(mut self, value: AttrTokenId) -> Self {\n\n self.attrs.add_attr(&value);\n\n self\n\n }\n\n}\n\n\n\nimpl From<KeyPairGenerateParams> for Dictionary {\n\n fn from(params: KeyPairGenerateParams) -> Dictionary {\n\n let mut result = DictionaryBuilder::new();\n\n result.add_attr(&params.key_type);\n\n result.add_number(AttrKind::KeySizeInBits, params.key_size as i64);\n\n result.add(\n\n unsafe { kSecPrivateKeyAttrs },\n\n &Dictionary::from(params.attrs),\n\n );\n", "file_path": "src/keychain/key/pair.rs", "rank": 63, "score": 52027.34927234434 }, { "content": " }\n\n\n\n /// Set whether this key is stored permanently in the keychain (default: false).\n\n ///\n\n /// Wrapper for the `kSecAttrIsPermanent` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrispermanent>\n\n pub fn permanent(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Permanent, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be wrapped with NONE algorithm. True\n\n /// means it cannot be wrapped with NONE, false means it can.\n\n ///\n\n /// Wrapper for `kSecKeySensitive` attribute key. See\n\n /// <https://developer.apple.com/documentation/security/kseckeysensitive>\n\n pub fn sensitive(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Sensitive, value);\n\n self\n\n }\n", "file_path": "src/keychain/key/pair.rs", "rank": 64, "score": 52027.21466623294 }, { "content": " ///\n\n /// Wrapper for the `kSecAttrAccessControl` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattraccesscontrol>\n\n pub fn access_control(mut self, access_control: &AccessControl) -> Self {\n\n self.attrs.add(AttrKind::AccessControl, access_control);\n\n self\n\n }\n\n\n\n /// Set a tag (private, application-specific identifier) on this key.\n\n /// Tags are useful as the \"primary key\" for looking up keychain items.\n\n ///\n\n /// Wrapper for `kSecAttrApplicationTag` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrapplicationtag>\n\n pub fn application_tag<T>(mut self, tag: T) -> Self\n\n where\n\n T: Into<AttrApplicationTag>,\n\n {\n\n self.attrs.add_attr(&tag.into());\n\n self\n\n }\n", "file_path": "src/keychain/key/pair.rs", "rank": 65, "score": 52026.169097368496 }, { "content": " /// <https://developer.apple.com/documentation/security/kseckeyverify>\n\n pub fn can_verify(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Verify, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used to wrap another key.\n\n ///\n\n /// Wrapper for the `kSecKeyWrap` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/kseckeywrap>\n\n pub fn can_wrap(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Wrap, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used to unwrap another key.\n\n ///\n\n /// Wrapper for the `kSecKeyUnwrap` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/kseckeyunwrap>\n\n pub fn can_unwrap(mut self, value: bool) -> Self {\n", "file_path": "src/keychain/key/pair.rs", "rank": 66, "score": 52023.55399416709 }, { "content": "\n\n /// Set whether this key can be used in a key derivation operation\n\n ///\n\n /// Wrapper for the `kSecKeyDerive` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/kseckeyderive>\n\n pub fn can_derive(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Derive, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used in a decrypt operation.\n\n ///\n\n /// Wrapper for the `kSecKeyDecrypt` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/kseckeydecrypt>\n\n pub fn can_decrypt(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Decrypt, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used in a encrypt operation.\n", "file_path": "src/keychain/key/pair.rs", "rank": 67, "score": 52023.51399190364 }, { "content": " ///\n\n /// Wrapper for the `kSecKeyEncrypt` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/kseckeyencrypt>\n\n pub fn can_encrypt(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Encrypt, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used in a signing operation.\n\n ///\n\n /// Wrapper for the `kSecKeySign` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/kseckeysign>\n\n pub fn can_sign(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Sign, value);\n\n self\n\n }\n\n\n\n /// Set whether this key can be used to verify a signatures.\n\n ///\n\n /// Wrapper for the `kSecKeyVerify` attribute key. See:\n", "file_path": "src/keychain/key/pair.rs", "rank": 68, "score": 52023.06964740719 }, { "content": "\n\n /// Set a string label on this key. SecAttrLabels are useful for providing\n\n /// additional descriptions or context on keys.\n\n ///\n\n /// Wrapper for the `kSecAttrLabel` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrlabel>\n\n pub fn label<L: Into<AttrLabel>>(mut self, label: L) -> Self {\n\n self.attrs.add_attr(&label.into());\n\n self\n\n }\n\n\n\n /// Set whether this key can be synchronized with other devices owned by\n\n /// the same account (default: false).\n\n ///\n\n /// Wrapper for the `kSecAttrSynchronizable` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrsynchronizable>\n\n pub fn synchronizable(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Synchronizable, value);\n\n self\n\n }\n", "file_path": "src/keychain/key/pair.rs", "rank": 69, "score": 52022.40594002796 }, { "content": " // Return an error if the status was unsuccessful\n\n if let Some(e) = Error::maybe_from_OSStatus(status) {\n\n return Err(e);\n\n }\n\n\n\n assert!(!public_key_ref.is_null());\n\n assert!(!private_key_ref.is_null());\n\n\n\n Ok(unsafe {\n\n KeyPair {\n\n public_key: Key::wrap_under_create_rule(public_key_ref),\n\n private_key: Key::wrap_under_create_rule(private_key_ref),\n\n }\n\n })\n\n }\n\n}\n\n\n\n/// Builder for key generation parameters (passed to the underlying\n\n/// `SecKeyGeneratePair` function)\n\n///\n", "file_path": "src/keychain/key/pair.rs", "rank": 70, "score": 52022.31747574983 }, { "content": "/// For more information on generating cryptographic keys in a keychain, see:\n\n/// <https://developer.apple.com/documentation/security/certificate_key_and_trust_services/keys/generating_new_cryptographic_keys>\n\n#[derive(Clone, Debug)]\n\npub struct KeyPairGenerateParams {\n\n key_type: AttrKeyType,\n\n key_size: usize,\n\n attrs: DictionaryBuilder,\n\n}\n\n\n\nimpl KeyPairGenerateParams {\n\n /// Create a new `GeneratePairParams`\n\n pub fn new(key_type: AttrKeyType, key_size: usize) -> Self {\n\n Self {\n\n key_type,\n\n key_size,\n\n attrs: <_>::default(),\n\n }\n\n }\n\n\n\n /// Set the access control policy (a.k.a. ACL) for the `Key`.\n", "file_path": "src/keychain/key/pair.rs", "rank": 71, "score": 52021.67914243911 }, { "content": " self.attrs.add_boolean(AttrKind::Unwrap, value);\n\n self\n\n }\n\n\n\n /// Set a key's cryptographic class.\n\n ///\n\n /// Wrapper for the `kSecAttrKeyClass` attribute key. 
See:\n\n /// <https://developer.apple.com/documentation/security/ksecattrkeyclass>\n\n pub fn key_class(mut self, value: AttrKeyClass) -> Self {\n\n self.attrs.add(AttrKind::KeyClass, &value.as_CFString());\n\n self\n\n }\n\n\n\n /// Set whether this key can be extractable when wrapped\n\n ///\n\n /// Wrapper for the `kSecKeyExtractable` attribute key. See:\n\n /// <https://developer.apple.com/documentation/security/kseckeyextractable>\n\n pub fn extractable(mut self, value: bool) -> Self {\n\n self.attrs.add_boolean(AttrKind::Extractable, value);\n\n self\n", "file_path": "src/keychain/key/pair.rs", "rank": 72, "score": 52020.82599927731 }, { "content": " result.into()\n\n }\n\n}\n\n\n\n/// Builder for restoring a key from an external representation of that key parameters\n\n/// (passed to the underlying `SecKeyCreateWithData` function).\n\n///\n\n/// The key must have already been imported or generated.\n\n///\n\n/// For more information on restoring cryptographic keys in keychain, see\n\n/// <https://developer.apple.com/documentation/security/1643701-seckeycreatewithdata>\n\n#[derive(Clone, Debug)]\n\npub struct RestoreKeyParams {\n\n /// The category the key fits (public, private, or symmetric)\n\n pub key_class: AttrKeyClass,\n\n /// Data representing the key. The format of the data depends on the type of key\n\n /// being created.\n\n ///\n\n /// - RSA: PKCS#1 format\n\n /// - EC: ANSI X9.63 bytestring:\n", "file_path": "src/keychain/key/pair.rs", "rank": 73, "score": 52020.514146092064 }, { "content": "use crate::ffi::*;\n\nuse core_foundation::{base::TCFType, string::CFString};\n\n\n\n/// Cryptographic algorithms for use with keys stored in the keychain.\n\n///\n\n/// Wrapper for `SecKeyAlgorithm`. 
See:\n\n/// <https://developer.apple.com/documentation/security/seckeyalgorithm>\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum KeyAlgorithm {\n\n /// Elliptic Curve Encryption Standard X963\n\n ECIESEncryptionStandardX963SHA1AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard X963\n\n ECIESEncryptionStandardX963SHA224AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard X963\n\n ECIESEncryptionStandardX963SHA256AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard X963\n\n ECIESEncryptionStandardX963SHA384AESGCM,\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 74, "score": 52017.3040355584 }, { "content": "\n\nimpl CFIndexConvertible for KeyOperation {\n\n fn to_CFIndex(self) -> CFIndex {\n\n let i = match self {\n\n Decrypt => 3,\n\n Encrypt => 2,\n\n KeyExchange => 4,\n\n Sign => 0,\n\n Verify => 1,\n\n };\n\n i as CFIndex\n\n }\n\n}\n", "file_path": "src/keychain/key/operation.rs", "rank": 75, "score": 52017.17291539859 }, { "content": "use core_foundation::base::{CFIndex, CFIndexConvertible};\n\n\n\nuse self::KeyOperation::*;\n\n/// Types of operations that a cryptographic key can perform\n\n///\n\n/// Wrapper for `SecKeyOperationType`. 
See:\n\n/// <https://developer.apple.com/documentation/security/seckeyoperationtype>\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum KeyOperation {\n\n /// Decrypt operation\n\n Decrypt,\n\n /// Encrypt operation\n\n Encrypt,\n\n /// KeyExchange operation\n\n KeyExchange,\n\n /// Sign operation\n\n Sign,\n\n /// Verify operation\n\n Verify,\n\n}\n", "file_path": "src/keychain/key/operation.rs", "rank": 76, "score": 52016.31661298647 }, { "content": " pub fn as_CFString(self) -> CFString {\n\n unsafe {\n\n CFString::wrap_under_get_rule(match self {\n\n KeyAlgorithm::ECIESEncryptionStandardX963SHA1AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardX963SHA1AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardX963SHA224AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardX963SHA224AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardX963SHA256AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardX963SHA256AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardX963SHA384AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardX963SHA384AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardX963SHA512AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardX963SHA512AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardVariableIVX963SHA224AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA224AESGCM\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 77, "score": 52016.27585194507 }, { "content": "\n\n /// RSA Signature Message PSS\n\n RSASignatureMessagePSSSHA1,\n\n\n\n /// RSA Signature Message PSS\n\n RSASignatureMessagePSSSHA224,\n\n\n\n /// RSA Signature Message PSS\n\n RSASignatureMessagePSSSHA256,\n\n\n\n /// RSA Signature Message PSS\n\n RSASignatureMessagePSSSHA384,\n\n\n\n /// RSA Signature Message PSS\n\n RSASignatureMessagePSSSHA512,\n\n}\n\n\n\nimpl KeyAlgorithm {\n\n /// Get `CFString` containing the `kSecKeyAlgorithm` dictionary value for\n\n /// a particular cryptographic algorithm.\n", "file_path": 
"src/keychain/key/algorithm.rs", "rank": 78, "score": 52012.35977547006 }, { "content": " KeyAlgorithm::ECDHKeyExchangeCofactorX963SHA256 => {\n\n kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA256\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeCofactorX963SHA384 => {\n\n kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA384\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeCofactorX963SHA512 => {\n\n kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA512\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeStandardX963SHA224 => {\n\n kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA224\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeStandardX963SHA256 => {\n\n kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA256\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeStandardX963SHA384 => {\n\n kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA384\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeStandardX963SHA512 => {\n\n kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA512\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 79, "score": 52011.43109581085 }, { "content": " KeyAlgorithm::ECDSASignatureMessageX962SHA256 => {\n\n kSecKeyAlgorithmECDSASignatureMessageX962SHA256\n\n }\n\n KeyAlgorithm::ECDSASignatureMessageX962SHA384 => {\n\n kSecKeyAlgorithmECDSASignatureMessageX962SHA384\n\n }\n\n KeyAlgorithm::ECDSASignatureMessageX962SHA512 => {\n\n kSecKeyAlgorithmECDSASignatureMessageX962SHA512\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeCofactor => kSecKeyAlgorithmECDHKeyExchangeCofactor,\n\n KeyAlgorithm::ECDHKeyExchangeStandard => kSecKeyAlgorithmECDHKeyExchangeStandard,\n\n KeyAlgorithm::ECDHKeyExchangeCofactorX963SHA1 => {\n\n kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA1\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeStandardX963SHA1 => {\n\n kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA1\n\n }\n\n KeyAlgorithm::ECDHKeyExchangeCofactorX963SHA224 => {\n\n kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA224\n\n }\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 80, "score": 52011.399281773425 }, { "content": " ECDHKeyExchangeCofactor,\n\n\n\n 
/// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandard,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeCofactorX963SHA1,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandardX963SHA1,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeCofactorX963SHA224,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeCofactorX963SHA256,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeCofactorX963SHA384,\n\n\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 81, "score": 52011.34946682663 }, { "content": " }\n\n KeyAlgorithm::RSAEncryptionRaw => kSecKeyAlgorithmRSAEncryptionRaw,\n\n KeyAlgorithm::RSAEncryptionPKCS1 => kSecKeyAlgorithmRSAEncryptionPKCS1,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA1 => kSecKeyAlgorithmRSAEncryptionOAEPSHA1,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA224 => kSecKeyAlgorithmRSAEncryptionOAEPSHA224,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA256 => kSecKeyAlgorithmRSAEncryptionOAEPSHA256,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA384 => kSecKeyAlgorithmRSAEncryptionOAEPSHA384,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA512 => kSecKeyAlgorithmRSAEncryptionOAEPSHA512,\n\n KeyAlgorithm::RSAEncryptionOAEPSHA1AESGCM => {\n\n kSecKeyAlgorithmRSAEncryptionOAEPSHA1AESGCM\n\n }\n\n KeyAlgorithm::RSAEncryptionOAEPSHA224AESGCM => {\n\n kSecKeyAlgorithmRSAEncryptionOAEPSHA224AESGCM\n\n }\n\n KeyAlgorithm::RSAEncryptionOAEPSHA256AESGCM => {\n\n kSecKeyAlgorithmRSAEncryptionOAEPSHA256AESGCM\n\n }\n\n KeyAlgorithm::RSAEncryptionOAEPSHA384AESGCM => {\n\n kSecKeyAlgorithmRSAEncryptionOAEPSHA384AESGCM\n\n }\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 82, "score": 52011.34010619548 }, { "content": " /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeCofactorX963SHA512,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandardX963SHA224,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandardX963SHA256,\n\n\n\n /// Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandardX963SHA384,\n\n\n\n /// 
Elliptic Curve Key Exchange\n\n ECDHKeyExchangeStandardX963SHA512,\n\n\n\n /// RSA Encryption\n\n RSAEncryptionRaw,\n\n\n\n /// RSA Encryption\n\n RSAEncryptionPKCS1,\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 83, "score": 52011.24827469866 }, { "content": " KeyAlgorithm::RSASignatureDigestPSSSHA224 => {\n\n kSecKeyAlgorithmRSASignatureDigestPSSSHA224\n\n }\n\n KeyAlgorithm::RSASignatureDigestPSSSHA256 => {\n\n kSecKeyAlgorithmRSASignatureDigestPSSSHA256\n\n }\n\n KeyAlgorithm::RSASignatureDigestPSSSHA384 => {\n\n kSecKeyAlgorithmRSASignatureDigestPSSSHA384\n\n }\n\n KeyAlgorithm::RSASignatureDigestPSSSHA512 => {\n\n kSecKeyAlgorithmRSASignatureDigestPSSSHA512\n\n }\n\n KeyAlgorithm::RSASignatureMessagePSSSHA1 => {\n\n kSecKeyAlgorithmRSASignatureMessagePSSSHA1\n\n }\n\n KeyAlgorithm::RSASignatureMessagePSSSHA224 => {\n\n kSecKeyAlgorithmRSASignatureMessagePSSSHA224\n\n }\n\n KeyAlgorithm::RSASignatureMessagePSSSHA256 => {\n\n kSecKeyAlgorithmRSASignatureMessagePSSSHA256\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 84, "score": 52011.24990720619 }, { "content": " kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA512AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorX963SHA1AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorX963SHA1AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorX963SHA224AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorX963SHA224AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorX963SHA256AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorX963SHA256AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorX963SHA384AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorX963SHA384AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorX963SHA512AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorX963SHA512AESGCM\n\n }\n\n KeyAlgorithm::ECDSASignatureRFC4754 => kSecKeyAlgorithmECDSASignatureRFC4754,\n\n KeyAlgorithm::ECDSASignatureDigestX962 => kSecKeyAlgorithmECDSASignatureDigestX962,\n\n 
KeyAlgorithm::ECDSASignatureDigestX962SHA1 => {\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 85, "score": 52011.2226291677 }, { "content": " }\n\n KeyAlgorithm::ECIESEncryptionStandardVariableIVX963SHA256AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA256AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardVariableIVX963SHA384AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA384AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionStandardVariableIVX963SHA512AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA512AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorVariableIVX963SHA224AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA224AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorVariableIVX963SHA256AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA256AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorVariableIVX963SHA384AESGCM => {\n\n kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA384AESGCM\n\n }\n\n KeyAlgorithm::ECIESEncryptionCofactorVariableIVX963SHA512AESGCM => {\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 86, "score": 52011.18744016878 }, { "content": " KeyAlgorithm::RSAEncryptionOAEPSHA512AESGCM => {\n\n kSecKeyAlgorithmRSAEncryptionOAEPSHA512AESGCM\n\n }\n\n KeyAlgorithm::RSASignatureRaw => kSecKeyAlgorithmRSASignatureRaw,\n\n KeyAlgorithm::RSASignatureDigestPKCS1v15Raw => {\n\n kSecKeyAlgorithmRSASignatureDigestPKCS1v15Raw\n\n }\n\n KeyAlgorithm::RSASignatureDigestPKCS1v15SHA1 => {\n\n kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA1\n\n }\n\n KeyAlgorithm::RSASignatureDigestPKCS1v15SHA224 => {\n\n kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA224\n\n }\n\n KeyAlgorithm::RSASignatureDigestPKCS1v15SHA256 => {\n\n kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA256\n\n }\n\n KeyAlgorithm::RSASignatureDigestPKCS1v15SHA384 => {\n\n kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA384\n\n }\n\n 
KeyAlgorithm::RSASignatureDigestPKCS1v15SHA512 => {\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 87, "score": 52011.186524961515 }, { "content": " kSecKeyAlgorithmECDSASignatureDigestX962SHA1\n\n }\n\n KeyAlgorithm::ECDSASignatureDigestX962SHA224 => {\n\n kSecKeyAlgorithmECDSASignatureDigestX962SHA224\n\n }\n\n KeyAlgorithm::ECDSASignatureDigestX962SHA256 => {\n\n kSecKeyAlgorithmECDSASignatureDigestX962SHA256\n\n }\n\n KeyAlgorithm::ECDSASignatureDigestX962SHA384 => {\n\n kSecKeyAlgorithmECDSASignatureDigestX962SHA384\n\n }\n\n KeyAlgorithm::ECDSASignatureDigestX962SHA512 => {\n\n kSecKeyAlgorithmECDSASignatureDigestX962SHA512\n\n }\n\n KeyAlgorithm::ECDSASignatureMessageX962SHA1 => {\n\n kSecKeyAlgorithmECDSASignatureMessageX962SHA1\n\n }\n\n KeyAlgorithm::ECDSASignatureMessageX962SHA224 => {\n\n kSecKeyAlgorithmECDSASignatureMessageX962SHA224\n\n }\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 88, "score": 52011.18299271432 }, { "content": " kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA512\n\n }\n\n KeyAlgorithm::RSASignatureMessagePKCS1v15SHA1 => {\n\n kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA1\n\n }\n\n KeyAlgorithm::RSASignatureMessagePKCS1v15SHA224 => {\n\n kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA224\n\n }\n\n KeyAlgorithm::RSASignatureMessagePKCS1v15SHA256 => {\n\n kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA256\n\n }\n\n KeyAlgorithm::RSASignatureMessagePKCS1v15SHA384 => {\n\n kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA384\n\n }\n\n KeyAlgorithm::RSASignatureMessagePKCS1v15SHA512 => {\n\n kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA512\n\n }\n\n KeyAlgorithm::RSASignatureDigestPSSSHA1 => {\n\n kSecKeyAlgorithmRSASignatureDigestPSSSHA1\n\n }\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 89, "score": 52011.143398207554 }, { "content": " }\n\n KeyAlgorithm::RSASignatureMessagePSSSHA384 => {\n\n kSecKeyAlgorithmRSASignatureMessagePSSSHA384\n\n }\n\n KeyAlgorithm::RSASignatureMessagePSSSHA512 => {\n\n 
kSecKeyAlgorithmRSASignatureMessagePSSSHA512\n\n }\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 90, "score": 52011.06216101885 }, { "content": "\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureDigestX962SHA512,\n\n\n\n /// Elliptic Curve Signature Message X962\n\n ECDSASignatureMessageX962SHA1,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureMessageX962SHA224,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureMessageX962SHA256,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureMessageX962SHA384,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureMessageX962SHA512,\n\n\n\n /// Elliptic Curve Key Exchange\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 91, "score": 52009.104990176435 }, { "content": " ECIESEncryptionCofactorVariableIVX963SHA256AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor Variable IVX963\n\n ECIESEncryptionCofactorVariableIVX963SHA384AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor Variable IVX963\n\n ECIESEncryptionCofactorVariableIVX963SHA512AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor X963\n\n ECIESEncryptionCofactorX963SHA1AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor X963\n\n ECIESEncryptionCofactorX963SHA224AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor X963\n\n ECIESEncryptionCofactorX963SHA256AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor X963\n\n ECIESEncryptionCofactorX963SHA384AESGCM,\n\n\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 92, "score": 52007.112222602664 }, { "content": "\n\n /// Elliptic Curve Encryption Standard X963\n\n ECIESEncryptionStandardX963SHA512AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard Variable IVX963\n\n ECIESEncryptionStandardVariableIVX963SHA224AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard Variable IVX963\n\n ECIESEncryptionStandardVariableIVX963SHA256AESGCM,\n\n\n\n /// Elliptic Curve Encryption 
Standard Variable IVX963\n\n ECIESEncryptionStandardVariableIVX963SHA384AESGCM,\n\n\n\n /// Elliptic Curve Encryption Standard Variable IVX963\n\n ECIESEncryptionStandardVariableIVX963SHA512AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor Variable IVX963\n\n ECIESEncryptionCofactorVariableIVX963SHA224AESGCM,\n\n\n\n /// Elliptic Curve Encryption Cofactor Variable IVX963\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 93, "score": 52007.112222602664 }, { "content": "\n\n /// RSA Encryption OAEP AES-GCM\n\n RSAEncryptionOAEPSHA256AESGCM,\n\n\n\n /// RSA Encryption OAEP AES-GCM\n\n RSAEncryptionOAEPSHA384AESGCM,\n\n\n\n /// RSA Encryption OAEP AES-GCM\n\n RSAEncryptionOAEPSHA512AESGCM,\n\n\n\n /// RSA Signature Raw\n\n RSASignatureRaw,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureDigestPKCS1v15Raw,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureDigestPKCS1v15SHA1,\n\n\n\n /// RSA Signature Digest PKCS1v15\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 94, "score": 52007.112222602664 }, { "content": " RSASignatureDigestPKCS1v15SHA224,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureDigestPKCS1v15SHA256,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureDigestPKCS1v15SHA384,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureDigestPKCS1v15SHA512,\n\n\n\n /// RSA Signature Message PKCS1v15\n\n RSASignatureMessagePKCS1v15SHA1,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureMessagePKCS1v15SHA224,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureMessagePKCS1v15SHA256,\n\n\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 95, "score": 52007.112222602664 }, { "content": " /// Elliptic Curve Encryption Cofactor X963\n\n ECIESEncryptionCofactorX963SHA512AESGCM,\n\n\n\n /// Elliptic Curve Signature RFC4754\n\n ECDSASignatureRFC4754,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureDigestX962,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n 
ECDSASignatureDigestX962SHA1,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureDigestX962SHA224,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureDigestX962SHA256,\n\n\n\n /// Elliptic Curve Signature Digest X962\n\n ECDSASignatureDigestX962SHA384,\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 96, "score": 52007.112222602664 }, { "content": " /// RSA Encryption OAEP\n\n RSAEncryptionOAEPSHA1,\n\n\n\n /// RSA Encryption OAEP\n\n RSAEncryptionOAEPSHA224,\n\n\n\n /// RSA Encryption OAEP\n\n RSAEncryptionOAEPSHA256,\n\n\n\n /// RSA Encryption OAEP\n\n RSAEncryptionOAEPSHA384,\n\n\n\n /// RSA Encryption OAEP\n\n RSAEncryptionOAEPSHA512,\n\n\n\n /// RSA Encryption OAEP AES-GCM\n\n RSAEncryptionOAEPSHA1AESGCM,\n\n\n\n /// RSA Encryption OAEP AES-GCM\n\n RSAEncryptionOAEPSHA224AESGCM,\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 97, "score": 52007.112222602664 }, { "content": " /// RSA Signature Digest PKCS1v15\n\n RSASignatureMessagePKCS1v15SHA384,\n\n\n\n /// RSA Signature Digest PKCS1v15\n\n RSASignatureMessagePKCS1v15SHA512,\n\n\n\n /// RSA Signature Digest PSS\n\n RSASignatureDigestPSSSHA1,\n\n\n\n /// RSA Signature Digest PSS\n\n RSASignatureDigestPSSSHA224,\n\n\n\n /// RSA Signature Digest PSS\n\n RSASignatureDigestPSSSHA256,\n\n\n\n /// RSA Signature Digest PSS\n\n RSASignatureDigestPSSSHA384,\n\n\n\n /// RSA Signature Digest PSS\n\n RSASignatureDigestPSSSHA512,\n", "file_path": "src/keychain/key/algorithm.rs", "rank": 98, "score": 52007.112222602664 }, { "content": "/// Generate a `key::Pair` for testing purposes\n\nfn generate_keypair(tag: &str, label: &str) -> KeyPair {\n\n let acl =\n\n AccessControl::create_with_flags(AttrAccessible::WhenUnlocked, Default::default()).unwrap();\n\n\n\n let generate_params = KeyPairGenerateParams::new(AttrKeyType::EcSecPrimeRandom, 256)\n\n .access_control(&acl)\n\n .application_tag(tag)\n\n .label(label)\n\n .permanent(true);\n\n\n\n 
KeyPair::generate(generate_params).unwrap()\n\n}\n\n\n\n/// Queries for secret keys\n", "file_path": "tests/interactive.rs", "rank": 99, "score": 45074.44074312066 } ]
Rust
kailua_types/src/ty/tag.rs
nxgtri/kailua
17eb0750ff8bbe4237d75f31f9b8aa23170c3c28
use std::fmt; use kailua_env::Spanned; use kailua_diag::{Result, Reporter}; use kailua_syntax::ast::{Attr, AttrValue}; use super::{Display, DisplayState, TypeResolver, ClassSystemId}; use message as m; #[derive(Copy, Clone, PartialEq, Eq)] pub enum Tag { #[doc(hidden)] _Subtype, #[doc(hidden)] _NoSubtype, #[doc(hidden)] _NoSubtype2, Require, Type, Assert, AssertNot, AssertType, GenericPairs, GlobalEnv, GlobalEval, BecomeModule, PackagePath, PackageCpath, StringMeta, MakeClass(ClassSystemId), KailuaGenTvar, KailuaAssertTvar, } impl Tag { pub fn from(attr: &Attr, resolv: &mut TypeResolver) -> Result<Option<Tag>> { let no_values = |resolv: &mut TypeResolver, tag| { if let Some(ref values) = attr.values { resolv.error(values, m::AttrCannotHaveAnyValues { name: &attr.name }).done()?; } Ok(Some(tag)) }; let values = |resolv: &mut TypeResolver, count| { if let Some(ref values) = attr.values { if values.len() != count { resolv.error(values, m::AttrRequiresFixedNumOfValues { name: &attr.name, count: count }) .done()?; } Ok(&values[..]) } else { resolv.error(&attr.name, m::AttrRequiresFixedNumOfValues { name: &attr.name, count: count }) .done()?; const EMPTY: &'static [Spanned<AttrValue>] = &[]; Ok(EMPTY) } }; match &attr.name.base[..] 
{ b"internal subtype" => no_values(resolv, Tag::_Subtype), b"internal no_subtype" => no_values(resolv, Tag::_NoSubtype), b"internal no_subtype2" => no_values(resolv, Tag::_NoSubtype2), b"require" => no_values(resolv, Tag::Require), b"type" => no_values(resolv, Tag::Type), b"assert" => no_values(resolv, Tag::Assert), b"assert_not" => no_values(resolv, Tag::AssertNot), b"assert_type" => no_values(resolv, Tag::AssertType), b"generic_pairs" => no_values(resolv, Tag::GenericPairs), b"genv" => no_values(resolv, Tag::GlobalEnv), b"geval" => no_values(resolv, Tag::GlobalEval), b"become_module" => no_values(resolv, Tag::BecomeModule), b"package_path" => no_values(resolv, Tag::PackagePath), b"package_cpath" => no_values(resolv, Tag::PackageCpath), b"string_meta" => no_values(resolv, Tag::StringMeta), b"make_class" => { let values = values(resolv, 1)?; if let Some(&AttrValue::Name(ref system)) = values.get(0).map(|v| &v.base) { if let Some(system) = resolv.class_system_from_name(system)? { return Ok(Some(Tag::MakeClass(system))); } } Ok(None) }, b"internal kailua_gen_tvar" => no_values(resolv, Tag::KailuaGenTvar), b"internal kailua_assert_tvar" => no_values(resolv, Tag::KailuaAssertTvar), _ => { resolv.warn(&attr.name, m::UnknownAttrName { name: &attr.name.base }).done()?; Ok(None) } } } pub fn name(&self) -> &'static str { match *self { Tag::Require => "require", Tag::Type => "type", Tag::Assert => "assert", Tag::AssertNot => "assert_not", Tag::AssertType => "assert_type", Tag::GenericPairs => "generic_pairs", Tag::GlobalEnv => "genv", Tag::GlobalEval => "geval", Tag::BecomeModule => "become_module", Tag::PackagePath => "package_path", Tag::PackageCpath => "package_cpath", Tag::StringMeta => "string_meta", Tag::MakeClass(_) => "make_class", Tag::_Subtype => "internal subtype", Tag::_NoSubtype => "internal no_subtype", Tag::_NoSubtype2 => "internal no_subtype2", Tag::KailuaGenTvar => "internal kailua_gen_tvar", Tag::KailuaAssertTvar => "internal kailua_assert_tvar", } } pub 
fn scope_local(&self) -> bool { match *self { Tag::Type | Tag::Assert | Tag::AssertNot | Tag::AssertType | Tag::GenericPairs | Tag::MakeClass(_) | Tag::KailuaGenTvar | Tag::KailuaAssertTvar => true, _ => false, } } pub fn needs_subtype(&self) -> bool { match *self { Tag::_Subtype => true, Tag::_NoSubtype => false, Tag::_NoSubtype2 => false, Tag::PackagePath | Tag::PackageCpath => false, _ => true, } } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name())?; match *self { Tag::MakeClass(csid) => { write!(f, "({:?})", csid)?; } _ => {} } Ok(()) } } impl Display for Tag { fn fmt_displayed(&self, f: &mut fmt::Formatter, st: &DisplayState) -> fmt::Result { write!(f, "{}", self.name())?; match *self { Tag::MakeClass(csid) => { write!(f, "(")?; st.context.fmt_class_system_name(csid, f, st)?; write!(f, ")")?; } _ => {} } Ok(()) } }
use std::fmt; use kailua_env::Spanned; use kailua_diag::{Result, Reporter}; use kailua_syntax::ast::{Attr, AttrValue}; use super::{Display, DisplayState, TypeResolver, ClassSystemId}; use message as m; #[derive(Copy, Clone, PartialEq, Eq)] pub enum Tag { #[doc(hidden)] _Subtype, #[doc(hidden)] _NoSubtype, #[doc(hidden)] _NoSubtype2, Require, Type, Assert, AssertNot, AssertType, GenericPairs, GlobalEnv, GlobalEval, BecomeModule, PackagePath, PackageCpath, StringMeta, MakeClass(ClassSystemId), KailuaGenTvar, KailuaAssertTvar, } impl Tag { pub fn from(attr: &Attr, resolv: &mut TypeResolver) -> Result<Option<Tag>> { let no_values = |resolv: &mut TypeResolver, tag| { if let Some(ref values) = attr.values { resolv.error(values, m::AttrCannotHaveAnyValues { name: &attr.name }).done()?; } Ok(Some(tag)) }; let values = |resolv: &mut TypeResolver, count| { if let Some(ref values) = attr.values { if values.len() != count { resolv.error(values, m::AttrRequiresFixedNumOfValues { name: &attr.name, count: count }) .done()?; } Ok(&values[..]) } else { resolv.error(&attr.name, m::AttrRequiresFixedNumOfValues { name: &attr.name, count: count }) .done()?; const EMPTY: &'static [Spanned<AttrValue>] = &[]; Ok(EMPTY) } }; match &attr.name.base[..] 
{ b"internal subtype" => no_values(resolv, Tag::_Subtype), b"internal no_subtype" => no_values(resolv, Tag::_NoSubtype), b"internal no_subtype2" => no_values(resolv, Tag::_NoSubtype2), b"require" => no_values(resolv, Tag::Require), b"type" => no_values(resolv, Tag::Type), b"assert" => no_values(resolv, Tag::Assert), b"assert_not" => no_values(resolv, Tag::AssertNot), b"assert_type" => no_values(resolv, Tag::AssertType), b"generic_pairs" => no_values(resolv, Tag::GenericPairs), b"genv" => no_values(resolv, Tag::GlobalEnv), b"geval" => no_values(resolv, Tag::GlobalEval), b"become_module" => no_values(resolv, Tag::BecomeModule), b"package_path" => no_values(resolv, Tag::PackagePath), b"package_cpath" => no_values(resolv, Tag::PackageCpath), b"string_meta" => no_values(resolv, Tag::StringMeta), b"make_class" => { let values = values(resolv, 1)?; if let Some(&AttrValue::Name(ref system)) = values.get(0).map(|v| &v.base) { if let Some(system) = resolv.class_system_from_name(system)? { return Ok(Some(Tag::MakeClass(system))); } } Ok(None) }, b"internal kailua_gen_tvar" => no_values(resolv, Tag::KailuaGenTvar), b"internal kailua_assert_tvar" => no_values(resolv, Tag::KailuaAssertTvar), _ => { resolv.warn(&attr.name, m::UnknownAttrName { name: &attr.name.base }).done()?; Ok(None) } } } pub fn name(&self) -> &'static str { match *self { Tag::Require => "require", Tag::Type => "type", Tag::Assert => "assert", Tag::AssertNot => "assert_not", Tag::AssertType => "assert_type", Tag::GenericPairs => "generic_pairs", Tag::GlobalEnv => "genv", Tag::GlobalEval => "geval", Tag::BecomeModule => "become_module", Tag::PackagePath => "package_path", Tag::PackageCpath => "package_cpath", Tag::StringMeta => "string_meta", Tag::MakeClass(_) => "make_class", Tag::_Subtype => "internal subtype", Tag::_NoSubtype => "internal no_subtype", Tag::_NoSubtype2 => "internal no_subtype2", Tag::KailuaGenTvar => "internal kailua_gen_tvar", Tag::KailuaAssertTvar => "internal kailua_assert_tvar", } }
pub fn needs_subtype(&self) -> bool { match *self { Tag::_Subtype => true, Tag::_NoSubtype => false, Tag::_NoSubtype2 => false, Tag::PackagePath | Tag::PackageCpath => false, _ => true, } } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name())?; match *self { Tag::MakeClass(csid) => { write!(f, "({:?})", csid)?; } _ => {} } Ok(()) } } impl Display for Tag { fn fmt_displayed(&self, f: &mut fmt::Formatter, st: &DisplayState) -> fmt::Result { write!(f, "{}", self.name())?; match *self { Tag::MakeClass(csid) => { write!(f, "(")?; st.context.fmt_class_system_name(csid, f, st)?; write!(f, ")")?; } _ => {} } Ok(()) } }
pub fn scope_local(&self) -> bool { match *self { Tag::Type | Tag::Assert | Tag::AssertNot | Tag::AssertType | Tag::GenericPairs | Tag::MakeClass(_) | Tag::KailuaGenTvar | Tag::KailuaAssertTvar => true, _ => false, } }
function_block-full_function
[ { "content": "pub fn get_defs(name: &str) -> Option<&'static [Def]> {\n\n match name {\n\n \"lua51\" => Some(LUA51_DEFS),\n\n \"lua51_base\" => Some(LUA51_BASE_DEFS),\n\n \"lua51_package\" => Some(LUA51_PACKAGE_DEFS),\n\n \"lua51_string\" => Some(LUA51_STRING_DEFS),\n\n \"lua51_table\" => Some(LUA51_TABLE_DEFS),\n\n \"lua51_math\" => Some(LUA51_MATH_DEFS),\n\n \"lua51_io\" => Some(LUA51_IO_DEFS),\n\n \"lua51_os\" => Some(LUA51_OS_DEFS),\n\n \"lua51_debug\" => Some(LUA51_DEBUG_DEFS),\n\n\n\n // only internally used\n\n \"internal kailua_test\" => Some(KAILUA_TEST_DEFS),\n\n\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "kailua_check/src/defs.rs", "rank": 0, "score": 332839.8920828331 }, { "content": "/// Returns the predefined class system object from the name.\n\n///\n\n/// This is currently the only way to define class systems from the Kailua source code.\n\npub fn make_predefined_class_system(name: &str) -> Option<Box<ClassSystem>> {\n\n match name {\n\n \"gideros\" => Some(Box::new(gideros::GiderosClassSystem::new())),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "kailua_check/src/class_system/mod.rs", "rank": 1, "score": 305252.5240146907 }, { "content": "fn tag_is_eq(lhs: Option<Tag>, rhs: Option<Tag>) -> bool {\n\n match (lhs, rhs) {\n\n // some tag requires the subtyping, so if any operand has such tag\n\n // and the tag doesn't match bail out\n\n (Some(ltag), Some(rtag)) =>\n\n ltag == rtag || !(ltag.needs_subtype() || rtag.needs_subtype()),\n\n\n\n (Some(ltag), None) => !ltag.needs_subtype(),\n\n (None, Some(rtag)) => !rtag.needs_subtype(),\n\n\n\n (None, None) => true,\n\n }\n\n}\n\n\n\nmacro_rules! 
define_ty_impls {\n\n ($(impl[$($param:tt)*] $l:ident: $lhs:ty, $r:ident: $rhs:ty {\n\n origin = $origin:expr;\n\n text = $ltext:expr, $rtext:expr;\n\n ty = $lty:expr, $rty:expr;\n\n tag = $ltag:expr, $rtag:expr;\n", "file_path": "kailua_types/src/ty/value.rs", "rank": 2, "score": 270821.56779573724 }, { "content": "/// A superset of the type context that also provides type name resolution.\n\n///\n\n/// This is required for converting a syntax-level type (\"kind\") to the actual type.\n\npub trait TypeResolver: Report {\n\n /// Returns an immutable reference to associated type context.\n\n fn context(&self) -> &TypeContext;\n\n\n\n /// Returns a mutable reference to associated type context.\n\n fn context_mut(&mut self) -> &mut TypeContext;\n\n\n\n /// Resolves a type name to a type if any. The span is used for error reporting.\n\n fn ty_from_name(&self, name: &Spanned<Name>) -> Result<Ty>;\n\n\n\n /// Resolves a class system name to an identifier if any. The span is used for error reporting.\n\n fn class_system_from_name(&self, name: &Spanned<Name>) -> Result<Option<ClassSystemId>>;\n\n}\n\n\n\nimpl<'a, R: TypeResolver + ?Sized> TypeResolver for &'a mut R {\n\n fn context(&self) -> &TypeContext {\n\n (**self).context()\n\n }\n\n fn context_mut(&mut self) -> &mut TypeContext {\n\n (**self).context_mut()\n\n }\n\n fn ty_from_name(&self, name: &Spanned<Name>) -> Result<Ty> {\n\n (**self).ty_from_name(name)\n\n }\n\n fn class_system_from_name(&self, name: &Spanned<Name>) -> Result<Option<ClassSystemId>> {\n\n (**self).class_system_from_name(name)\n\n }\n\n}\n\n\n", "file_path": "kailua_types/src/ty/mod.rs", "rank": 3, "score": 251917.6866070165 }, { "content": "// this should be called for _each_ chunk in the workspace\n\npub fn global_var_uses(tokens: &[NestedToken], chunk: &Chunk, name: &Name) -> Vec<Span> {\n\n let mut spans = Vec::new();\n\n\n\n for (tok, aux) in tokens.iter().zip(chunk.token_aux.iter()) {\n\n let found = match (&tok.tok.base, aux) {\n\n 
(&Tok::Name(ref tok_name), &TokenAux::LocalVarName(ref tok_id)) => {\n\n match chunk.local_names.get(tok_id).map(|def| &def.kind) {\n\n Some(&LocalNameKind::AssumedToGlobal) => tok_name[..] == name[..],\n\n _ => false,\n\n }\n\n },\n\n (&Tok::Name(ref tok_name), &TokenAux::GlobalVarName) => tok_name[..] == name[..],\n\n (_, _) => false,\n\n };\n\n\n\n if found {\n\n spans.push(tok.tok.span);\n\n }\n\n }\n\n\n\n spans\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 4, "score": 232326.54180601187 }, { "content": "fn tag_is_sub(lhs: Option<Tag>, rhs: Option<Tag>) -> bool {\n\n match (lhs, rhs) {\n\n // some tag requires the subtyping, so if any operand has such tag\n\n // and the tag doesn't match bail out\n\n (Some(ltag), Some(rtag)) =>\n\n ltag == rtag || !(ltag.needs_subtype() || rtag.needs_subtype()),\n\n\n\n (None, Some(rtag)) => !rtag.needs_subtype(),\n\n\n\n // every tagged types are subtypes of the original type\n\n (_, None) => true,\n\n }\n\n}\n\n\n", "file_path": "kailua_types/src/ty/value.rs", "rank": 5, "score": 232311.77756035893 }, { "content": "/// An one-off function to check a chunk with given `Options`.\n\npub fn check_from_chunk<R: Report>(\n\n context: &mut env::Context<R>,\n\n chunk: kailua_syntax::Chunk,\n\n opts: Rc<RefCell<options::Options>>\n\n) -> kailua_diag::Result<()> {\n\n let mut env = env::Env::new(context, opts, chunk.map);\n\n let mut checker = Checker::new(&mut env);\n\n checker.visit(&chunk.block)\n\n}\n\n\n", "file_path": "kailua_check/src/lib.rs", "rank": 6, "score": 224101.25227163098 }, { "content": "/// Same to `check_from_chunk` but with preloading.\n\npub fn check_from_chunk_with_preloading<R: Report>(\n\n context: &mut env::Context<R>,\n\n chunk: kailua_syntax::Chunk,\n\n opts: Rc<RefCell<options::Options>>,\n\n preload: &Preload\n\n) -> kailua_diag::Result<()> {\n\n // preload `--# open`s into the context\n\n for name in &preload.open {\n\n context.open_library(name.as_ref().map(|n| &n[..]), 
opts.clone())?;\n\n }\n\n\n\n let mut env = env::Env::new(context, opts, chunk.map);\n\n let mut checker = Checker::new(&mut env);\n\n\n\n // preload `require`s into the checker\n\n for name in &preload.require {\n\n checker.require(name.as_ref().map(|n| &n[..]), name.span)?;\n\n }\n\n\n\n checker.visit(&chunk.block)\n\n}\n\n\n", "file_path": "kailua_check/src/lib.rs", "rank": 7, "score": 220973.02765820327 }, { "content": "fn keywords_per_category(nesting_category: NestingCategory) -> &'static [&'static str] {\n\n match nesting_category {\n\n NestingCategory::Expr => EXPR_KEYWORDS,\n\n NestingCategory::Meta => META_KEYWORDS,\n\n }\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/completion.rs", "rank": 8, "score": 220273.2510356013 }, { "content": "fn extract_parent(mut argtys: SpannedSlotSeq, ctx: &mut TypeContext,\n\n report: &Report) -> kailua_diag::Result<Option<Option<Spanned<ClassId>>>> {\n\n let argty = argtys.ensure_at(0);\n\n\n\n let parent = if let Some(arg) = ctx.resolve_exact_type(&argty.unlift()) {\n\n if let T::None = *arg {\n\n Some(None)\n\n } else if let T::Class(Class::Prototype(cid)) = *arg {\n\n if arg.nil() == Nil::Silent { Some(Some(cid)) } else { None }\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n };\n\n\n\n if let Some(parent) = parent {\n\n Ok(Some(parent.map(|cid| cid.with_loc(argty))))\n\n } else {\n\n report.error(argty, m::BadClassParent { ty: argty.unlift().display(ctx) }).done()?;\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "kailua_check/src/class_system/mod.rs", "rank": 9, "score": 219571.7301262651 }, { "content": "fn invalid_value(s: &str) -> ! 
{\n\n Error::with_description(s, ErrorKind::InvalidValue).exit();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 211184.69452625763 }, { "content": "// this should be called for _each_ chunk in the workspace,\n\n// since definition can occur multiple times throughout the project\n\npub fn global_var_definition(chunk: &Chunk, name: &Name) -> Option<Span> {\n\n chunk.global_scope.get(name).cloned()\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 11, "score": 210935.46073383634 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct FixedName(&'static str);\n\n\n\nimpl Localize for FixedName {\n\n fn fmt_localized(&self, f: &mut fmt::Formatter, _locale: Locale) -> fmt::Result {\n\n write!(f, \"`{}`\", self.0)\n\n }\n\n}\n\n\n\nimpl Expectable for FixedName {\n\n fn check_token(&self, tok: &Tok) -> bool {\n\n if let Tok::Name(ref name) = *tok { **name == *self.0.as_bytes() } else { false } \n\n }\n\n}\n\n\n", "file_path": "kailua_syntax/src/parser.rs", "rank": 12, "score": 201743.25625020592 }, { "content": "/// Returns a default locale for the current environment, if any.\n\npub fn get_message_locale() -> Option<Locale> {\n\n get_locale_string().and_then(|locale| Locale::new(&locale))\n\n}\n\n\n", "file_path": "kailua_diag/src/message.rs", "rank": 13, "score": 201232.660389938 }, { "content": "/// An one-off function to parse a chunk from a given span in the `Source`.\n\n///\n\n/// Most parsing errors can be recovered, so the caller should also determine if\n\n/// it can continue in spite of reported errors.\n\n/// `kailua_diag::report::TrackMaxKind` is useful for this.\n\npub fn parse_chunk(source: &Source, span: Span, report: &Report) -> kailua_diag::Result<Chunk> {\n\n if let Some(mut iter) = source.iter_from_span(span) {\n\n let mut lexer = Lexer::new(&mut iter, &report);\n\n let mut nest = Nest::new(&mut lexer);\n\n let parser = Parser::new(&mut nest, &report);\n\n parser.into_chunk()\n\n } else {\n\n use 
kailua_diag::Reporter;\n\n report.fatal(span, message::NoFileForSpan {}).done()\n\n }\n\n}\n\n\n", "file_path": "kailua_syntax/src/lib.rs", "rank": 14, "score": 195808.18139877077 }, { "content": "/// Any types with subtyping or equivalence relations.\n\npub trait Lattice<Other = Self> {\n\n /// Asserts that `self` is a consistent subtype of `other` under the type context.\n\n fn assert_sub(&self, other: &Other, ctx: &mut TypeContext) -> TypeResult<()>;\n\n\n\n /// Asserts that `self` is a consistent type equal to `other` under the type context.\n\n fn assert_eq(&self, other: &Other, ctx: &mut TypeContext) -> TypeResult<()>;\n\n}\n\n\n\nimpl<A: Union<B>, B> Union<Box<B>> for Box<A> {\n\n type Output = <A as Union<B>>::Output;\n\n\n\n fn union(&self, other: &Box<B>, explicit: bool,\n\n ctx: &mut TypeContext) -> TypeResult<Self::Output> {\n\n (**self).union(other, explicit, ctx)\n\n }\n\n}\n\n\n\nimpl<A: Lattice<B>, B> Lattice<Box<B>> for Box<A> {\n\n fn assert_sub(&self, other: &Box<B>, ctx: &mut TypeContext) -> TypeResult<()> {\n\n (**self).assert_sub(other, ctx)\n", "file_path": "kailua_types/src/ty/mod.rs", "rank": 15, "score": 195510.30841125737 }, { "content": "/// Any types that can produce a union type, which is a supertype of two input types.\n\npub trait Union<Other = Self> {\n\n /// A type of the resulting type.\n\n type Output;\n\n\n\n /// Calculates a union type of `self` and `other`, explicitly or implicitly.\n\n ///\n\n /// Kailua distinguishes two kinds of union types, explicitly constructed or not.\n\n /// Explicitly constructed types are from the AST and should be retained as much as possible,\n\n /// with a good fact that types constructible from the AST are limited and simpler.\n\n /// `3 | 4` is one such example.\n\n ///\n\n /// Implicitly constructed types are used for `or` operations or implicit return types,\n\n /// and will use a much more coarse lattice than the explicit construction.\n\n /// `3 | 4` will result in `integer` in this 
mode.\n\n /// Because this is severely limited, the implicit union can only shrink the type's size.\n\n fn union(&self, other: &Other, explicit: bool,\n\n ctx: &mut TypeContext) -> TypeResult<Self::Output>;\n\n}\n\n\n", "file_path": "kailua_types/src/ty/mod.rs", "rank": 16, "score": 195505.7312082719 }, { "content": "/// An extension to `kailua_diag::ReportMore` for type reports.\n\npub trait TypeReportMore {\n\n /// Generates (normal) reports from given type report with an optional display hint.\n\n fn report_types(self, r: TypeReport, hint: TypeReportHint) -> Self;\n\n}\n\n\n\nimpl<'a, T> TypeReportMore for ReportMore<'a, T> {\n\n fn report_types(mut self, r: TypeReport, mut hint: TypeReportHint) -> ReportMore<'a, T> {\n\n trace!(\"collected type reports: {:#?}\", r);\n\n\n\n fn report_binary<\n\n 'a, 'b, T,\n\n M: 'b + Localize, Msg: FnOnce(&'b str, &'b str) -> M,\n\n MS: 'b + Localize, MsgSelf: FnOnce(&'b str, &'b str) -> MS,\n\n MFA: 'b + Localize, MsgFuncArgs: FnOnce(&'b str, &'b str, Ordinal) -> MFA,\n\n MMA: 'b + Localize, MsgMethodArgs: FnOnce(&'b str, &'b str, Ordinal) -> MMA,\n\n MR: 'b + Localize, MsgReturns: FnOnce(&'b str, &'b str, Ordinal) -> MR,\n\n >(\n\n more: ReportMore<'a, T>, lhs: &'b Spanned<String>, rhs: &'b Spanned<String>,\n\n idx: Option<usize>, hint: &mut TypeReportHint,\n\n make_msg: Msg, make_msg_in_self: MsgSelf, make_msg_in_func_args: MsgFuncArgs,\n", "file_path": "kailua_types/src/diag.rs", "rank": 17, "score": 183277.56689531813 }, { "content": "fn strip_newline(mut s: SourceSlice) -> SourceSlice {\n\n match s {\n\n SourceSlice::U8(ref mut s) => loop {\n\n match s.last() {\n\n Some(&b'\\r') | Some(&b'\\n') => { *s = &s[..s.len()-1]; }\n\n _ => { break; }\n\n }\n\n },\n\n SourceSlice::U16(ref mut s) => loop {\n\n match s.last() {\n\n Some(&0x0a) | Some(&0x0d) => { *s = &s[..s.len()-1]; }\n\n _ => { break; }\n\n }\n\n },\n\n }\n\n s\n\n}\n\n\n\n/// An implementation of `Report` that reports to stderr, optionally colored.\n\n///\n", 
"file_path": "kailua_diag/src/report.rs", "rank": 18, "score": 182465.800699643 }, { "content": "#[derive(Clone, Debug)]\n\nenum ReportItem {\n\n Binary(BinaryReportKind, Origin, Spanned<String>, Spanned<String>, Option<usize>),\n\n LessArity(Span, Spanned<String>, usize),\n\n MoreArity(Spanned<String>, Span, usize),\n\n CannotUnionSingle(Spanned<String>),\n\n CannotAssign(Origin, Spanned<String>, Spanned<String>),\n\n CannotUpdate(Origin, Spanned<String>),\n\n CannotFilter(Origin, Spanned<String>),\n\n InextensibleRec(Span),\n\n RecursiveRec(Span),\n\n RecDuplicateKey(Span, Spanned<Key>),\n\n RecCannotHaveKey(Span, Spanned<Key>),\n\n RecShouldHaveKeys(Span, Spanned<Vec<Key>>),\n\n RecExtendedWithNonNil(Span, Spanned<Key>, Spanned<String>),\n\n}\n\n\n\nimpl TypeReport {\n\n pub fn new(locale: Locale) -> TypeReport {\n\n TypeReport { locale: locale, messages: Vec::new() }\n\n }\n", "file_path": "kailua_types/src/diag.rs", "rank": 19, "score": 178953.46667290688 }, { "content": "/// A report receiver.\n\n///\n\n/// This trait is not suitable for actual reporting; consider using the `Reporter` trait instead.\n\n/// (An additional trait is required to make this trait object-friendly.)\n\npub trait Report {\n\n fn message_locale(&self) -> Locale;\n\n fn add_span(&self, kind: Kind, span: Span, msg: &Localize) -> Result<()>;\n\n}\n\n\n\nimpl<'a, R: Report + ?Sized> Report for &'a R {\n\n fn message_locale(&self) -> Locale { (**self).message_locale() }\n\n fn add_span(&self, k: Kind, s: Span, m: &Localize) -> Result<()> { (**self).add_span(k, s, m) }\n\n}\n\n\n\nimpl<'a, R: Report + ?Sized> Report for &'a mut R {\n\n fn message_locale(&self) -> Locale { (**self).message_locale() }\n\n fn add_span(&self, k: Kind, s: Span, m: &Localize) -> Result<()> { (**self).add_span(k, s, m) }\n\n}\n\n\n\nimpl<'a, R: Report + ?Sized> Report for Box<R> {\n\n fn message_locale(&self) -> Locale { (**self).message_locale() }\n\n fn add_span(&self, k: Kind, s: Span, m: &Localize) -> 
Result<()> { (**self).add_span(k, s, m) }\n\n}\n\n\n\nimpl<'a, R: Report + ?Sized> Report for Rc<R> {\n\n fn message_locale(&self) -> Locale { (**self).message_locale() }\n\n fn add_span(&self, k: Kind, s: Span, m: &Localize) -> Result<()> { (**self).add_span(k, s, m) }\n\n}\n\n\n\nimpl<'a, R: Report + ?Sized> Report for Arc<R> {\n\n fn message_locale(&self) -> Locale { (**self).message_locale() }\n\n fn add_span(&self, k: Kind, s: Span, m: &Localize) -> Result<()> { (**self).add_span(k, s, m) }\n\n}\n\n\n", "file_path": "kailua_diag/src/report.rs", "rank": 20, "score": 178433.4821824297 }, { "content": "fn build_app() -> App<'static, 'static> {\n\n clap_app!(kailua =>\n\n (@setting SubcommandRequiredElseHelp)\n\n (@setting UnifiedHelpMessage)\n\n (@setting NextLineHelp)\n\n (@setting VersionlessSubcommands)\n\n (version: option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"(version unknown)\"))\n\n (about:\n\n \"\\u{1f334} Type Checker and IDE Support for Lua.\\n\\\n\n https://github.com/devcat-studio/kailua/\")\n\n (max_term_width: 100)\n\n (@subcommand check =>\n\n (@setting UnifiedHelpMessage)\n\n (@setting NextLineHelp)\n\n (about:\n\n \"Performs type checking in the workspace.\\n\\\n\n \\n\\\n\n Prints reports (can be suppressed with `-q`) to the standard error,\\n\\\n\n then terminates with an exit code 1 on error.\\n\\\n\n The configuration can be either given as JSON or command-line options.\")\n", "file_path": "src/main.rs", "rank": 21, "score": 176188.78682940814 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nenum BinaryReportKind {\n\n NotSubtype,\n\n NotEqual,\n\n CannotUnion(bool /*explicit*/),\n\n}\n\n\n", "file_path": "kailua_types/src/diag.rs", "rank": 22, "score": 175339.94139748602 }, { "content": "#[test]\n\nfn test_locale_names() {\n\n assert!(Locale::new(\"\").is_none());\n\n assert!(Locale::new(\"e\").is_none());\n\n assert!(Locale::new(\"en\").is_some());\n\n assert!(Locale::new(\"ko\").is_some());\n\n 
assert!(Locale::new(\"ko-KR\").is_some());\n\n assert_eq!(Locale::from(\"ko-KR\"), Locale::from(\"ko_kr\"));\n\n assert_ne!(Locale::from(\"ko-KR\"), Locale::from(\"ko\"));\n\n assert_ne!(Locale::from(\"kor\"), Locale::from(\"ko\"));\n\n assert_eq!(Locale::from(\"KO\"), Locale::from(\"ko\"));\n\n assert!(Locale::new(\"ko-KR-x-qqq\").is_none());\n\n}\n\n\n", "file_path": "kailua_diag/src/message.rs", "rank": 23, "score": 175167.18718797583 }, { "content": "pub fn main() {\n\n use kailua_diag::message::{Locale, get_message_locale};\n\n use kailua_workspace::{Config, Workspace};\n\n use kailua_langsvr::Target;\n\n\n\n env_logger::init().unwrap();\n\n\n\n let matches = build_app().get_matches();\n\n\n\n if let Some(ref matches) = matches.subcommand_matches(\"check\") {\n\n let path = Path::new(matches.value_of(\"path\").unwrap_or(\".\"));\n\n\n\n let mut config = if path.is_dir() {\n\n Config::from_base_dir(path.to_owned())\n\n } else {\n\n Config::from_start_path(path.to_owned())\n\n };\n\n\n\n if let Some(config_path) = matches.value_of(\"config\") {\n\n match config.set_config_path(Path::new(config_path).to_owned()) {\n", "file_path": "src/main.rs", "rank": 24, "score": 171035.18258433143 }, { "content": "/// Extension methods for `Report`. 
This is what you normally want to use.\n\npub trait Reporter: Report + Sized {\n\n /// Reports a fatal error with given location and message.\n\n /// Additional errors can be chained and should finish with `.done()` call.\n\n fn fatal<Loc: Into<Span>, Msg: Localize, T>(&self, loc: Loc, msg: Msg) -> ReportMore<T> {\n\n info!(\"reporting fatal error: {:?}\", msg);\n\n let ret = self.add_span(Kind::Fatal, loc.into(), &msg);\n\n let ret = ret.map(|_| panic!(\"Report::fatal should always return Err\"));\n\n ReportMore::new(self, ret)\n\n }\n\n\n\n /// Reports a recoverable error with given location and message.\n\n /// Additional errors can be chained and should finish with `.done()` call.\n\n fn error<Loc: Into<Span>, Msg: Localize>(&self, loc: Loc, msg: Msg) -> ReportMore<()> {\n\n info!(\"reporting error: {:?}\", msg);\n\n let ret = self.add_span(Kind::Error, loc.into(), &msg);\n\n ReportMore::new(self, ret)\n\n }\n\n\n\n /// Reports a warning with given location and message.\n\n /// Additional errors can be chained and should finish with `.done()` call.\n", "file_path": "kailua_diag/src/report.rs", "rank": 25, "score": 169737.5601423845 }, { "content": "pub fn complete_name(tokens: &[NestedToken], name_idx: usize, nesting_category: NestingCategory,\n\n pos: Pos, last_chunk: &Chunk, all_chunks: &[Arc<Chunk>],\n\n source: &Source) -> Vec<CompletionItem> {\n\n let mut items = Vec::new();\n\n\n\n // check if the caret is at the name definition and autocompletion should be disabled\n\n if nesting_category == NestingCategory::Expr && is_name_completion_disabled(tokens, name_idx) {\n\n return items;\n\n }\n\n\n\n // if the current word being typed matches exactly a keyword, we temporarily pause\n\n // the completion to avoid capturing the carriage return from the completion. 
(XXX suboptimal)\n\n let name_token = &tokens[name_idx].tok;\n\n if name_token.span.end() == pos {\n\n if let Tok::Keyword(_) = name_token.base {\n\n return items;\n\n }\n\n }\n\n\n\n let mut seen = HashSet::new();\n", "file_path": "kailua_langsvr/src/ops/completion.rs", "rank": 26, "score": 168325.23053276542 }, { "content": "fn io_error(s: &str) -> ! {\n\n Error::with_description(s, ErrorKind::Io).exit();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 163465.0453422279 }, { "content": "pub fn help<F>(outputs: &[Arc<Output>], pos: Pos, source: &Source, mut localize: F) -> Option<Hover>\n\n where F: for<'a> FnMut(&'a Localize) -> Localized<'a, Localize>\n\n{\n\n // for multiple outputs, we deduplicate the identical types\n\n let mut hover_range = None;\n\n let mut contents = Vec::new();\n\n let mut seen = HashSet::new();\n\n\n\n for output in outputs {\n\n // find all slot-associated spans that contains the pos...\n\n let spans = output.spanned_slots().contains(pos);\n\n // ...and pick the smallest one among them (there should be at most one such span).\n\n let closest_slot = spans.min_by_key(|slot| slot.span.len());\n\n\n\n // format the slot if available\n\n if let Some(slot) = closest_slot {\n\n // the resulting output should be colorized as if it's in `--:`.\n\n // in order to use a single syntax, we use a sequence of random invisible\n\n // characters to \"trick\" the colorizer.\n\n const TYPE_PREFIX: &'static str =\n", "file_path": "kailua_langsvr/src/ops/hover.rs", "rank": 28, "score": 155935.73098885338 }, { "content": "fn is_name_or_str(tok: &NestedToken) -> bool {\n\n match tok.tok.base { Tok::Str(_) | Tok::Name(_) => true, _ => false }\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 29, "score": 154521.87784656283 }, { "content": "fn split_line<'a>(s: &'a str, file: Option<&'a str>,\n\n lineno: usize) -> Result<(&'a str, Option<Expected<'a>>), TestError> {\n\n use regex::Regex;\n\n\n\n lazy_static! 
{\n\n static ref LINE_PATTERN: Regex =\n\n Regex::new(r\"(?xs)\n\n ^ (?P<line> .*?)\n\n --@ (?: (?P<line1> \\d+ | < | \\^+ | v+)\n\n (?: - (?P<line2> \\d+ | < | \\^+ | v+) )?\n\n )?\n\n \\s+ (?P<kind> \\w+): (?P<msg> .*)\n\n $\").unwrap();\n\n }\n\n\n\n fn kind_from_str(s: &str) -> Option<Kind> {\n\n match &s.to_ascii_lowercase()[..] {\n\n \"note\" => Some(Kind::Note),\n\n \"warn\" | \"warning\" => Some(Kind::Warning),\n\n \"cause\" | \"because\" => Some(Kind::Cause),\n", "file_path": "kailua_test/src/lib.rs", "rank": 30, "score": 154407.59499417106 }, { "content": "// serde-json does not allow comments that we really need to...\n\n// this will roughly \"tokenize\" (seemingly) JSON and remove comments as much as possible.\n\n// also a stray comma before `]` or `}` will be removed.\n\nfn dehumanize_json(s: &str) -> String {\n\n use regex::Regex;\n\n\n\n lazy_static! {\n\n static ref TOKEN_PATTERN: Regex =\n\n Regex::new(r#\"(?xs)\n\n \"(?:\\\\.|[^\"])*\" | # strings should be skipped altogether\n\n //[^\\r\\n]* | # single-line comment\n\n /\\*.*?\\*/ | # possibly-multi-line comment\n\n . # others are simply passed through\n\n \"#).unwrap();\n\n }\n\n\n\n let mut out = String::new();\n\n let mut prev_was_comma = false;\n\n for tok in TOKEN_PATTERN.find_iter(s) {\n\n let tok = tok.as_str();\n\n if tok.starts_with(\"//\") || tok.starts_with(\"/*\") {\n\n out.push(' ');\n\n } else if tok == \" \" || tok == \"\\t\" || tok == \"\\n\" || tok == \"\\r\" {\n", "file_path": "kailua_workspace/src/lib.rs", "rank": 31, "score": 151557.14007252306 }, { "content": "fn io_error_while(e: io::Error, s: &str) -> ! 
{\n\n let mut e = Error::from(e);\n\n e.message = format!(\"{} (while {})\", e.message, s);\n\n e.exit();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 146305.6712033645 }, { "content": "pub fn local_var_uses(tokens: &[NestedToken], chunk: &Chunk, scoped_id: &ScopedId) -> Vec<Span> {\n\n let mut spans = Vec::new();\n\n\n\n for (tok, aux) in tokens.iter().zip(chunk.token_aux.iter()) {\n\n let found = match (&tok.tok.base, aux) {\n\n (&Tok::Name(_), &TokenAux::LocalVarName(ref tok_id)) => {\n\n match chunk.local_names.get(tok_id).map(|def| &def.kind) {\n\n Some(&LocalNameKind::User) => tok_id == scoped_id,\n\n Some(&LocalNameKind::AssumedToLocal(ref id)) => id == scoped_id,\n\n _ => false,\n\n }\n\n },\n\n (_, _) => false,\n\n };\n\n\n\n if found {\n\n spans.push(tok.tok.span);\n\n }\n\n }\n\n\n\n spans\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 33, "score": 146302.89656426417 }, { "content": "fn collect_tokens(source: &Source, span: Span, report: &Report) -> Vec<NestedToken> {\n\n let mut iter = source.iter_from_span(span).unwrap();\n\n let tokens = {\n\n let mut lexer = Lexer::new(&mut iter, report);\n\n let nest = Nest::new(&mut lexer);\n\n nest.collect::<Vec<_>>()\n\n };\n\n assert!(!tokens.is_empty()); // should include EOF\n\n tokens\n\n}\n\n\n", "file_path": "kailua_langsvr/src/workspace.rs", "rank": 34, "score": 145825.3945869545 }, { "content": "pub fn stderr_or_dummy() -> Box<StderrTerminal> {\n\n match term::stderr() {\n\n Some(t) => t,\n\n None => Box::new(DummyTerminal::new(io::stderr())),\n\n }\n\n}\n\n\n", "file_path": "kailua_diag/src/dummy_term.rs", "rank": 35, "score": 144885.3913469559 }, { "content": "fn parse_to_chunk(tokens: Vec<NestedToken>, report: &Report) -> kailua_diag::Result<Chunk> {\n\n let mut tokens = tokens.into_iter();\n\n let chunk = Parser::new(&mut tokens, report).into_chunk();\n\n chunk\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct OpenDocument {\n\n uri: String,\n\n lang_id: 
String,\n\n last_version: u64,\n\n last_text: String,\n\n}\n\n\n\nimpl OpenDocument {\n\n fn new(item: protocol::TextDocumentItem) -> OpenDocument {\n\n OpenDocument {\n\n uri: item.uri,\n\n lang_id: item.languageId,\n\n last_version: item.version,\n", "file_path": "kailua_langsvr/src/workspace.rs", "rank": 36, "score": 144311.5582271323 }, { "content": "// internal use only, not exposed outside\n\npub fn unit_from_u32(unit: u32) -> Unit {\n\n Unit { unit: unit }\n\n}\n\n\n\nconst BUILTIN_UNIT: u32 = 0xffffffff;\n\n\n\nimpl Unit {\n\n pub fn dummy() -> Unit {\n\n Unit { unit: 0 }\n\n }\n\n\n\n pub fn builtin() -> Unit {\n\n Unit { unit: BUILTIN_UNIT }\n\n }\n\n\n\n pub fn is_dummy(&self) -> bool {\n\n self.unit == 0\n\n }\n\n\n\n pub fn is_source_dependent(&self) -> bool {\n", "file_path": "kailua_env/src/loc.rs", "rank": 37, "score": 142883.43886982702 }, { "content": "fn uri_to_path(uri: &str) -> WorkspaceResult<PathBuf> {\n\n let url = Url::parse(uri).map_err(|_| WorkspaceError(\"invalid URI\"))?;\n\n if url.scheme() != \"file\" {\n\n return Err(WorkspaceError(\"non-file URI\"));\n\n }\n\n if let Ok(path) = url.to_file_path() {\n\n return Ok(path);\n\n }\n\n\n\n #[cfg(windows)]\n\n {\n\n use std::ffi::OsString;\n\n use std::path::Component;\n\n use url::Host;\n\n\n\n // Url::to_file_path only handles no host or localhost, which is different from vscode-uri\n\n // we first try localhost then retry by temporarily setting the authority part on windows\n\n let host = match url.host() {\n\n Some(Host::Domain(name)) => name.to_string(),\n\n Some(Host::Ipv4(addr)) => addr.to_string(),\n", "file_path": "kailua_langsvr/src/workspace.rs", "rank": 38, "score": 139374.4199630747 }, { "content": "/// Tries to connect to given target and launches a language server.\n\n///\n\n/// This may return early if it couldn't connect to the target.\n\n/// Also returns after receiving a shutdown request.\n\npub fn main(target: Target) -> io::Result<()> {\n\n use 
std::net::TcpStream;\n\n\n\n info!(\"starting kailua_langsvr {}\",\n\n option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"unknown version\"));\n\n\n\n let server = match target {\n\n Target::Stdio => Server::from_stdio(),\n\n Target::TCP(addr) => {\n\n let stream = TcpStream::connect(addr)?;\n\n Server::from_tcp_stream(stream)?\n\n },\n\n };\n\n info!(\"established connection\");\n\n\n\n let workspace = Arc::new(RwLock::new(initialize_workspace(&server)));\n\n info!(\"initialized workspace, starting a main loop\");\n\n\n\n main_loop(server, workspace);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "kailua_langsvr/src/lib.rs", "rank": 39, "score": 139358.36320521517 }, { "content": "#[test]\n\nfn test_types_tvar() {\n\n let mut types = Types::new(Locale::dummy(), Box::new(DummyClassProvider));\n\n\n\n { // idempotency of bounds\n\n let v1 = types.gen_tvar();\n\n assert!(types.assert_tvar_sub(v1, &Ty::new(T::Integer)).is_ok());\n\n assert!(types.assert_tvar_sub(v1, &Ty::new(T::Integer)).is_ok());\n\n assert!(types.assert_tvar_sub(v1, &Ty::new(T::String)).is_err());\n\n }\n\n\n\n { // empty bounds (lb & ub = bottom)\n\n let v1 = types.gen_tvar();\n\n assert!(types.assert_tvar_sub(v1, &Ty::new(T::Integer)).is_ok());\n\n assert!(types.assert_tvar_sup(v1, &Ty::new(T::String)).is_err());\n\n\n\n let v2 = types.gen_tvar();\n\n assert!(types.assert_tvar_sup(v2, &Ty::new(T::Integer)).is_ok());\n\n assert!(types.assert_tvar_sub(v2, &Ty::new(T::String)).is_err());\n\n }\n\n\n", "file_path": "kailua_types/src/env/mod.rs", "rank": 40, "score": 138099.53854455648 }, { "content": "#[test]\n\nfn test_types_is_send_and_sync() {\n\n fn _assert_send<T: Send>(_x: T) {}\n\n fn _assert_sync<T: Sync>(_x: T) {}\n\n\n\n _assert_send(Types::new(Locale::dummy(), Box::new(DummyClassProvider)));\n\n _assert_sync(Types::new(Locale::dummy(), Box::new(DummyClassProvider)));\n\n}\n\n\n", "file_path": "kailua_types/src/env/mod.rs", "rank": 41, "score": 136134.16821170598 }, { "content": "/// A trait that 
provides every type-related operations.\n\n///\n\n/// This interface is used to decouple the dependency between types and the type environment.\n\n/// In practice, the full implementation is almost always provided by `kailua_types::env::Types`.\n\npub trait TypeContext {\n\n /// Generates a new, empty type report.\n\n fn gen_report(&self) -> TypeReport;\n\n\n\n /// Returns the latest type variable generated, if any.\n\n fn last_tvar(&self) -> Option<TVar>;\n\n\n\n /// Generates a new fresh type variable.\n\n fn gen_tvar(&mut self) -> TVar;\n\n\n\n /// Copies a type variable so that a new variable has the same constraints to the original\n\n /// but is no longer connected to the original.\n\n ///\n\n /// Mainly used for generalization.\n\n fn copy_tvar(&mut self, tvar: TVar) -> TVar;\n\n\n\n /// Asserts that the type variable has given upper bound.\n\n fn assert_tvar_sub(&mut self, lhs: TVar, rhs: &Ty) -> TypeResult<()>;\n\n\n\n /// Asserts that the type variable has given lower bound.\n", "file_path": "kailua_types/src/ty/mod.rs", "rank": 42, "score": 135795.91133210846 }, { "content": "--v function(self: map<string, integer>)\n\nfunction Hello.init(self) --@< Error: The type `function(self: map<string, integer>) --> ()` of the constructor (`init` method) doesn't have a correct type for the first argument\n\n self.foo = 42\n\nend\n\n\n\nlocal h = Hello.new() --: Hello\n\n--! 
error\n\n\n", "file_path": "kailua_check/src/tests/class_gideros.lua", "rank": 43, "score": 133010.7901468302 }, { "content": "// internal use only, not exposed outside\n\npub fn pos_from_u32(unit: Unit, pos: u32) -> Pos {\n\n Pos { unit: unit.unit, pos: pos }\n\n}\n\n\n\nimpl Pos {\n\n pub fn dummy() -> Pos {\n\n Pos { unit: 0, pos: 0 }\n\n }\n\n\n\n pub fn builtin() -> Pos {\n\n Pos { unit: BUILTIN_UNIT, pos: 0 }\n\n }\n\n\n\n pub fn is_dummy(&self) -> bool {\n\n self.unit().is_dummy()\n\n }\n\n\n\n pub fn is_source_dependent(&self) -> bool {\n\n self.unit().is_source_dependent()\n\n }\n", "file_path": "kailua_env/src/loc.rs", "rank": 44, "score": 132361.38619562224 }, { "content": "pub fn complete_field(tokens: &[NestedToken], sep_idx: usize,\n\n outputs: &[Arc<Output>]) -> Option<Vec<CompletionItem>> {\n\n let end = if let Some((_idx, tok)) = last_non_comment(&tokens[..sep_idx]) {\n\n tok.tok.span.end()\n\n } else {\n\n // there is no chance that this will yield completions\n\n return Some(Vec::new());\n\n };\n\n\n\n // for multiple outputs, we combine all possible fields and deduplicate them\n\n let mut items = Vec::new();\n\n let mut seen = HashSet::new(); // we never return the same name twice\n\n for output in outputs {\n\n let slot = get_prefix_expr_slot(end, output);\n\n debug!(\"complete_field: get_prefix_expr_slot({:#?}) returns {:?}\", end, slot);\n\n\n\n if let Some(slot) = slot {\n\n // now we've got the closest slot for given position;\n\n // check if it's actually a table or similar (if it's not, we will fail fast)\n\n if let Some(fields) = output.get_available_fields(&slot.unlift()) {\n", "file_path": "kailua_langsvr/src/ops/completion.rs", "rank": 45, "score": 131933.89403237088 }, { "content": "#[derive(Clone, Debug)]\n\nenum Field {\n\n // the field is defined in this class\n\n Slot(Spanned<Slot>),\n\n\n\n // field(s) are defined in the children class\n\n Children,\n\n\n\n // field(s) are defined in the instance of current class\n\n 
Instance,\n\n}\n\n\n", "file_path": "kailua_check/src/class_system/gideros.rs", "rank": 46, "score": 131016.55059768079 }, { "content": "#[test]\n\nfn test_unioned_simplify() {\n\n assert_eq!(Unioned::empty().simplify(), T::None);\n\n assert_eq!(Unioned::explicit_bool(true).simplify(),\n\n T::Union(Cow::Owned(Unioned::explicit_bool(true))));\n\n assert_eq!(Unioned::explicit_int(42).simplify(),\n\n T::Union(Cow::Owned(Unioned::explicit_int(42))));\n\n assert_eq!(Unioned::explicit_str(b\"foo\"[..].into()).simplify(),\n\n T::Union(Cow::Owned(Unioned::explicit_str(b\"foo\"[..].into()))));\n\n assert_eq!(Unioned { numbers: Some(Numbers::All), ..Unioned::empty() }.simplify(),\n\n T::Number);\n\n}\n\n\n", "file_path": "kailua_types/src/ty/union.rs", "rank": 47, "score": 126403.84778535579 }, { "content": "/// Any type that can have a dummy value for errors.\n\npub trait Dummy {\n\n /// Generates a dummy value.\n\n fn dummy() -> Self;\n\n}\n\n\n\n/// An implementation of `TypeContext` that raises an error for most methods.\n\n///\n\n/// Useful for ensuring that no operations involve type variables or row variables.\n\npub struct NoTypeContext;\n\n\n\nimpl TypeContext for NoTypeContext {\n\n fn gen_report(&self) -> TypeReport {\n\n TypeReport::new(Locale::dummy())\n\n }\n\n fn last_tvar(&self) -> Option<TVar> {\n\n None\n\n }\n\n fn gen_tvar(&mut self) -> TVar {\n\n panic!(\"gen_tvar is not supposed to be called here\");\n\n }\n", "file_path": "kailua_types/src/ty/mod.rs", "rank": 48, "score": 126379.81331652861 }, { "content": "pub trait Partition {\n\n fn create(parent: usize, rank: usize) -> Self;\n\n fn read(&self) -> (usize /*parent*/, usize /*rank*/);\n\n fn write_parent(&self, parent: usize);\n\n fn increment_rank(&mut self);\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Partitions<T> {\n\n map: VecMap<T>,\n\n}\n\n\n\nimpl<T: Partition> Partitions<T> {\n\n pub fn new() -> Partitions<T> {\n\n Partitions { map: VecMap::new() }\n\n }\n\n\n\n pub fn find(&self, i: usize) -> 
usize {\n\n if let Some(u) = self.map.get(i) {\n\n let (mut parent, _) = u.read();\n", "file_path": "kailua_types/src/env/partitions.rs", "rank": 49, "score": 126368.87540349034 }, { "content": "pub fn classify(tokens: &[NestedToken], pos: Pos) -> Option<Class> {\n\n let (idx, end, after) = index_and_neighbor(tokens, pos, |tok| tok.tok.span);\n\n\n\n let ptok = if idx > 0 { tokens.get(idx - 1) } else { None };\n\n let tok = tokens.get(idx);\n\n match (end, after, ptok.map(|tok| &tok.tok.base), tok.map(|tok| &tok.tok.base)) {\n\n // ... `.` | ...\n\n // ... `:` | ...\n\n (true, true, _, Some(&Tok::Punct(Punct::Dot))) |\n\n (true, true, _, Some(&Tok::Punct(Punct::Colon))) => {\n\n Some(Class::Field(idx))\n\n },\n\n\n\n // ... `.` NAM|E ...\n\n // ... `:` NAM|E ...\n\n // ... `.` NAME | ... (with no space between NAME and the caret)\n\n // ... `:` NAME | ...\n\n (_, true, Some(&Tok::Punct(Punct::Dot)), Some(&Tok::Name(_))) |\n\n (_, true, Some(&Tok::Punct(Punct::Dot)), Some(&Tok::Keyword(_))) |\n\n (_, true, Some(&Tok::Punct(Punct::Colon)), Some(&Tok::Name(_))) |\n", "file_path": "kailua_langsvr/src/ops/completion.rs", "rank": 50, "score": 126024.26173858822 }, { "content": "pub fn locate(tokens: &[NestedToken], pos: Pos) -> Option<Loc> {\n\n enclosing_func_call(tokens, pos).map(|(token_idx, arg_idx)| {\n\n Loc { args_token_idx: token_idx, arg_idx: arg_idx }\n\n })\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/signature.rs", "rank": 51, "score": 126024.26173858822 }, { "content": "fn format_ascii_vec(f: &mut fmt::Formatter, s: &[u8]) -> fmt::Result {\n\n for &c in s {\n\n match c {\n\n b'\\t' => write!(f, \"\\\\t\")?,\n\n b'\\n' => write!(f, \"\\\\n\")?,\n\n b'\\r' => write!(f, \"\\\\r\")?,\n\n b'\"' | b'\\'' | b'`' | b'\\\\' => write!(f, \"\\\\{}\", c as char)?,\n\n b'\\x20'...b'\\x7e' => write!(f, \"{}\", c as char)?,\n\n _ => write!(f, \"\\\\x{:02x}\", c)?,\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n/// A name from the source code.\n\n///\n\n/// This may include 
non-identifier bytes if constructed from a quoted name in the meta block.\n\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Name(Box<[u8]>);\n\n\n", "file_path": "kailua_syntax/src/string.rs", "rank": 52, "score": 125615.82247755936 }, { "content": "// internal use only, not exposed outside\n\npub fn span_from_u32(unit: Unit, begin: u32, end: u32) -> Span {\n\n Span { unit: unit.unit, begin: begin, end: end }\n\n}\n\n\n\nimpl Span {\n\n pub fn new(begin: Pos, end: Pos) -> Span {\n\n if begin.is_dummy() || end.is_dummy() {\n\n Span::dummy()\n\n } else {\n\n assert!(begin.unit == end.unit, \"Span::new with positions from different units\");\n\n if begin.pos <= end.pos {\n\n Span { unit: begin.unit, begin: begin.pos, end: end.pos }\n\n } else {\n\n // this is possible when the range actually describes an empty span.\n\n // in the ordinary case we take the beginning of the first token and\n\n // the end of the last token for the span:\n\n //\n\n // function f() FIRST_TOKEN ... 
LAST_TOKEN end\n\n // ^ begin end ^\n\n //\n", "file_path": "kailua_env/src/loc.rs", "rank": 53, "score": 123530.83876526494 }, { "content": "pub fn translate_span(span: Span, source: &Source) -> Option<(String, Range)> {\n\n source.get_file(span.unit()).and_then(|file| {\n\n translate_span_without_path(span, file).map(|range| (file.path().to_owned(), range))\n\n })\n\n}\n\n\n\n// hierarchical diagnostics, forming a DAG.\n\n// a report tree can be optionally associated to a particular path, in which case the path is\n\n// considered to be included in the reports (even though no report for that path is generated).\n\n#[derive(Clone)]\n\npub struct ReportTree {\n\n inner: Arc<ReportTreeInner>,\n\n}\n\n\n", "file_path": "kailua_langsvr/src/diags.rs", "rank": 54, "score": 123519.67519732397 }, { "content": "pub fn translate_span_without_path(span: Span, file: &SourceFile) -> Option<Range> {\n\n // ignore any unknown span\n\n if span.unit() != file.span().unit() {\n\n return None;\n\n }\n\n let (beginline, mut spans, endline) = match file.lines_from_span(span) {\n\n Some(lines) => lines,\n\n None => return None,\n\n };\n\n\n\n // VS Code expects the u16 offset (i.e. as JavaScript .charAt expects)\n\n fn calculate_u16_offset(linebegin: Pos, pos: Pos, file: &SourceFile) -> usize {\n\n let begin = linebegin.to_usize();\n\n let off = pos.to_usize();\n\n assert!(begin <= off);\n\n\n\n match file.data() {\n\n // for u8 data, calculate the number of u16s from the slice\n\n SourceSlice::U8(s) => {\n\n let s = &s[begin..off];\n", "file_path": "kailua_langsvr/src/diags.rs", "rank": 55, "score": 122784.89054894689 }, { "content": "function p.a() end --@< Error: Cannot update the immutable type `const {}` by indexing\n\np.a() --@< Error: Missing key \"a\" in `const {}`\n\n--! 
error\n\n\n\n--8<-- method-decl-const-nested\n\nlocal p = { a = {} } --: const {a: const {}}\n", "file_path": "kailua_check/src/tests/tests.lua", "rank": 56, "score": 122760.61605066629 }, { "content": "function p.a.b() end --@< Error: Cannot update the immutable type `const {}` by indexing\n\np.a.b() --@< Error: Missing key \"b\" in `const {}`\n\n--! error\n\n\n\n--8<-- methodcall-string-meta-table\n\n--# assume blah: [string_meta] { byte: function(string) --> integer }\n\nlocal x = ('f'):byte() --: integer\n\n--! ok\n\n\n\n--8<-- methodcall-string-meta-dynamic\n\n--# assume blah: [string_meta] WHATEVER\n\nlocal x = ('f'):foobar(1, 'what', false) --: string\n\n--! ok\n\n\n\n--8<-- methodcall-string-meta-undefined\n\nlocal x = ('f'):byte()\n\n--@^ Error: Cannot use string methods as a metatable for `string` type is not yet defined\n\n--! error\n\n\n\n--8<-- methodcall-string-meta-non-table\n", "file_path": "kailua_check/src/tests/tests.lua", "rank": 57, "score": 122760.61605066629 }, { "content": "/// Any type that can be formatted into a localized text.\n\npub trait Localize: fmt::Debug {\n\n fn fmt_localized(&self, f: &mut fmt::Formatter, locale: Locale) -> fmt::Result;\n\n}\n\n\n\nimpl<'a> Localize for &'a Localize {\n\n fn fmt_localized(&self, f: &mut fmt::Formatter, locale: Locale) -> fmt::Result {\n\n (**self).fmt_localized(f, locale)\n\n }\n\n}\n\n\n\nimpl<T: fmt::Display + fmt::Debug> Localize for T {\n\n fn fmt_localized(&self, f: &mut fmt::Formatter, _locale: Locale) -> fmt::Result {\n\n fmt::Display::fmt(self, f)\n\n }\n\n}\n\n\n\n/// A helper type for formatting the localized text.\n\n///\n\n/// For example, `format!(\"{}\", Localized::new(&v))` will give a localized string for `v`.\n\npub struct Localized<'b, T: Localize + ?Sized + 'b> {\n", "file_path": "kailua_diag/src/message.rs", "rank": 58, "score": 120071.72306176132 }, { "content": "pub fn local_var_definition(last_chunk: &Chunk, scoped_id: &ScopedId) -> Option<Span> {\n\n 
last_chunk.local_names.get(scoped_id).map(|def| def.def_span)\n\n}\n\n\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 59, "score": 119808.01661244468 }, { "content": "#[cfg(not(windows))]\n\nfn get_locale_string() -> Option<String> {\n\n get_locale_string_from_env()\n\n}\n\n\n", "file_path": "kailua_diag/src/message.rs", "rank": 60, "score": 119471.44656952965 }, { "content": "/// Defines the various characteristics of class systems.\n\n///\n\n/// While each class system gets a unique class system identifier (`ClassSystemId`),\n\n/// the system itself has no permanent knowledge about its identifier.\n\n/// Instead every method that may have to produce a `ClassId` is given the current `ClassSystemId`,\n\n/// either by a part of the input `ClassId` or a separate parameter if necessary.\n\n///\n\n/// The implementations are expected to lock internal data structures as necessary.\n\npub trait ClassSystem: Send + Sync + fmt::Debug {\n\n /// Invoked when a function with the `[make_class(<this system>)]` attribute gets called.\n\n /// Should return an internal handle to the class if possible, which gets stored in the context.\n\n ///\n\n /// This happens after the argument type checking, so the `argtys` are guaranteed to be\n\n /// correctly typed according to the original function (but one should take care of\n\n /// the situation that the function prototype itself is wrong).\n\n ///\n\n /// This method has a default implementation which accepts a single optional parameter\n\n /// for the parent class prototype and calls the `assume_class` method.\n\n fn make_class(&self, self_csid: ClassSystemId, argtys: SpannedSlotSeq, outerspan: Span,\n\n ctx: &mut TypeContext, report: &Report) -> kailua_diag::Result<Option<ClassId>> {\n\n if let Some(parent) = extract_parent(argtys, ctx, report)? 
{\n\n self.assume_class(self_csid, parent, outerspan, ctx, report)\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n\n\n /// Invoked when `--# assume class(<this system>)` gets processed.\n", "file_path": "kailua_check/src/class_system/mod.rs", "rank": 61, "score": 119198.17428297352 }, { "content": "// the implied slot for `n` in `Tables::ArrayN(v)`\n\nfn nslot(v: &Slot) -> Slot {\n\n Slot::new(v.flex(), Ty::new(T::Integer))\n\n}\n\n\n\nimpl Tables {\n\n pub fn generalize(self, ctx: &mut TypeContext) -> Tables {\n\n match self {\n\n Tables::Fields(r) => Tables::Fields(ctx.copy_rvar(r)),\n\n Tables::Array(v) => Tables::Array(v.generalize(ctx)),\n\n Tables::ArrayN(v) => Tables::ArrayN(v.generalize(ctx)),\n\n Tables::Map(k, v) => {\n\n let k = k.generalize(ctx);\n\n let v = v.generalize(ctx);\n\n Tables::Map(k, v)\n\n },\n\n Tables::All => Tables::All,\n\n }\n\n }\n\n\n\n fn fmt_generic<WriteTy, WriteSlot>(&self, f: &mut fmt::Formatter,\n", "file_path": "kailua_types/src/ty/tables.rs", "rank": 62, "score": 118422.29860197638 }, { "content": "pub fn classify(tokens: &[NestedToken], chunk: &Chunk, pos: Pos) -> Option<Class> {\n\n // locate a name or str token which entirely include `pos` or share an end point with `pos`.\n\n // for correctly parsed tokens there may be at most two such tokens. pick the later one.\n\n // (why later? 
in order to uniformly handle `require\"foo\"` and `require \"foo\"`.)\n\n let name_idx = match tokens.binary_search_by(|tok| tok.tok.span.begin().cmp(&pos)) {\n\n Ok(i) => { // tokens[i].begin == pos\n\n if is_name_or_str(&tokens[i]) {\n\n Some(i)\n\n } else if i > 0 && tokens[i-1].tok.span.end() >= pos && is_name_or_str(&tokens[i-1]) {\n\n Some(i - 1)\n\n } else {\n\n None\n\n }\n\n },\n\n Err(0) => { // pos < tokens[0].begin or inf\n\n None\n\n },\n\n Err(i) => { // tokens[i-1].begin < pos < tokens[i].begin or inf\n\n if pos <= tokens[i-1].tok.span.end() && is_name_or_str(&tokens[i-1]) {\n\n Some(i - 1)\n", "file_path": "kailua_langsvr/src/ops/definition.rs", "rank": 63, "score": 118017.83834383942 }, { "content": "pub fn help<F>(tokens: &[NestedToken], loc: &Loc, outputs: &[Arc<Output>],\n\n mut localize: F) -> Option<SignatureHelp>\n\n where F: for<'a> FnMut(&'a Localize) -> Localized<'a, Localize>\n\n{\n\n let empty_signature = || {\n\n SignatureHelp { signatures: Vec::new(), activeSignature: None, activeParameter: None }\n\n };\n\n\n\n let (end_idx, end) = if let Some((idx, tok)) = last_non_comment(&tokens[..loc.args_token_idx]) {\n\n (idx, tok.tok.span.end())\n\n } else {\n\n // fail fast, this is not a prefix expression\n\n return Some(empty_signature());\n\n };\n\n\n\n // now this is definitely a function, so seek more to determine this is a method call or not.\n\n // tokens[end_idx] is never a comment, so we are sure that\n\n // tokens[end_idx] is a name and preceding non-comment token is `:`\n\n // when this is a method call.\n\n let mut is_method = false;\n", "file_path": "kailua_langsvr/src/ops/signature.rs", "rank": 64, "score": 118017.83834383942 }, { "content": "fn apply_search_paths_template(mut search_paths: &[u8], start_path: &Path) -> Option<Vec<u8>> {\n\n let start_dir = if let Some(dir) = start_path.parent() {\n\n if dir == Path::new(\"\") {\n\n Path::new(\".\")\n\n } else {\n\n dir\n\n }\n\n } else {\n\n return None;\n\n };\n\n\n\n let mut 
ret = Vec::new();\n\n loop {\n\n if let Some(i) = search_paths.iter().position(|&c| c == b'{' || c == b'}') {\n\n if search_paths[i] == b'}' {\n\n return None;\n\n }\n\n ret.extend_from_slice(&search_paths[..i]);\n\n search_paths = &search_paths[i+1..];\n\n if let Some(i) = search_paths.iter().position(|&c| c == b'{' || c == b'}') {\n", "file_path": "kailua_workspace/src/lib.rs", "rank": 65, "score": 117661.44895679102 }, { "content": "// XXX won't work well in Windows\n\nfn get_locale_string_from_env() -> Option<String> {\n\n if let Ok(s) = env::var(\"LC_ALL\") {\n\n if !s.is_empty() { return Some(s); }\n\n }\n\n if let Ok(s) = env::var(\"LC_MESSAGES\") {\n\n if !s.is_empty() { return Some(s); }\n\n }\n\n // per POSIX, allow an empty string here; in Windows the empty envvar is forbidden\n\n env::var(\"LANG\").ok()\n\n}\n\n\n", "file_path": "kailua_diag/src/message.rs", "rank": 66, "score": 117389.80552785075 }, { "content": "fn flex_from_usize(v: usize) -> F {\n\n match v & FLEX_MASK {\n\n UNKNOWN_BITS => F::Unknown,\n\n DYNAMIC_USER_BITS => F::Dynamic(Dyn::User),\n\n DYNAMIC_OOPS_BITS => F::Dynamic(Dyn::Oops),\n\n JUST_BITS => F::Just,\n\n CONST_BITS => F::Const,\n\n VAR_BITS => F::Var,\n\n MODULE_BITS => F::Module,\n\n _ => panic!(\"unknown flex bits {:#x}\", v),\n\n }\n\n}\n\n\n", "file_path": "kailua_types/src/ty/slot.rs", "rank": 67, "score": 116258.79186075988 }, { "content": "// check if the caret is located in regions where the autocompletion should be disabled:\n\n//\n\n// 1. `local NAME ... | ... [= ...]`\n\n// 2. `for NAME ... | ... = ... do ... end`\n\n// 3. `function NAME ... | ( ... )`\n\n// 4. `function [NAME ...] ( ... | ... 
)`\n\n//\n\n// the check for 1 and 2 is handled by looking backward for the first token\n\n// that is not a comment, a name or a comma and is in the same nesting as the caret.\n\n// if the token exists and it's `local` or `for`, autocompletion is disabled.\n\n//\n\n// the check for 3 is handled by looking backward for the first token\n\n// that is not a comment, a name, a dot or a colon and is in the same nesting as the caret.\n\n// if the token exists and it's `function`, autocompletion is disabled.\n\n//\n\n// the check for 4 is handled similarly to the check for 1 and 2,\n\n// but once seen a `(` token, it will switch to the check for 3 at the parent nesting.\n\nfn is_name_completion_disabled(tokens: &[NestedToken], name_idx: usize) -> bool {\n\n let name_tok = &tokens[name_idx];\n\n let mut init_depth = name_tok.depth;\n\n let init_serial = name_tok.serial;\n\n\n\n let mut name_decl_possible = true; // case 1, 2 and 4a\n\n let mut func_sig_possible = true; // case 3 and 4b\n\n for (i, tok) in tokens[..name_idx].iter().enumerate().rev().take(LOOKBEHIND_LIMIT) {\n\n if !(name_decl_possible || func_sig_possible) { break; }\n\n\n\n if tok.depth <= init_depth && tok.serial != init_serial {\n\n // escaped the current nesting, stop the search\n\n return false;\n\n } else if tok.depth > init_depth {\n\n // ignore more nested tokens (but count them towards the threshold)\n\n continue;\n\n }\n\n\n\n // name_decl_possible can continue to func_sig_possible in place, so this should be first\n\n if func_sig_possible {\n", "file_path": "kailua_langsvr/src/ops/completion.rs", "rank": 68, "score": 115477.2315825342 }, { "content": "--v function()\n\nfunction M.foo() end --@< Error: Cannot index `<initializing> table` without further type information; specify more detailed type, or use `--# assume` as a last resort\n\n\n\n--! 
error\n\n\n\n--8<-- module-integer\n\nlocal M = 42 --: module\n\n\n", "file_path": "kailua_check/src/tests/module.lua", "rank": 69, "score": 115259.79667075368 }, { "content": "/// Provides the specific knowledge about defined classes and class systems.\n\npub trait ClassProvider: Send + Sync {\n\n /// Should print a type name for given nominal identifier to the formatter.\n\n fn fmt_class_name(&self, cid: ClassId, f: &mut fmt::Formatter,\n\n st: &DisplayState) -> fmt::Result;\n\n\n\n /// Should print a type name for given nominal set identifier to the formatter.\n\n fn fmt_class_system_name(&self, csid: ClassSystemId, f: &mut fmt::Formatter,\n\n st: &DisplayState) -> fmt::Result;\n\n\n\n /// Should return true if the nominal identifier `lhs` is\n\n /// a subtype of another nominal identifier `rhs`.\n\n fn is_subclass_of(&self, lhs: ClassId, rhs: ClassId) -> bool;\n\n}\n\n\n\nimpl<'a, T: ClassProvider + ?Sized> ClassProvider for &'a T {\n\n fn fmt_class_name(&self, cid: ClassId, f: &mut fmt::Formatter,\n\n st: &DisplayState) -> fmt::Result {\n\n (**self).fmt_class_name(cid, f, st)\n\n }\n\n fn fmt_class_system_name(&self, csid: ClassSystemId, f: &mut fmt::Formatter,\n", "file_path": "kailua_types/src/env/mod.rs", "rank": 70, "score": 114878.05927104599 }, { "content": "fn usize_from_flex(flex: F) -> usize {\n\n match flex {\n\n F::Unknown => UNKNOWN_BITS,\n\n F::Dynamic(Dyn::User) => DYNAMIC_USER_BITS,\n\n F::Dynamic(Dyn::Oops) => DYNAMIC_OOPS_BITS,\n\n F::Just => JUST_BITS,\n\n F::Const => CONST_BITS,\n\n F::Var => VAR_BITS,\n\n F::Module => MODULE_BITS,\n\n }\n\n}\n\n\n", "file_path": "kailua_types/src/ty/slot.rs", "rank": 71, "score": 114199.89745654845 }, { "content": "fn is_new_key(key: &Key) -> bool {\n\n match *key {\n\n Key::Str(ref s) => &s[..] 
== NEW_KEY,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "kailua_check/src/class_system/gideros.rs", "rank": 72, "score": 113242.75896439528 }, { "content": "fn is_init_key(key: &Key) -> bool {\n\n match *key {\n\n Key::Str(ref s) => &s[..] == INIT_KEY,\n\n _ => false,\n\n }\n\n}\n\n\n\nimpl GiderosClassSystem {\n\n pub fn new() -> GiderosClassSystem {\n\n GiderosClassSystem {\n\n classes: RwLock::new(Vec::new()),\n\n class_names: RwLock::new(VecMap::new()),\n\n }\n\n }\n\n\n\n fn lookup<'a>(classes: &'a [ClassDef], cid: ClassId, proto: bool,\n\n key: &Key) -> Option<&'a Spanned<Slot>> {\n\n for (_, cls) in Ancestors::new(&classes, cid.1) {\n\n let field = cls.fields(proto).get(key);\n\n if let Some(&Field::Slot(ref info)) = field {\n", "file_path": "kailua_check/src/class_system/gideros.rs", "rank": 73, "score": 113242.75896439528 }, { "content": "/// Human-readable description of various types requiring the type context.\n\n///\n\n/// Expected to implement `std::fmt::Display`.\n\npub trait Display: fmt::Debug + Sized {\n\n fn fmt_displayed(&self, f: &mut fmt::Formatter, st: &DisplayState) -> fmt::Result;\n\n\n\n fn display<'b, C>(&'b self, ctx: C) -> Displayed<'b, Self, C> {\n\n Displayed { base: self, ctx: ctx }\n\n }\n\n}\n\n\n\nimpl<T: Display> Display for Spanned<T> {\n\n fn fmt_displayed(&self, f: &mut fmt::Formatter, st: &DisplayState) -> fmt::Result {\n\n self.base.fmt_displayed(f, st)\n\n }\n\n}\n\n\n\nimpl<T: Display + ?Sized> Display for Box<T> {\n\n fn fmt_displayed(&self, f: &mut fmt::Formatter, st: &DisplayState) -> fmt::Result {\n\n (**self).fmt_displayed(f, st)\n\n }\n\n}\n\n\n", "file_path": "kailua_types/src/ty/display.rs", "rank": 74, "score": 112866.77588115506 }, { "content": "// is this bound trivial so that one can always overwrite?\n\nfn is_bound_trivial(t: &Option<Ty>) -> bool {\n\n // TODO special casing ? 
is not enough, should resolve b.bound's inner ?s as well\n\n if let Some(ref t) = *t {\n\n match **t { T::None | T::Dynamic(_) => true, _ => false }\n\n } else {\n\n true\n\n }\n\n}\n\n\n\nimpl Partition for Box<Bound> {\n\n fn create(parent: usize, rank: usize) -> Box<Bound> {\n\n Box::new(Bound { parent: Atomic::new(parent as u32), rank: rank as u8, bound: None })\n\n }\n\n\n\n fn read(&self) -> (usize /*parent*/, usize /*rank*/) {\n\n (self.parent.load(Relaxed) as usize, self.rank as usize)\n\n }\n\n\n\n fn write_parent(&self, parent: usize) {\n\n self.parent.store(parent as u32, Relaxed);\n", "file_path": "kailua_types/src/env/mod.rs", "rank": 75, "score": 112217.70662676326 }, { "content": "fn send_diagnostics(server: Server, root: &ReportTree) -> io::Result<()> {\n\n use std::path::Path;\n\n use std::collections::HashMap;\n\n use url::Url;\n\n use protocol::*;\n\n\n\n // try to deduplicate diagnostics from different paths.\n\n // each report tree has at most two reports (parsing & checking) for each path,\n\n // since they do not overlap to each other,\n\n // any duplication from different report trees can be regarded that\n\n // the same file is `require`d through different start paths.\n\n // we do count the multiplicity (i.e. 
one report tree _may_ have duplicate reports) however.\n\n\n\n fn build_key(path: &str, diag: &Diagnostic)\n\n -> (String, u64, u64, u64, u64, Option<DiagnosticSeverity>, String)\n\n {\n\n (path.to_owned(),\n\n diag.range.start.line, diag.range.start.character,\n\n diag.range.end.line, diag.range.end.character,\n\n diag.severity, diag.message.to_owned())\n", "file_path": "kailua_langsvr/src/lib.rs", "rank": 76, "score": 104267.46351102002 }, { "content": "fn parse_init_options(opts: Option<serde_json::Value>) -> InitOptions {\n\n #[derive(Deserialize)]\n\n struct Options {\n\n default_locale: String,\n\n }\n\n\n\n if let Some(opts) = opts {\n\n if let Ok(opts) = serde_json::from_value::<Options>(opts) {\n\n if let Some(locale) = kailua_diag::Locale::new(&opts.default_locale) {\n\n return InitOptions { default_locale: locale };\n\n }\n\n }\n\n }\n\n InitOptions::default()\n\n}\n\n\n", "file_path": "kailua_langsvr/src/lib.rs", "rank": 77, "score": 103920.8187985817 }, { "content": "\n\ndefine_msg! { pub UnknownAttrName<'a> { name: &'a Name }:\n\n \"ko\" => \"{name} 타입 속성을 알 수 없어서 무시합니다\",\n\n _ => \"{name} is an unknown type attribute and ignored\",\n\n}\n\n\n\ndefine_msg! { pub AttrCannotHaveAnyValues<'a> { name: &'a Name }:\n\n \"ko\" => \"{name} 타입 속성에는 아무 값도 붙을 수 없습니다\",\n\n _ => \"The type attribute {name} cannot have any values\",\n\n}\n\n\n\ndefine_msg! { pub AttrRequiresFixedNumOfValues<'a> { name: &'a Name, count: usize }:\n\n \"ko\" => \"{name} 타입 속성에는 정확히 {count}개의 값이 붙어야 합니다\",\n\n _ => \"The type attribute {name} requires exactly {count} value(s)\",\n\n}\n\n\n\ndefine_msg! 
{ pub DuplicateAttr<'a> { ty: Ty<'a> }:\n\n \"ko\" => \"이미 속성이 붙어 있는 `{ty}` 타입에 속성을 더 붙일 수 없습니다\",\n\n _ => \"Cannot add an attribute to a type `{ty}` with an existing attribute\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 78, "score": 97256.49410053743 }, { "content": "use l10nutils::Ordinal;\n\nuse ty::{self, Key, Displayed, TypeContext};\n\nuse kailua_syntax::Name;\n\n\n\npub type Ty<'a> = Displayed<'a, ty::Ty, &'a TypeContext>;\n\n\n\ndefine_msg! { pub NotSubtype<'a> { sub: &'a str, sup: &'a str }:\n\n \"ko\" => \"`{sub}`이(가) `{sup}`의 서브타입이 아닙니다\",\n\n _ => \"`{sub}` is not a subtype of `{sup}`\",\n\n}\n\n\n\ndefine_msg! { pub NotSubtypeInSelf<'a> { sub: &'a str, sup: &'a str }:\n\n \"ko\" => \"`self` 자리에 있는 `{sub}`이(가) `{sup}`의 서브타입이 아닙니다\",\n\n _ => \"`{sub}` in the `self` position is not a subtype of `{sup}`\",\n\n}\n\n\n\ndefine_msg! { pub NotSubtypeInFuncArgs<'a> { sub: &'a str, sup: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 인자 `{sub}`이(가) `{sup}`의 서브타입이 아닙니다\",\n\n _ => \"{index:+} function argument `{sub}` is not a subtype of `{sup}`\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 79, "score": 97255.84532016717 }, { "content": "\n\ndefine_msg! { pub NotSubtypeInMethodArgs<'a> { sub: &'a str, sup: &'a str, index: Ordinal }:\n\n \"ko\" => \"메소드의 {index} 인자 `{sub}`이(가) `{sup}`의 서브타입이 아닙니다\",\n\n _ => \"{index:+} method argument `{sub}` is not a subtype of `{sup}`\",\n\n}\n\n\n\ndefine_msg! { pub NotSubtypeInReturns<'a> { sub: &'a str, sup: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 반환값인 `{sub}`이(가) `{sup}`의 서브타입이 아닙니다\",\n\n _ => \"{index:+} return type `{sub}` is not a subtype of `{sup}`\",\n\n}\n\n\n\ndefine_msg! { pub NotEqual<'a> { lhs: &'a str, rhs: &'a str }:\n\n \"ko\" => \"`{lhs}`와(과) `{rhs}`이(가) 같은 타입이 아닙니다\",\n\n _ => \"`{lhs}` does not equal to `{rhs}`\",\n\n}\n\n\n\ndefine_msg! 
{ pub NotEqualInSelf<'a> { lhs: &'a str, rhs: &'a str }:\n\n \"ko\" => \"`self` 자리에 있는 `{lhs}`와(과) `{rhs}`이(가) 같은 타입이 아닙니다\",\n\n _ => \"`{lhs}` in the `self` position does not equal to `{rhs}`\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 80, "score": 97246.11523516412 }, { "content": "\n\ndefine_msg! { pub UnsupportedErrorType:\n\n \"ko\" => \"`error \\\"메시지\\\"` 타입은 아직 지원되지 않습니다\",\n\n _ => \"`error \\\"message\\\"` type is not yet supported\",\n\n}\n\n\n\ndefine_msg! { pub DuplicateFieldNameInRec<'a> { name: &'a Name }:\n\n \"ko\" => \"타입에서 레코드 이름 {name}이 중복됩니다\",\n\n _ => \"Duplicate record field {name} in the type specification\",\n\n}\n\n\n\ndefine_msg! { pub FirstFieldNameInRec:\n\n \"ko\" => \"여기서 처음 나왔습니다\",\n\n _ => \"The first duplicate appeared here\",\n\n}\n\n\n\ndefine_msg! { pub UnsupportedUnionTypeSpec:\n\n \"ko\" => \"이 합 타입은 타입 명세에서 지원되지 않습니다\",\n\n _ => \"This union type is not supported in the specification\",\n\n}\n\n\n", "file_path": "kailua_types/src/message.rs", "rank": 81, "score": 97242.97824809347 }, { "content": "\n\ndefine_msg! { pub InvalidUnionType<'a> { lhs: &'a str, rhs: &'a str }:\n\n \"ko\" => \"`{lhs}`와(과) `{rhs}`의 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type of `{lhs}` and `{rhs}`\",\n\n}\n\n\n\ndefine_msg! { pub InvalidUnionTypeInSelf<'a> { lhs: &'a str, rhs: &'a str }:\n\n \"ko\" => \"`self` 자리에 있는 `{lhs}`와(과) `{rhs}`의 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type of `{lhs}` and `{rhs}` in the `self` position\",\n\n}\n\n\n\ndefine_msg! { pub InvalidUnionTypeInFuncArgs<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 인자에서 `{lhs}`와(과) `{rhs}`의 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type of `{lhs}` and `{rhs}` in the {index} function argument\",\n\n}\n\n\n\ndefine_msg! 
{ pub InvalidUnionTypeInMethodArgs<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"메소드의 {index} 인자에서 `{lhs}`와(과) `{rhs}`의 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type of `{lhs}` and `{rhs}` in the {index} method argument\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 82, "score": 97242.07683198358 }, { "content": "\n\ndefine_msg! { pub LessArityInReturns<'a> { other: &'a str, index: Ordinal }:\n\n \"ko\" => \"반대편 타입이 `{other}`이기 때문에 {index} 반환값을 생략할 수 없습니다\",\n\n _ => \"{index:+} return value cannot be omitted because its type is `{other}`\",\n\n}\n\n\n\ndefine_msg! { pub MoreArityInFuncArgs { index: usize }:\n\n \"ko\" => \"함수에 {index}개를 넘는 인자를 넣을 수 없습니다\",\n\n _ => \"Cannot give more than {index} argument(s) to the function\",\n\n}\n\n\n\ndefine_msg! { pub MoreArityInMethodArgs { index: usize }:\n\n \"ko\" => \"`self`를 포함해 메소드에 {index}개를 넘는 인자를 넣을 수 없습니다\",\n\n _ => \"Cannot give more than {index} argument(s) including `self` to the method\",\n\n}\n\n\n\ndefine_msg! { pub MoreArityInReturns { index: usize }:\n\n \"ko\" => \"{index}개를 넘는 값을 반환할 수 없습니다\",\n\n _ => \"Cannot return more than {index} value(s)\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 83, "score": 97241.34880837404 }, { "content": "\n\ndefine_msg! { pub InvalidUnionTypeInReturns<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 반환값에서 `{lhs}`와(과) `{rhs}`의 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type of `{lhs}` and `{rhs}` in the {index} return type\",\n\n}\n\n\n\ndefine_msg! { pub ArityMismatch<'a> { other: &'a str, index: Ordinal }:\n\n \"ko\" => \"반대편 타입이 `{other}`이기 때문에 {index} 타입을 생략할 수 없습니다\",\n\n _ => \"{index:+} type cannot be omitted because the other type is `{other}`\",\n\n}\n\n\n\ndefine_msg! 
{ pub LessArityInFuncArgs<'a> { other: &'a str, index: Ordinal }:\n\n \"ko\" => \"명시된 타입이 `{other}`이기 때문에 함수의 {index} 인자를 생략할 수 없습니다\",\n\n _ => \"{index:+} function argument cannot be omitted because its type is `{other}`\",\n\n}\n\n\n\ndefine_msg! { pub LessArityInMethodArgs<'a> { other: &'a str, index: Ordinal }:\n\n \"ko\" => \"반대편 타입이 `{other}`이기 때문에 메소드의 {index} 인자를 생략할 수 없습니다\",\n\n _ => \"{index:+} method argument cannot be omitted because its type is `{other}`\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 84, "score": 97240.38348007151 }, { "content": "\n\ndefine_msg! { pub NotEqualInFuncArgs<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 인자 `{lhs}`와(과) `{rhs}`이(가) 같은 타입이 아닙니다\",\n\n _ => \"{index:+} function argument `{lhs}` does not equal to `{rhs}`\",\n\n}\n\n\n\ndefine_msg! { pub NotEqualInMethodArgs<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"메소드의 {index} 인자 `{lhs}`와(과) `{rhs}`이(가) 같은 타입이 아닙니다\",\n\n _ => \"{index:+} method argument `{lhs}` does not equal to `{rhs}`\",\n\n}\n\n\n\ndefine_msg! { pub NotEqualInReturns<'a> { lhs: &'a str, rhs: &'a str, index: Ordinal }:\n\n \"ko\" => \"함수의 {index} 반환값인 `{lhs}`와(과) `{rhs}`이(가) 같은 타입이 아닙니다\",\n\n _ => \"{index:+} return type `{lhs}` does not equal to `{rhs}`\",\n\n}\n\n\n\ndefine_msg! { pub CannotUnionType<'a> { ty: &'a str }:\n\n \"ko\" => \"`{ty}` 타입을 포함하는 합 타입을 만들 수 없습니다\",\n\n _ => \"Cannot create a union type including `{ty}`\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 85, "score": 97239.89183242475 }, { "content": " _ => \"The record cannot add a new field with the key `{key}` and \\\n\n the value type `{slot}` that is not explicitly nilable\",\n\n}\n\n\n\n// should be same to kailua_check's version\n\ndefine_msg! 
{ pub CannotUpdate<'a> { tab: &'a str }:\n\n \"ko\" => \"변경할 수 없는 `{tab}` 타입을 인덱싱해서 갱신할 수 없습니다\",\n\n _ => \"Cannot update the immutable type `{tab}` by indexing\",\n\n}\n\n\n\n// should be same to kailua_check's version\n\ndefine_msg! { pub CannotAssign<'a> { lhs: &'a str, rhs: &'a str }:\n\n \"ko\" => \"`{lhs}` 타입에 `{rhs}` 타입을 대입할 수 없습니다\",\n\n _ => \"Cannot assign `{rhs}` into `{lhs}`\",\n\n}\n\n\n\ndefine_msg! { pub CannotFilter<'a> { ty: &'a str }:\n\n \"ko\" => \"`{ty}` 타입을 좁힐 수 없습니다\",\n\n _ => \"Cannot narrow `{ty}`\",\n\n}\n", "file_path": "kailua_types/src/message.rs", "rank": 86, "score": 97237.27729594956 }, { "content": " \"ko\" => \"레코드 타입이 `{key}` 필드를 중복으로 가집니다\",\n\n _ => \"Duplicate key `{key}` found in the record type\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! { pub RecCannotHaveKey<'a> { key: &'a Key }:\n\n \"ko\" => \"레코드 타입이 `{key}` 필드를 가질 수 없습니다\",\n\n _ => \"The record cannot have a field with the key `{key}`\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! { pub RecShouldHaveKeys<'a> { keys: &'a str }:\n\n \"ko\" => \"레코드 타입이 {keys} 필드를 포함하지 않습니다\",\n\n _ => \"The record does not have a field with the key(s) {keys}\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! { pub RecExtendedWithNonNil<'a> { key: &'a Key, slot: &'a str }:\n\n \"ko\" => \"레코드 타입에 원래 존재하지 않던 `{key}` 필드는 \\\n\n 명시적으로 nil을 포함하지 않는 `{slot}` 타입으로 추가될 수 없습니다\",\n", "file_path": "kailua_types/src/message.rs", "rank": 87, "score": 97232.21809628159 }, { "content": "\n\ndefine_msg! { pub OtherTypeOrigin:\n\n \"ko\" => \"다른 타입은 여기에서 만들어졌습니다\",\n\n _ => \"The other type originates here\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! { pub InextensibleRec:\n\n \"ko\" => \"레코드 타입에 더 이상 새 필드를 추가할 수 없습니다\",\n\n _ => \"No longer possible to add a new field to this record type\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! 
{ pub RecursiveRec:\n\n \"ko\" => \"레코드 타입에서 재귀 참조가 발견되었습니다\",\n\n _ => \"Recursive cycles detected in the record type\",\n\n}\n\n\n\n// TODO should point to the correct span\n\ndefine_msg! { pub RecDuplicateKey<'a> { key: &'a Key }:\n", "file_path": "kailua_types/src/message.rs", "rank": 88, "score": 97231.78620738677 }, { "content": "use std::i32;\n\nuse std::cmp;\n\nuse std::ops;\n\nuse std::str;\n\nuse std::borrow::Cow;\n\nuse std::collections::HashMap;\n\nuse take_mut::take;\n\n\n\nuse kailua_env::{Span, Spanned, WithLoc};\n\nuse kailua_diag::{self, Result, Report, Reporter};\n\nuse kailua_syntax::{Str, Name};\n\nuse kailua_syntax::ast::{self, NameRef, Var, TypeSpec, Kind, Sig, Ex, Exp, UnOp, BinOp, Table};\n\nuse kailua_syntax::ast::{SelfParam, TypeScope, Args, St, Stmt, Block, K, Attr, M, MM, Varargs};\n\nuse kailua_types::diag::{TypeReport, TypeReportHint, TypeReportMore};\n\nuse kailua_types::ty::{Displayed, Display, TypeContext, TypeResolver};\n\nuse kailua_types::ty::{Dyn, Nil, T, Ty, TySeq, SpannedTySeq, Lattice, Union, Dummy};\n\nuse kailua_types::ty::{Key, Tables, Function, Functions};\n\nuse kailua_types::ty::{F, Slot, SlotSeq, SpannedSlotSeq, Tag, Class, ClassId};\n\nuse kailua_types::ty::flags::*;\n\nuse kailua_types::env::Types;\n\nuse env::{Env, Returns, Frame, Scope, Module, Context, SlotSpec};\n\nuse class_system::make_predefined_class_system;\n\nuse message as m;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]\n", "file_path": "kailua_check/src/check.rs", "rank": 99, "score": 44.91425655665256 } ]
Rust
src/api/reddit/repository.rs
tigorlazuardi/ridit-rs
bae8b50db3237df03e925048bf5c5eb6d575b2af
use std::{convert::TryInto, path::PathBuf, sync::Arc, time::Duration, usize}; use anyhow::{bail, Context, Error, Result}; use imagesize::blob_size; use reqwest::{header::RANGE, Client, Response}; use tokio::{ fs::{self, File}, io::AsyncWriteExt, sync::{mpsc::UnboundedSender, Semaphore}, }; use tokio_retry::{ strategy::{jitter, FixedInterval}, Retry, }; use super::models::{download_meta::DownloadMeta, download_status::DownloadStatus}; use crate::api::{ config::{config::Config, configuration::Subreddit}, reddit::models::{error::RedditError, listing::Listing}, }; #[derive(Clone, Debug)] pub struct Repository { client: Arc<Client>, config: Arc<Config>, semaphore: Arc<Semaphore>, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum PrintOut { Bar, Text, None, } static APP_USER_AGENT: &str = concat!( "id.web.tigor.", env!("CARGO_PKG_NAME"), "/v", env!("CARGO_PKG_VERSION"), " (by /u/CrowFX)" ); impl Repository { pub fn new(config: Arc<Config>) -> Self { let os = std::env::consts::OS; let user_agent = os.to_string() + ":" + APP_USER_AGENT; let client = reqwest::Client::builder() .user_agent(user_agent) .connect_timeout(Duration::from_secs(config.timeout.into())) .build() .context("failed to create request client") .unwrap(); let semaphore = Arc::new(Semaphore::new(config.download_threads)); Self { client: Arc::new(client), config, semaphore, } } pub async fn download( &self, display: PrintOut, progress: UnboundedSender<DownloadStatus>, ) -> Vec<(DownloadMeta, Result<(), Error>)> { let mut handlers = Vec::new(); for (_, subreddit) in &self.config.subreddits { let this = self.clone(); let subreddit = subreddit.clone(); let progress = progress.clone(); let handle = tokio::spawn(async move { Ok::<_, Error>(this.exec_download(subreddit, display, progress).await?) 
}); handlers.push(handle); } let mut v = Vec::new(); for handle in handlers { match handle.await.unwrap() { Ok(vec) => v.extend(vec), Err(err) => println!("{:?}", err), } } v } async fn exec_download( &self, subreddit: Subreddit, display: PrintOut, progress: UnboundedSender<DownloadStatus>, ) -> Result<Vec<(DownloadMeta, Result<(), Error>)>> { let print = || { println!("{} downloading listing", subreddit.padded_proper_name()); }; match display { PrintOut::None => {} _ => print(), } let downloads = self.download_listing(&subreddit).await?; Ok(self.download_images(downloads, subreddit, progress).await) } async fn download_images( &self, downloads: Vec<DownloadMeta>, subreddit: Subreddit, progress: UnboundedSender<DownloadStatus>, ) -> Vec<(DownloadMeta, Result<(), Error>)> { let mut handlers = Vec::new(); 'meta: for mut meta in downloads.into_iter() { for profile in &meta.profile { if self.file_exists(profile, &meta).await { continue 'meta; } } let this = self.clone(); let sem = self.semaphore.clone(); let subreddit = subreddit.clone(); let progress = progress.clone(); let handle = tokio::spawn(async move { let _x = sem.acquire().await.unwrap(); let op = this.download_image(&mut meta, subreddit, progress).await; (meta, op) }); handlers.push(handle); } let mut v = Vec::new(); for handle in handlers { v.push(handle.await.unwrap()); } v } async fn download_listing(&self, subreddit: &Subreddit) -> Result<Vec<DownloadMeta>> { let listing_url = format!( "https://reddit.com/r/{}/{}.json?limit=100", subreddit.proper_name, subreddit.sort ); let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let resp: Response = Retry::spawn(retry_strategy, || async { let res = self.client.get(&listing_url).send().await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to open connection to download listing from: {}", listing_url ) })?; if !resp.status().is_success() { let err = resp.json::<RedditError>().await.with_context(|| { format!("failed 
to deserialize json body from: {}", listing_url) })?; bail!( "downloading listing from [{}] give error: {}", subreddit.proper_name, err ); } let listing: Listing = resp .json() .await .with_context(|| format!("failed to deserialize json body from: {}", listing_url))?; Ok(listing.into_download_metas(&self.config)) } async fn download_image( &self, meta: &mut DownloadMeta, subreddit: Subreddit, progress: UnboundedSender<DownloadStatus>, ) -> Result<()> { if subreddit.download_first { self.poke_image_size(meta).await?; let mut should_continue = false; for (profile, setting) in self.config.iter() { if !meta.passed_checks(setting) { continue; } if self.file_exists(profile, meta).await { continue; } should_continue = true; meta.profile.push(profile.to_owned()); } if !should_continue { return Ok(()); } } let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let response: Response = Retry::spawn(retry_strategy, || async { let res = self.client.get(&meta.url).send().await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to open connection to download image from: {}", meta.url ) })?; let status = response.status(); if !status.is_success() { bail!(format!( "download from {} gives [{}: {}] status code", meta.url, status.as_u16(), status.canonical_reason().unwrap_or("Unknown Reason"), )); } self.ensure_download_dir(meta).await?; let temp_file = self.store_to_temp(response, meta, progress).await?; for profile in &meta.profile { let download_location = self.download_location(profile, meta); fs::copy(&temp_file, &download_location) .await .with_context(|| { format!( "failed to copy file from tmp dir to {}", download_location.display() ) })?; let dir_path = std::env::temp_dir() .join("ridit") .join(&meta.subreddit_name) .join(&meta.filename); fs::remove_file(&dir_path).await.with_context(|| { format!( "failed to remove temp downloaded file {}", download_location.display() ) })?; } Ok(()) } async fn ensure_download_dir(&self, meta: 
&DownloadMeta) -> Result<()> { for profile in &meta.profile { let download_dir = self.config.path.join(profile).join(&meta.subreddit_name); fs::create_dir_all(&download_dir).await.with_context(|| { format!( "failed to create download directory on: {}", download_dir.display() ) })?; } Ok(()) } async fn poke_image_size(&self, meta: &mut DownloadMeta) -> Result<()> { const LIMIT: usize = 1024 * 2 * 10; let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let mut resp = Retry::spawn(retry_strategy, || async { let res = self .client .get(&meta.url) .header(RANGE, LIMIT) .send() .await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to partial download an image to get image size from: {}", meta.url ) })?; let mut data: Vec<u8> = Vec::new(); while let Some(chunk) = resp.chunk().await? { data.append(&mut chunk.to_vec()); if data.len() >= LIMIT { break; } } let size = blob_size(&data) .with_context(|| format!("error getting image dimension from: {}", meta.url))?; meta.image_height = size .height .try_into() .with_context(|| "image height is too big to process")?; meta.image_width = size .width .try_into() .with_context(|| "image width is too big to process")?; Ok(()) } fn download_dir(&self, profile: &str, meta: &DownloadMeta) -> PathBuf { self.config.path.join(profile).join(&meta.subreddit_name) } fn download_location(&self, profile: &str, meta: &DownloadMeta) -> PathBuf { self.download_dir(profile, meta).join(&meta.filename) } async fn file_exists(&self, profile: &str, meta: &DownloadMeta) -> bool { fs::metadata(self.download_location(profile, meta)) .await .is_ok() } async fn store_to_temp( &self, mut resp: Response, meta: &DownloadMeta, progress: UnboundedSender<DownloadStatus>, ) -> Result<PathBuf> { let dir_path = std::env::temp_dir() .join("ridit") .join(&meta.subreddit_name); fs::create_dir_all(&dir_path).await?; let file_path = dir_path.join(&meta.filename); let mut file = File::create(&file_path) .await 
.context("cannot create file on tmp dir")?; let download_length = resp.content_length().unwrap_or(0); progress .send(meta.as_download_status(download_length, 0)) .unwrap(); while let Some(chunk) = resp.chunk().await? { progress .send(meta.as_download_status(download_length, chunk.len().try_into().unwrap())) .unwrap(); if let Err(err) = file.write(&chunk).await { progress .send( meta.as_download_status(download_length, chunk.len().try_into().unwrap()) .with_error(err.to_string()), ) .unwrap(); bail!("failed to save image from {}. cause: {}", meta.url, err) } } progress .send(meta.as_download_status(download_length, 0).set_finished()) .unwrap(); Ok(file_path) } pub async fn subreddit_exist(subreddit: &mut String) -> Result<bool> { let url = format!("https://reddit.com/r/{}.json", subreddit); let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let resp: Response = Retry::spawn(retry_strategy, || async { let res = reqwest::get(&url).await?; Ok::<Response, Error>(res) }) .await .with_context(|| format!("failed to check subreddit {}", subreddit))?; let listing: Listing = resp .json() .await .with_context(|| format!("failed to deserialize json body from: {}", url))?; match listing.data.children.get(0) { Some(v) => { subreddit.clear(); subreddit.push_str(&v.data.subreddit); Ok(true) } None => Ok(false), } } }
use std::{convert::TryInto, path::PathBuf, sync::Arc, time::Duration, usize}; use anyhow::{bail, Context, Error, Result}; use imagesize::blob_size; use reqwest::{header::RANGE, Client, Response}; use tokio::{ fs::{self, File}, io::AsyncWriteExt, sync::{mpsc::UnboundedSender, Semaphore}, }; use tokio_retry::{ strategy::{jitter, FixedInterval}, Retry, }; use super::models::{download_meta::DownloadMeta, download_status::DownloadStatus}; use crate::api::{ config::{config::Config, configuration::Subreddit}, reddit::models::{error::RedditError, listing::Listing}, }; #[derive(Clone, Debug)] pub struct Repository { client: Arc<Client>, config: Arc<Config>, semaphore: Arc<Semaphore>, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum PrintOut { Bar, Text, None, } static APP_USER_AGENT: &str = concat!( "id.web.tigor.", env!("CARGO_PKG_NAME"), "/v", env!("CARGO_PKG_VERSION"), " (by /u/CrowFX)" ); impl Repository { pub fn new(config: Arc<Config>) -> Self { let os = std::env::consts::OS; let user_agent = os.to_string() + ":" + APP_USER_AGENT; let client = reqwest::Client::builder() .user_agent(user_agent) .connect_timeout(Duration::from_secs(config.timeout.into())) .build() .context("failed to create request client") .unwrap(); let semaphore = Arc::new(Semaphore::new(config.download_threads)); Self { client: Arc::new(client), config, semaphore, } } pub async fn download( &self, display: PrintOut, progress: UnboundedSender<DownloadStatus>, ) -> Vec<(DownloadMeta, Result<(), Error>)> { let mut handlers = Vec::new(); for (_, subreddit) in &self.config.subreddits { let this = self.clone(); let subreddit = subreddit.clone(); let progress = progress.clone(); let handle = tokio::spawn(async move { Ok::<_, Error>(this.exec_download(subreddit, display, progress).await?) 
}); handlers.push(handle); } let mut v = Vec::new(); for handle in handlers { match handle.await.unwrap() { Ok(vec) => v.extend(vec), Err(err) => println!("{:?}", err), } } v } async fn exec_download( &self, subreddit: Subreddit, display: PrintOut, progress: UnboundedSender<DownloadStatus>, ) -> Result<Vec<(DownloadMeta, Result<(), Error>)>> { let print = || { println!("{} downloading listing", subreddit.padded_proper_name()); }; match display { PrintOut::None => {} _ => print(), } let downloads = self.download_listing(&subreddit).await?; Ok(self.download_images(downloads, subreddit, progress).await) } async fn download_images( &self, downloads: Vec<DownloadMeta>, subreddit: Subreddit, progress: UnboundedSender<DownloadStatus>, ) -> Vec<(DownloadMeta, Result<(), Error>)> { let mut handlers = Vec::new(); 'meta: for mut meta in downloads.into_iter() { for profile in &meta.profile { if self.file_exists(profile, &meta).await { continue 'meta; } } let this = self.clone(); let sem = self.semaphore.clone(); let subreddit = subreddit.clone(); let progress = progress.clone(); let handle = tokio::spawn(async move { let _x = sem.acquire().await.unwrap(); let op = this.download_image(&mut meta, subreddit, progress).await; (meta, op) }); handlers.push(handle); } let mut v = Vec::new(); for handle in handlers { v.push(handle.await.unwrap()); } v } async fn download_listing(&self, subreddit: &Subreddit) -> Result<Vec<DownloadMeta>> { let listing_url = format!( "https://reddit.com/r/{}/{}.json?limit=100", subreddit.proper_name, subreddit.sort ); let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let resp: Response = Retry::spawn(retry_strategy, || async { let res = self.client.get(&listing_url).send().await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to open connection to download listing from: {}", listing_url ) })?; if !resp.status().is_success() { let err = resp.json::<RedditError>().await.with_context(|| { format!("failed 
to deserialize json body from: {}", listing_url) })?; bail!( "downloading listing from [{}] give error: {}", subreddit.proper_name, err ); } let listing: Listing = resp .json() .await .with_context(|| format!("failed to deserialize json body from: {}", listing_url))?; Ok(listing.into_download_metas(&self.config)) } async fn download_image( &self, meta: &mut DownloadMeta, subreddit: Subreddit, progress: UnboundedSender<DownloadStatus>, ) -> Result<()> { if subreddit.download_first { self.poke_image_size(meta).await?; let mut should_continue = false; for (profile, setting) in self.config.iter() { if !meta.passed_checks(setting) { continue; } if self.file_exists(profile, meta).await { continue; } should_continue = true; meta.profile.push(profile.to_owned()); } if !should_continue { return Ok(()); } } let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let response: Response = Retry::spawn(retry_strategy, || async { let res = self.client.get(&meta.url).send().await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to open connection to download image from: {}", meta.url ) })?; let status = response.status(); if !status.is_success() { bail!(format!( "download from {} gives [{}: {}] status code", meta.url, status.as_u16(), status.canonical_reason().unwrap_or("Unknown Reason"), )); } self.ensure_download_dir(meta).await?; let temp_file = self.store_to_temp(response, meta, progress).await?; for profile in &meta.profile { let download_location = self.download_location(profile, meta); fs::copy(&temp_file, &download_location) .await .with_context(|| { format!( "failed to copy file from tmp dir to {}", download_location.display() ) })?; let dir_path = std::env::temp_dir() .join("ridit") .join(&meta.subreddit_name) .join(&meta.filename); fs::remove_file(&dir_path).await.with_context(|| { format!( "failed to remove temp downloaded file {}", download_location.display() ) })?; } Ok(()) }
async fn poke_image_size(&self, meta: &mut DownloadMeta) -> Result<()> { const LIMIT: usize = 1024 * 2 * 10; let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let mut resp = Retry::spawn(retry_strategy, || async { let res = self .client .get(&meta.url) .header(RANGE, LIMIT) .send() .await?; Ok::<Response, Error>(res) }) .await .with_context(|| { format!( "failed to partial download an image to get image size from: {}", meta.url ) })?; let mut data: Vec<u8> = Vec::new(); while let Some(chunk) = resp.chunk().await? { data.append(&mut chunk.to_vec()); if data.len() >= LIMIT { break; } } let size = blob_size(&data) .with_context(|| format!("error getting image dimension from: {}", meta.url))?; meta.image_height = size .height .try_into() .with_context(|| "image height is too big to process")?; meta.image_width = size .width .try_into() .with_context(|| "image width is too big to process")?; Ok(()) } fn download_dir(&self, profile: &str, meta: &DownloadMeta) -> PathBuf { self.config.path.join(profile).join(&meta.subreddit_name) } fn download_location(&self, profile: &str, meta: &DownloadMeta) -> PathBuf { self.download_dir(profile, meta).join(&meta.filename) } async fn file_exists(&self, profile: &str, meta: &DownloadMeta) -> bool { fs::metadata(self.download_location(profile, meta)) .await .is_ok() } async fn store_to_temp( &self, mut resp: Response, meta: &DownloadMeta, progress: UnboundedSender<DownloadStatus>, ) -> Result<PathBuf> { let dir_path = std::env::temp_dir() .join("ridit") .join(&meta.subreddit_name); fs::create_dir_all(&dir_path).await?; let file_path = dir_path.join(&meta.filename); let mut file = File::create(&file_path) .await .context("cannot create file on tmp dir")?; let download_length = resp.content_length().unwrap_or(0); progress .send(meta.as_download_status(download_length, 0)) .unwrap(); while let Some(chunk) = resp.chunk().await? 
{ progress .send(meta.as_download_status(download_length, chunk.len().try_into().unwrap())) .unwrap(); if let Err(err) = file.write(&chunk).await { progress .send( meta.as_download_status(download_length, chunk.len().try_into().unwrap()) .with_error(err.to_string()), ) .unwrap(); bail!("failed to save image from {}. cause: {}", meta.url, err) } } progress .send(meta.as_download_status(download_length, 0).set_finished()) .unwrap(); Ok(file_path) } pub async fn subreddit_exist(subreddit: &mut String) -> Result<bool> { let url = format!("https://reddit.com/r/{}.json", subreddit); let retry_strategy = FixedInterval::from_millis(100).map(jitter).take(3); let resp: Response = Retry::spawn(retry_strategy, || async { let res = reqwest::get(&url).await?; Ok::<Response, Error>(res) }) .await .with_context(|| format!("failed to check subreddit {}", subreddit))?; let listing: Listing = resp .json() .await .with_context(|| format!("failed to deserialize json body from: {}", url))?; match listing.data.children.get(0) { Some(v) => { subreddit.clear(); subreddit.push_str(&v.data.subreddit); Ok(true) } None => Ok(false), } } }
async fn ensure_download_dir(&self, meta: &DownloadMeta) -> Result<()> { for profile in &meta.profile { let download_dir = self.config.path.join(profile).join(&meta.subreddit_name); fs::create_dir_all(&download_dir).await.with_context(|| { format!( "failed to create download directory on: {}", download_dir.display() ) })?; } Ok(()) }
function_block-full_function
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n\ttonic_build::compile_protos(\"proto/server.proto\")?;\n\n\tOk(())\n\n}\n", "file_path": "src/build.rs", "rank": 0, "score": 114218.0222370955 }, { "content": "fn project_dir() -> ProjectDirs {\n\n\tProjectDirs::from(\"id.web\", \"tigor\", \"ridit\")\n\n\t\t.context(\"failed to get project directory\")\n\n\t\t.unwrap()\n\n}\n\n\n", "file_path": "src/api/config/config.rs", "rank": 1, "score": 90891.80509467734 }, { "content": "fn filename() -> PathBuf {\n\n\tlet pd = project_dir();\n\n\tpd.config_dir().join(CONFIG_FILENAME)\n\n}\n\n\n\npub async fn read_config() -> Result<Config> {\n\n\tlet filename = filename();\n\n\tif !config_exist().await {\n\n\t\tprintln!(\n\n\t\t\t\"file config does not exist. creating a new config on {}\",\n\n\t\t\tproject_dir().config_dir().join(CONFIG_FILENAME).display()\n\n\t\t);\n\n\t\tcreate_config_dir().await;\n\n\t\twrite_config(&Config::default()).await?;\n\n\t}\n\n\tlet content = fs::read_to_string(&filename)\n\n\t\t.await\n\n\t\t.with_context(|| format!(\"cannot find configuration file in: {}\", filename.display()))?;\n\n\n\n\tlet config: Config =\n", "file_path": "src/api/config/config.rs", "rank": 2, "score": 60424.88316497913 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\nuse pad::PadStr;\n\n\n\nuse crate::api::config::configuration::Configuration;\n\n\n\nuse super::download_status::DownloadStatus;\n\n\n\npub struct DownloadMeta {\n\n\tpub url: String,\n\n\tpub subreddit_name: String,\n\n\tpub image_height: u32,\n\n\tpub image_width: u32,\n\n\tpub post_link: String,\n\n\tpub nsfw: bool,\n\n\tpub filename: String,\n\n\tpub title: String,\n\n\tpub author: String,\n\n\tpub profile: Vec<String>,\n\n}\n", "file_path": "src/api/reddit/models/download_meta.rs", "rank": 3, "score": 51143.46158273301 }, { "content": "\n\nimpl DownloadMeta {\n\n\tpub fn get_file_location<P: AsRef<Path>>(&self, base_location: P) -> PathBuf 
{\n\n\t\tbase_location\n\n\t\t\t.as_ref()\n\n\t\t\t.join(&self.subreddit_name)\n\n\t\t\t.join(&self.filename)\n\n\t\t\t.to_path_buf()\n\n\t}\n\n\n\n\tpub fn passed_checks(&self, config: &Configuration) -> bool {\n\n\t\tself.passed_aspect_ratio(config) && self.passed_mininum_size(config)\n\n\t}\n\n\n\n\tpub fn passed_aspect_ratio(&self, config: &Configuration) -> bool {\n\n\t\tif !config.aspect_ratio.enable {\n\n\t\t\treturn true;\n\n\t\t}\n\n\t\tlet ar = config.aspect_ratio.width as f32 / config.aspect_ratio.height as f32;\n\n\t\tlet min_ratio = ar - config.aspect_ratio.range;\n", "file_path": "src/api/reddit/models/download_meta.rs", "rank": 4, "score": 51138.536825291965 }, { "content": "\t\tlet max_ratio = ar + config.aspect_ratio.range;\n\n\t\tlet image_ratio = self.image_width as f32 / self.image_height as f32;\n\n\t\timage_ratio >= min_ratio && image_ratio <= max_ratio\n\n\t}\n\n\n\n\tpub fn passed_mininum_size(&self, config: &Configuration) -> bool {\n\n\t\tif !config.minimum_size.enable {\n\n\t\t\treturn true;\n\n\t\t}\n\n\t\tself.image_width >= config.minimum_size.width\n\n\t\t\t&& self.image_height >= config.minimum_size.height\n\n\t}\n\n\n\n\tpub fn padded_subreddit_name(&self) -> String {\n\n\t\t(\"[\".to_string() + &self.subreddit_name + \"]\").pad_to_width(23)\n\n\t}\n\n\n\n\tpub fn padded_profiles(&self) -> String {\n\n\t\tformat!(\"{:?}\", self.profile).pad_to_width(23)\n\n\t}\n", "file_path": "src/api/reddit/models/download_meta.rs", "rank": 5, "score": 51137.86239777335 }, { "content": "\n\n\tpub fn as_download_status(&self, download_length: u64, chunk_length: u64) -> DownloadStatus {\n\n\t\tDownloadStatus::new(\n\n\t\t\tself.subreddit_name.to_owned(),\n\n\t\t\tself.profile.to_owned(),\n\n\t\t\tdownload_length,\n\n\t\t\tchunk_length,\n\n\t\t\tself.url.to_owned(),\n\n\t\t)\n\n\t}\n\n}\n", "file_path": "src/api/reddit/models/download_meta.rs", "rank": 6, "score": 51131.79603710601 }, { "content": "\t\ttoml::from_str(&content).context(\"bad 
configuration. failed to parse config file\")?;\n\n\tOk(config)\n\n}\n\n\n\npub async fn write_config(c: &Config) -> Result<()> {\n\n\tlet filename = filename();\n\n\tlet buf = toml::to_string_pretty(c)?;\n\n\tfs::write(&filename, &buf).await.with_context(|| {\n\n\t\tformat!(\n\n\t\t\t\"failed to write configuration to config directory: {}\",\n\n\t\t\tfilename.display(),\n\n\t\t)\n\n\t})?;\n\n\tOk(())\n\n}\n\n\n\npub async fn create_config_dir() {\n\n\tlet pd = project_dir();\n\n\tlet pd = pd.config_dir();\n\n\tfs::create_dir_all(&pd).await.ok();\n\n}\n\n\n\npub async fn config_exist() -> bool {\n\n\tlet pd = project_dir();\n\n\tlet pd = pd.config_dir().join(CONFIG_FILENAME);\n\n\tfs::metadata(pd).await.is_ok()\n\n}\n", "file_path": "src/api/config/config.rs", "rank": 7, "score": 34452.36258991358 }, { "content": "use std::{\n\n\tcollections::BTreeMap,\n\n\tops::{Deref, DerefMut},\n\n\tpath::PathBuf,\n\n};\n\n\n\nuse anyhow::{Context, Result};\n\nuse directories::{ProjectDirs, UserDirs};\n\nuse dirs::home_dir;\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::fs;\n\n\n\nuse super::{\n\n\tconfiguration::{AspectRatio, Configuration, MinimumSize, Subreddit},\n\n\tserver::ServerConfig,\n\n};\n\n\n\npub static CONFIG_FILENAME: &str = \"ridit.toml\";\n\n\n\npub type Subreddits = BTreeMap<String, Subreddit>;\n", "file_path": "src/api/config/config.rs", "rank": 8, "score": 34452.08021862757 }, { "content": "pub type Settings = BTreeMap<String, Configuration>;\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n\n#[serde(default)]\n\npub struct Config {\n\n\t/// Profile to set configurations to\n\n\tpub focused_profile: String,\n\n\tpub timeout: u32,\n\n\tpub download_threads: usize,\n\n\tpub path: PathBuf,\n\n\tpub server: ServerConfig,\n\n\tpub settings: Settings,\n\n\tpub subreddits: Subreddits,\n\n}\n\n\n\nimpl Config {\n\n\tpub fn get_mut_configuration(&mut self) -> Result<&mut Configuration> {\n\n\t\tlet active = 
self.focused_profile.to_owned();\n\n\t\tOk(self\n\n\t\t\t.get_mut(&active)\n", "file_path": "src/api/config/config.rs", "rank": 9, "score": 34450.632368592094 }, { "content": "\t\t\t.with_context(|| format!(\"profile {} does not exist!\", active))?)\n\n\t}\n\n\n\n\tpub fn get_configuration(&self) -> Result<&Configuration> {\n\n\t\tOk(self\n\n\t\t\t.get(&self.focused_profile)\n\n\t\t\t.with_context(|| format!(\"profile {} does not exist!\", self.focused_profile))?)\n\n\t}\n\n}\n\n\n\nimpl Deref for Config {\n\n\ttype Target = BTreeMap<String, Configuration>;\n\n\n\n\tfn deref(&self) -> &Self::Target {\n\n\t\t&self.settings\n\n\t}\n\n}\n\n\n\nimpl DerefMut for Config {\n\n\tfn deref_mut(&mut self) -> &mut Self::Target {\n", "file_path": "src/api/config/config.rs", "rank": 10, "score": 34447.02159455416 }, { "content": "\t\t&mut self.settings\n\n\t}\n\n}\n\n\n\nimpl Default for Config {\n\n\tfn default() -> Self {\n\n\t\tlet mut m: BTreeMap<String, Configuration> = BTreeMap::new();\n\n\t\tm.insert(\"main\".to_string(), Configuration::default());\n\n\t\tlet mut subs: Subreddits = BTreeMap::new();\n\n\t\tlet wallpaper = \"wallpaper\".to_string();\n\n\t\tlet wallpapers = \"wallpapers\".to_string();\n\n\t\tsubs.insert(wallpaper.clone(), Subreddit::new_default(wallpaper));\n\n\t\tsubs.insert(wallpapers.clone(), Subreddit::new_default(wallpapers));\n\n\t\tsubs.insert(\n\n\t\t\t\"mobilewallpaper\".to_string(),\n\n\t\t\tSubreddit::new_default(String::from(\"MobileWallpaper\")),\n\n\t\t);\n\n\t\tlet mobile_config = Configuration {\n\n\t\t\taspect_ratio: AspectRatio {\n\n\t\t\t\tenable: true,\n", "file_path": "src/api/config/config.rs", "rank": 11, "score": 34435.88649713911 }, { "content": "\t\t\t\theight: 16,\n\n\t\t\t\twidth: 9,\n\n\t\t\t\trange: 0.3,\n\n\t\t\t},\n\n\t\t\tminimum_size: MinimumSize {\n\n\t\t\t\tenable: true,\n\n\t\t\t\theight: 1920,\n\n\t\t\t\twidth: 1080,\n\n\t\t\t},\n\n\t\t};\n\n\t\tm.insert(\"mobile\".to_string(), mobile_config);\n\n\t\tlet p = match 
UserDirs::new()\n\n\t\t\t.expect(\"cannot find user directory\")\n\n\t\t\t.picture_dir()\n\n\t\t{\n\n\t\t\tSome(path) => path.join(\"ridit\"),\n\n\t\t\tNone => home_dir()\n\n\t\t\t\t.expect(\"cannot found home dir for current user\")\n\n\t\t\t\t.join(\"Pictures\")\n\n\t\t\t\t.join(\"ridit\"),\n", "file_path": "src/api/config/config.rs", "rank": 12, "score": 34432.63355201352 }, { "content": "\t\t};\n\n\t\tConfig {\n\n\t\t\tfocused_profile: \"main\".to_string(),\n\n\t\t\tpath: p,\n\n\t\t\tdownload_threads: 8,\n\n\t\t\ttimeout: 10,\n\n\t\t\tsettings: m,\n\n\t\t\tsubreddits: subs,\n\n\t\t\tserver: ServerConfig::default(),\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/api/config/config.rs", "rank": 13, "score": 34431.10436982891 }, { "content": "use anyhow::Result;\n\nuse structopt::StructOpt;\n\n\n\nuse crate::api::config::config::Config;\n\n\n\nuse super::subreddit::OutFormat;\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub struct Print {\n\n\t/// Selects output format\n\n\t#[structopt(long, short, default_value = \"toml\")]\n\n\tpub format: OutFormat,\n\n}\n\n\n\nimpl Print {\n\n\tpub fn print(&self, config: &Config) -> Result<()> {\n\n\t\tOk(match self.format {\n\n\t\t\tOutFormat::JSON => {\n\n\t\t\t\tlet val = serde_json::to_string_pretty(config)?;\n\n\t\t\t\tprintln!(\"{}\", val);\n\n\t\t\t}\n\n\t\t\tOutFormat::TOML => {\n\n\t\t\t\tlet val = toml::to_string_pretty(config)?;\n\n\t\t\t\tprintln!(\"{}\", val);\n\n\t\t\t}\n\n\t\t})\n\n\t}\n\n}\n", "file_path": "src/cli/print.rs", "rank": 14, "score": 29225.862366758316 }, { "content": "\t/// Examples:\n\n\t///\n\n\t/// adding subreddit using default settings: `ridit subreddit add wallpaper wallpapers`\n\n\t#[structopt(visible_aliases = &[\"insert\", \"update\"])]\n\n\tAdd(AddSubreddit),\n\n\t/// Remove subreddit(s) from subscription\n\n\t#[structopt(visible_aliases = &[\"delete\", \"rm\"])]\n\n\tRemove(InputOnly),\n\n\t/// List added subreddits\n\n\t#[structopt(visible_alias = 
\"ls\")]\n\n\tList(Format),\n\n}\n\n\n\nimpl Subreddit {\n\n\tpub async fn handle(&self, config: &mut Config) -> Result<()> {\n\n\t\tOk(match &self {\n\n\t\t\tSelf::Add(add) => Self::add_subreddit(add, config).await?,\n\n\t\t\tSelf::Remove(rem) => Self::remove_subreddit(rem, config).await?,\n\n\t\t\tSelf::List(opts) => Self::list(opts, config).await?,\n\n\t\t})\n", "file_path": "src/cli/subreddit.rs", "rank": 15, "score": 28815.796789074127 }, { "content": "use std::{convert::Infallible, fmt::Display, str::FromStr};\n\nuse structopt::StructOpt;\n\n\n\nuse crate::api::{\n\n\tconfig::{\n\n\t\tconfig::{write_config, Config},\n\n\t\tconfiguration::Sort,\n\n\t},\n\n\treddit::repository::Repository,\n\n};\n\nuse anyhow::{bail, Context, Error, Result};\n\n\n\nuse crate::api::config::configuration::Subreddit as SubredditConf;\n\n\n\nuse super::Format;\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub enum Subreddit {\n\n\t/// Add subreddit(s) to subscribe\n\n\t///\n", "file_path": "src/cli/subreddit.rs", "rank": 16, "score": 28812.50584305106 }, { "content": "\t}\n\n}\n\n\n\nimpl Display for OutFormat {\n\n\tfn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n\t\tmatch self {\n\n\t\t\tSelf::JSON => write!(f, \"json\"),\n\n\t\t\tSelf::TOML => write!(f, \"toml\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl FromStr for OutFormat {\n\n\ttype Err = Infallible;\n\n\n\n\tfn from_str(s: &str) -> std::result::Result<Self, Self::Err> {\n\n\t\tmatch s.to_lowercase().as_str() {\n\n\t\t\t\"json\" => Ok(Self::JSON),\n\n\t\t\t\"toml\" => Ok(Self::TOML),\n\n\t\t\t_ => Ok(Self::TOML),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/cli/subreddit.rs", "rank": 17, "score": 28810.01303643411 }, { "content": "\t\t}\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"added subreddits: {:?}\", result);\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn remove_subreddit(remove: &InputOnly, config: &mut Config) -> Result<()> {\n\n\t\tif remove.input.len() < 1 {\n\n\t\t\tbail!(\"no subreddit specified to 
remove\")\n\n\t\t}\n\n\t\tlet mut result = vec![];\n\n\t\tfor name in &remove.input {\n\n\t\t\tmatch config.subreddits.remove(name) {\n\n\t\t\t\tSome(_) => result.push(name.to_owned()),\n\n\t\t\t\tNone => println!(\"subreddit {} does not exist in configuration\", name),\n\n\t\t\t}\n\n\t\t}\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"removed subreddits: {:?}\", result);\n\n\t\tOk(())\n", "file_path": "src/cli/subreddit.rs", "rank": 18, "score": 28809.969687583063 }, { "content": "\t}\n\n\n\n\tasync fn list(opts: &Format, config: &Config) -> Result<()> {\n\n\t\tmatch opts.format {\n\n\t\t\tOutFormat::JSON => {\n\n\t\t\t\tlet val = serde_json::to_string_pretty(&config.subreddits)\n\n\t\t\t\t\t.context(\"failed to serialize subreddits to json format\")?;\n\n\t\t\t\tprintln!(\"{}\", val);\n\n\t\t\t}\n\n\t\t\tOutFormat::TOML => {\n\n\t\t\t\tlet val = toml::to_string_pretty(&config.subreddits)\n\n\t\t\t\t\t.context(\"failed to serialize subreddits to toml format\")?;\n\n\t\t\t\tprintln!(\"{}\", val);\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n", "file_path": "src/cli/subreddit.rs", "rank": 19, "score": 28809.837266155962 }, { "content": "\t}\n\n\n\n\tasync fn add_subreddit(add: &AddSubreddit, config: &mut Config) -> Result<()> {\n\n\t\tif add.input.len() < 1 {\n\n\t\t\tbail!(\"no new subreddits specified\")\n\n\t\t}\n\n\t\tlet mut conf = SubredditConf::new_default(\"\".to_string());\n\n\t\tconf.nsfw = !add.no_nsfw;\n\n\t\tconf.download_first = add.download_first;\n\n\t\tconf.sort = add.sort;\n\n\t\tlet mut handlers = Vec::new();\n\n\t\tfor name in &add.input {\n\n\t\t\tlet exist = config.subreddits.get(name).is_some();\n\n\t\t\tlet mut name = name.to_owned();\n\n\t\t\tlet handler = tokio::spawn(async move {\n\n\t\t\t\tif exist {\n\n\t\t\t\t\treturn (name, Ok::<bool, Error>(true));\n\n\t\t\t\t}\n\n\t\t\t\tlet result = Repository::subreddit_exist(&mut name).await;\n\n\t\t\t\t(name, result)\n", "file_path": "src/cli/subreddit.rs", "rank": 20, "score": 
28808.441540952797 }, { "content": "\t\t\t});\n\n\t\t\thandlers.push(handler);\n\n\t\t}\n\n\t\tlet mut result = vec![];\n\n\t\tfor handler in handlers {\n\n\t\t\tlet (name, join_result) = handler.await.unwrap();\n\n\t\t\tmatch join_result {\n\n\t\t\t\tOk(b) if b => {\n\n\t\t\t\t\tlet mut conf = conf.clone();\n\n\t\t\t\t\tconf.proper_name = name.clone();\n\n\t\t\t\t\tconfig.subreddits.insert(name.to_lowercase(), conf.clone());\n\n\t\t\t\t\tresult.push(name);\n\n\t\t\t\t}\n\n\t\t\t\tOk(_) => {\n\n\t\t\t\t\tprintln!(\"subreddit '{}' seems to be empty\", name);\n\n\t\t\t\t}\n\n\t\t\t\tErr(_) => {\n\n\t\t\t\t\tprintln!(\"subreddit '{}' seems to be invalid or don't exist\", name);\n\n\t\t\t\t}\n\n\t\t\t}\n", "file_path": "src/cli/subreddit.rs", "rank": 21, "score": 28805.335759533635 }, { "content": "\n\n\t/// Sets the sort method. defaults to `new`\n\n\t#[structopt(short, long, default_value = \"new\")]\n\n\tsort: Sort,\n\n}\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub struct InputOnly {\n\n\tinput: Vec<String>,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum OutFormat {\n\n\tJSON,\n\n\tTOML,\n\n}\n\n\n\nimpl Default for OutFormat {\n\n\tfn default() -> Self {\n\n\t\tSelf::TOML\n", "file_path": "src/cli/subreddit.rs", "rank": 22, "score": 28800.220633435078 }, { "content": "#[derive(Debug, StructOpt, Clone)]\n\npub struct AddSubreddit {\n\n\tinput: Vec<String>,\n\n\t/// Prevent nsfw tagged images from being downloaded.\n\n\t#[structopt(short, long)]\n\n\tno_nsfw: bool,\n\n\t/// Images are downloaded first before checked for size.\n\n\t///\n\n\t/// Not all subreddit has metadata for image size. For those kind of subreddits, you have to\n\n\t/// download them first before the size can be checked and added to list.\n\n\t///\n\n\t/// How to know which subreddits have them? Add to subscribe list and see if any images are downloaded\n\n\t/// from them. 
If there's no images downloaded after making sure the settings are correct and the\n\n\t/// subreddit is in fact, an images collection subreddit, then enable this flag when adding\n\n\t/// subreddit.\n\n\t///\n\n\t/// You can replace existing subreddit settings using the add command. It will update the\n\n\t/// settings instead of adding double entry.\n\n\t#[structopt(short, long)]\n\n\tdownload_first: bool,\n", "file_path": "src/cli/subreddit.rs", "rank": 23, "score": 28795.23356122513 }, { "content": "\t\t};\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn set_profile(&self, profile_name: &str, config: &mut Config) -> Result<()> {\n\n\t\tif let None = config.get(profile_name) {\n\n\t\t\tbail!(\"profile '{}' does not exist in configuration\", profile_name)\n\n\t\t}\n\n\t\tconfig.focused_profile = profile_name.to_string();\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"profile is set to '{}'\", profile_name);\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn list_profile(&self, fmt: &Format, config: &Config) -> Result<()> {\n\n\t\tlet text = match fmt.format {\n\n\t\t\tOutFormat::TOML => toml::to_string_pretty(&config.settings)?,\n\n\t\t\tOutFormat::JSON => serde_json::to_string_pretty(&config.settings)?,\n\n\t\t};\n\n\t\tprintln!(\"{}\", text);\n", "file_path": "src/cli/profile.rs", "rank": 24, "score": 28714.226279193514 }, { "content": "}\n\n\n\nimpl Download {\n\n\tpub async fn handle(&self, config: &mut Config) -> Result<()> {\n\n\t\tOk(match &self {\n\n\t\t\tSelf::Path { input } => Download::path(input, config).await?,\n\n\t\t\tSelf::ConnectTimeout { input } => Download::connect_timeout(*input, config).await?,\n\n\t\t\tSelf::Threads { input } => Download::threads(*input, config).await?,\n\n\t\t})\n\n\t}\n\n\n\n\tasync fn path<P: AsRef<Path>>(path: P, config: &mut Config) -> Result<()> {\n\n\t\tlet p = path.as_ref().to_path_buf();\n\n\t\tconfig.path = p.clone();\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"download path is set to {}\", 
p.display());\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn connect_timeout(input: u32, config: &mut Config) -> Result<()> {\n", "file_path": "src/cli/download.rs", "rank": 25, "score": 28711.5662449998 }, { "content": "impl Profile for ProfileController {\n\n\tasync fn upsert(&self, request: Request<ProfileUpsert>) -> Result<Response<Reply>, Status> {\n\n\t\tlet mut config = Config::server_read_config().await?;\n\n\t\tconfig.server_upsert_profile(request.into_inner()).await?;\n\n\t\tOk(Response::new(Reply::acknowledged()))\n\n\t}\n\n\n\n\tasync fn remove(&self, request: Request<ProfileRemove>) -> Result<Response<Reply>, Status> {\n\n\t\tlet mut config = Config::server_read_config().await?;\n\n\t\tlet req = request.into_inner();\n\n\t\tif let None = config.remove(&req.name) {\n\n\t\t\tErr(Status::not_found(format!(\n\n\t\t\t\t\"profile '{}' does not exist in configuration\",\n\n\t\t\t\treq.name\n\n\t\t\t)))\n\n\t\t} else {\n\n\t\t\twrite_config(&config)\n\n\t\t\t\t.await\n\n\t\t\t\t.map_err(|err| Status::failed_precondition(err.to_string()))?;\n\n\t\t\tOk(Response::new(Reply::acknowledged()))\n", "file_path": "src/server/profile.rs", "rank": 26, "score": 28705.899215699596 }, { "content": "\t}\n\n\n\n\tasync fn remove_profile(&self, input: &str, config: &mut Config) -> Result<()> {\n\n\t\tif let None = config.remove(input) {\n\n\t\t\tbail!(\"profile '{}' does not exist in configuration\", input)\n\n\t\t}\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"removed profile '{}'\", input);\n\n\t\tOk(())\n\n\t}\n\n}\n", "file_path": "src/cli/profile.rs", "rank": 27, "score": 28704.76370517833 }, { "content": "\t/// minimum size height to check\n\n\t#[structopt(long, default_value = \"1080\")]\n\n\tpub minimum_size_height: u32,\n\n\t/// minimum size width to check\n\n\t#[structopt(long, default_value = \"1920\")]\n\n\tpub minimum_size_width: u32,\n\n\t/// profile name to add\n\n\tpub profile_name: String,\n\n}\n\n\n\nimpl Profile {\n\n\tpub async fn handle(&self, config: 
&mut Config) -> Result<()> {\n\n\t\tmatch self {\n\n\t\t\tProfile::AspectRatio(ar) => ar.handle(config).await?,\n\n\t\t\tProfile::Set { profile_name } => self.set_profile(profile_name, config).await?,\n\n\t\t\tProfile::Active => println!(\"{}\", config.focused_profile),\n\n\t\t\tProfile::List(fmt) => self.list_profile(fmt, config)?,\n\n\t\t\tProfile::Add(ao) => self.add_profile(ao, config).await?,\n\n\t\t\tProfile::Remove { profile_name } => self.remove_profile(profile_name, config).await?,\n\n\t\t\tProfile::MinimumSize(ms) => ms.handle(config).await?,\n", "file_path": "src/cli/profile.rs", "rank": 28, "score": 28703.686372395354 }, { "content": "use anyhow::Result;\n\nuse std::path::{Path, PathBuf};\n\nuse structopt::StructOpt;\n\n\n\nuse crate::api::config::config::{write_config, Config};\n\n\n\n#[derive(StructOpt, Debug, Clone)]\n\npub enum Download {\n\n\t/// Sets download path\n\n\t#[structopt(visible_alias = \"p\")]\n\n\tPath {\n\n\t\t#[structopt(parse(from_os_str))]\n\n\t\tinput: PathBuf,\n\n\t},\n\n\t/// Sets connect timeout (in seconds)\n\n\t#[structopt(visible_aliases = &[\"ct\", \"connect\"])]\n\n\tConnectTimeout { input: u32 },\n\n\t/// Sets the download threads\n\n\t#[structopt(visible_aliases = &[\"thr\", \"thread\"])]\n\n\tThreads { input: usize },\n", "file_path": "src/cli/download.rs", "rank": 29, "score": 28700.321280586206 }, { "content": "use crate::api::config::config::{write_config, Config};\n\n\n\nuse super::ridit_proto::profile_server::Profile;\n\nuse super::ridit_proto::{EmptyMsg, ProfileListMap, ProfileRemove, ProfileUpsert, Reply};\n\nuse tonic::{Request, Response, Status};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ProfileController;\n\n\n\nimpl Reply {\n\n\tpub fn new(message: String) -> Self {\n\n\t\tSelf { message }\n\n\t}\n\n\n\n\tpub fn acknowledged() -> Self {\n\n\t\tSelf::new(\"acknowledged\".to_string())\n\n\t}\n\n}\n\n\n\n#[tonic::async_trait]\n", "file_path": "src/server/profile.rs", "rank": 30, "score": 28699.986150400455 }, 
{ "content": "use anyhow::{bail, Result};\n\nuse structopt::StructOpt;\n\n\n\nuse crate::api::config::{\n\n\tconfig::{write_config, Config},\n\n\tconfiguration::Configuration,\n\n};\n\n\n\nuse super::{aspect_ratio::AspectRatio, minimum_size::MinimumSize, subreddit::OutFormat, Format};\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub enum Profile {\n\n\t/// Sets the main focused profile to set settings with\n\n\t#[structopt(visible_alias = \"s\")]\n\n\tSet { profile_name: String },\n\n\t/// Shows the focused profile\n\n\t#[structopt(visible_aliases = &[\"a\", \"show\"])]\n\n\tActive,\n\n\t/// Configures aspect ratio settings for current profile\n\n\t///\n", "file_path": "src/cli/profile.rs", "rank": 31, "score": 28699.56694874188 }, { "content": "\t\t}\n\n\t}\n\n\n\n\tasync fn list(&self, _: Request<EmptyMsg>) -> Result<Response<ProfileListMap>, Status> {\n\n\t\tlet config = Config::server_read_config().await?;\n\n\t\t// check crate::server::foreign_impl for implementation\n\n\t\tOk(Response::new(ProfileListMap::from(config.settings)))\n\n\t}\n\n}\n", "file_path": "src/server/profile.rs", "rank": 32, "score": 28698.21927257574 }, { "content": "\t\tconfig.timeout = input;\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"timeout is set to {} seconds\", input);\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn threads(input: usize, config: &mut Config) -> Result<()> {\n\n\t\tconfig.download_threads = input;\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\"download thread is set to {} threads\", input);\n\n\t\tOk(())\n\n\t}\n\n}\n", "file_path": "src/cli/download.rs", "rank": 33, "score": 28696.247661356898 }, { "content": "\t\tOk(())\n\n\t}\n\n\n\n\tasync fn add_profile(&self, opt: &AddOption, config: &mut Config) -> Result<()> {\n\n\t\tlet mut setting = Configuration::default();\n\n\t\tsetting.aspect_ratio.enable = !opt.disable_aspect_ratio_check;\n\n\t\tsetting.aspect_ratio.height = opt.aspect_ratio_height;\n\n\t\tsetting.aspect_ratio.width = 
opt.aspect_ratio_width;\n\n\t\tsetting.minimum_size.enable = !opt.disable_minimum_size_check;\n\n\t\tsetting.minimum_size.height = opt.minimum_size_height;\n\n\t\tsetting.minimum_size.width = opt.minimum_size_width;\n\n\n\n\t\tlet text = toml::to_string_pretty(&setting).unwrap();\n\n\t\tprintln!(\n\n\t\t\t\"added (or replaced) profile: '{}' with setting:\\n{}\",\n\n\t\t\topt.profile_name, text\n\n\t\t);\n\n\t\tconfig.insert(opt.profile_name.to_owned(), setting);\n\n\t\twrite_config(config).await?;\n\n\t\tOk(())\n", "file_path": "src/cli/profile.rs", "rank": 34, "score": 28696.105174660614 }, { "content": "\t///\n\n\t/// Example commands:\n\n\t///\n\n\t/// Enabling Aspect Ratio Check: `ridit aspect-ratio enable`\n\n\t///\n\n\t/// Disabling Aspect Ratio Check: `ridit aspect-ratio disable`\n\n\t///\n\n\t/// Set Aspect Ratio Height: `ridit aspect-ratio height 9`\n\n\t///\n\n\t/// Set Aspect Ratio Width: `ridit aspect-ratio width 16`\n\n\tAspectRatio(AspectRatio),\n\n\t/// List all profiles in configuration\n\n\t#[structopt(visible_alias = \"ls\")]\n\n\tList(Format),\n\n\t/// Add new profile\n\n\tAdd(AddOption),\n\n\t/// Remove profile\n\n\t#[structopt(visible_alias = \"rm\")]\n\n\tRemove { profile_name: String },\n\n\t/// Configures minimum size image checks for current profile\n", "file_path": "src/cli/profile.rs", "rank": 35, "score": 28686.28851364863 }, { "content": "\t/// Aspect Ratio handles how `square` the image is. Aspect ratio value is gained by dividing\n\n\t/// `width` with `height`.\n\n\t///\n\n\t/// Aspect ratio with value of 1 is considered square. 
Value of >1 will prone to landscape images\n\n\t/// while <1 will prone to potrait images.\n\n\t///\n\n\t/// Usually you want to set aspect ratio like your device monitor would\n\n\t/// so you can fetch images that will fit nicely as desktop wallpaper for your monitor.\n\n\t/// if your monitor is 16x9, then set width to 16, while height to 9.\n\n\t///\n\n\t/// Range handles if image is within acceptable range of your aspect ratio value.\n\n\t///\n\n\t/// Let's say you set height to 16, width to 9, and range to 0.3. Your aspect ratio value is\n\n\t/// 16/9 = 1.777~. With range value of 0.3, this means you will accept images with aspect ratio between 1.477~ to\n\n\t/// 2.077~. An image with resolution of 4500x2000 has aspect ratio value of 4500/2000 = 2.25,\n\n\t/// outside range value of 1.477-2.077, meaning the image will be rejected from being downloaded.\n\n\t///\n\n\t/// High range value means more images, but there will also be more images that may not fit\n\n\t/// well with your device monitor. 
Low range value means more accurate images, but also means lower amount of\n\n\t/// images to fetch.\n", "file_path": "src/cli/profile.rs", "rank": 36, "score": 28681.300160924977 }, { "content": "\tMinimumSize(MinimumSize),\n\n}\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub struct AddOption {\n\n\t/// Disables checking aspect ratio of images\n\n\t#[structopt(long)]\n\n\tpub disable_aspect_ratio_check: bool,\n\n\t/// aspect ratio height to check\n\n\t#[structopt(long, default_value = \"9\")]\n\n\tpub aspect_ratio_height: u32,\n\n\t/// aspect ratio width to check\n\n\t#[structopt(long, default_value = \"16\")]\n\n\tpub aspect_ratio_width: u32,\n\n\t/// aspect ratio range to check\n\n\t#[structopt(long, default_value = \"0.3\")]\n\n\tpub aspect_ratio_range: f32,\n\n\t/// Disables checking minimum size of images\n\n\t#[structopt(long)]\n\n\tpub disable_minimum_size_check: bool,\n", "file_path": "src/cli/profile.rs", "rank": 37, "score": 28680.440091934237 }, { "content": "\t\t\t.map_err(|err| Status::failed_precondition(err.to_string()))\n\n\t}\n\n\n\n\tpub async fn server_write_config(&self) -> Result<(), Status> {\n\n\t\twrite_config(self)\n\n\t\t\t.await\n\n\t\t\t.map_err(|err| Status::failed_precondition(err.to_string()))\n\n\t}\n\n}\n\n\n\nimpl Configuration {\n\n\t/// Server side implementation. Handles Aspect Ratio gRPC data.\n\n\tpub fn server_update_aspect_ratio(&mut self, asp: AspectRatioOptional) {\n\n\t\tself.aspect_ratio.enable = asp.enable.unwrap_or(self.aspect_ratio.enable);\n\n\t\tself.aspect_ratio.height = asp.height.unwrap_or(self.aspect_ratio.height);\n\n\t\tself.aspect_ratio.width = asp.width.unwrap_or(self.aspect_ratio.width);\n\n\t\tself.aspect_ratio.range = asp.range.unwrap_or(self.aspect_ratio.range);\n\n\t}\n\n\n\n\t/// Server side implementation. 
Handles Minium Size gRPC data.\n", "file_path": "src/server/foreign_impl.rs", "rank": 38, "score": 27812.592129847453 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::ridit_proto::{\n\n\tAspectRatio, AspectRatioOptional, MinimumSize, MinimumSizeOptional, ProfileData,\n\n\tProfileListMap, ProfileUpsert,\n\n};\n\nuse crate::api::config::{\n\n\tconfig::{read_config, write_config, Config, Settings},\n\n\tconfiguration::Configuration,\n\n};\n\nuse tonic::Status;\n\n\n\nimpl Config {\n\n\t/// Server side implementation. Adds new profile.\n\n\tpub async fn server_upsert_profile(\n\n\t\t&mut self,\n\n\t\tprofile_upsert: ProfileUpsert,\n\n\t) -> Result<(), Status> {\n\n\t\tlet mut cfg = if let Some(cfg) = self.get(&profile_upsert.name) {\n\n\t\t\tcfg.to_owned()\n", "file_path": "src/server/foreign_impl.rs", "rank": 39, "score": 27811.751392093338 }, { "content": "\t\t} else {\n\n\t\t\tConfiguration::default()\n\n\t\t};\n\n\n\n\t\tif let Some(new_ar) = profile_upsert.aspect_ratio {\n\n\t\t\tcfg.server_update_aspect_ratio(new_ar);\n\n\t\t}\n\n\n\n\t\tif let Some(new_ms) = profile_upsert.minimum_size {\n\n\t\t\tcfg.server_update_minimum_size(new_ms);\n\n\t\t}\n\n\n\n\t\tself.insert(profile_upsert.name, cfg);\n\n\t\tself.server_write_config().await?;\n\n\t\tOk(())\n\n\t}\n\n\n\n\tpub async fn server_read_config() -> Result<Self, Status> {\n\n\t\tread_config()\n\n\t\t\t.await\n", "file_path": "src/server/foreign_impl.rs", "rank": 40, "score": 27807.412214551456 }, { "content": "\t\t\t}),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl From<Settings> for ProfileListMap {\n\n\tfn from(settings: Settings) -> Self {\n\n\t\tlet mut value = HashMap::new();\n\n\t\tfor (k, cfg) in settings.into_iter() {\n\n\t\t\tvalue.insert(k, ProfileData::from(cfg));\n\n\t\t}\n\n\t\tProfileListMap { value }\n\n\t}\n\n}\n", "file_path": "src/server/foreign_impl.rs", "rank": 41, "score": 27806.511873834672 }, { "content": "\tpub fn server_update_minimum_size(&mut self, msp: MinimumSizeOptional) 
{\n\n\t\tself.minimum_size.enable = msp.enable.unwrap_or(self.minimum_size.enable);\n\n\t\tself.minimum_size.height = msp.height.unwrap_or(self.minimum_size.height);\n\n\t\tself.minimum_size.width = msp.height.unwrap_or(self.minimum_size.width);\n\n\t}\n\n}\n\n\n\nimpl From<Configuration> for ProfileData {\n\n\tfn from(cfg: Configuration) -> Self {\n\n\t\tProfileData {\n\n\t\t\taspect_ratio: Some(AspectRatio {\n\n\t\t\t\tenable: cfg.aspect_ratio.enable,\n\n\t\t\t\twidth: cfg.aspect_ratio.width,\n\n\t\t\t\theight: cfg.aspect_ratio.height,\n\n\t\t\t\trange: cfg.aspect_ratio.range,\n\n\t\t\t}),\n\n\t\t\tminimum_size: Some(MinimumSize {\n\n\t\t\t\tenable: cfg.minimum_size.enable,\n\n\t\t\t\theight: cfg.minimum_size.height,\n\n\t\t\t\twidth: cfg.minimum_size.width,\n", "file_path": "src/server/foreign_impl.rs", "rank": 42, "score": 27799.884521488824 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::net::{IpAddr, Ipv4Addr};\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub struct ServerConfig {\n\n\tpub port: u16,\n\n\tpub ip: IpAddr,\n\n}\n\n\n\nimpl Default for ServerConfig {\n\n\tfn default() -> Self {\n\n\t\tServerConfig {\n\n\t\t\tport: 9876,\n\n\t\t\tip: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/api/config/server.rs", "rank": 43, "score": 27201.78004939861 }, { "content": "use std::{convert::Infallible, default::Default, fmt::Display, str::FromStr};\n\n\n\nuse pad::PadStr;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Deserialize, Debug, Clone, Serialize)]\n\npub struct Configuration {\n\n\tpub aspect_ratio: AspectRatio,\n\n\tpub minimum_size: MinimumSize,\n\n}\n\n\n\nimpl Default for Configuration {\n\n\tfn default() -> Self {\n\n\t\tConfiguration {\n\n\t\t\taspect_ratio: AspectRatio::default(),\n\n\t\t\tminimum_size: MinimumSize::default(),\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/api/config/configuration.rs", "rank": 44, "score": 27201.73473320942 }, { "content": 
"\t\t\tSelf::Rising => write!(f, \"rising\"),\n\n\t\t\tSelf::Controversial => write!(f, \"controversial\"),\n\n\t\t\tSelf::Top => write!(f, \"top\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl FromStr for Sort {\n\n\ttype Err = Infallible;\n\n\n\n\tfn from_str(s: &str) -> Result<Self, Self::Err> {\n\n\t\tOk(match s.to_lowercase().as_str() {\n\n\t\t\t\"hot\" => Self::Hot,\n\n\t\t\t\"rising\" => Self::Rising,\n\n\t\t\t\"controversial\" => Self::Controversial,\n\n\t\t\t\"top\" => Self::Top,\n\n\t\t\t_ => Self::New,\n\n\t\t})\n\n\t}\n\n}\n", "file_path": "src/api/config/configuration.rs", "rank": 45, "score": 27200.35507899877 }, { "content": "#[derive(Debug, Deserialize, Clone, Copy, Serialize)]\n\npub struct AspectRatio {\n\n\tpub enable: bool,\n\n\tpub height: u32,\n\n\tpub width: u32,\n\n\tpub range: f32,\n\n}\n\n\n\nimpl Default for AspectRatio {\n\n\tfn default() -> Self {\n\n\t\tAspectRatio {\n\n\t\t\tenable: true,\n\n\t\t\theight: 9,\n\n\t\t\twidth: 16,\n\n\t\t\trange: 0.3,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone, Copy, Serialize)]\n", "file_path": "src/api/config/configuration.rs", "rank": 46, "score": 27200.309203113186 }, { "content": "#[serde(rename_all = \"lowercase\")]\n\npub enum Sort {\n\n\tHot,\n\n\tNew,\n\n\tRising,\n\n\tControversial,\n\n\tTop,\n\n}\n\n\n\nimpl Default for Sort {\n\n\tfn default() -> Self {\n\n\t\tSelf::New\n\n\t}\n\n}\n\n\n\nimpl Display for Sort {\n\n\tfn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n\t\tmatch self {\n\n\t\t\tSelf::Hot => write!(f, \"hot\"),\n\n\t\t\tSelf::New => write!(f, \"new\"),\n", "file_path": "src/api/config/configuration.rs", "rank": 47, "score": 27198.61761627869 }, { "content": "\tpub download_first: bool,\n\n\tpub sort: Sort,\n\n}\n\n\n\nimpl Subreddit {\n\n\tpub fn new_default(proper_name: String) -> Subreddit {\n\n\t\tSubreddit {\n\n\t\t\tproper_name,\n\n\t\t\tnsfw: true,\n\n\t\t\tdownload_first: false,\n\n\t\t\tsort: Sort::New,\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn 
padded_proper_name(&self) -> String {\n\n\t\t(\"[\".to_string() + &self.proper_name + \"]\").pad_to_width(23)\n\n\t}\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone, Copy, Serialize)]\n", "file_path": "src/api/config/configuration.rs", "rank": 48, "score": 27198.455399446422 }, { "content": "pub struct MinimumSize {\n\n\tpub enable: bool,\n\n\tpub height: u32,\n\n\tpub width: u32,\n\n}\n\n\n\nimpl Default for MinimumSize {\n\n\tfn default() -> Self {\n\n\t\tMinimumSize {\n\n\t\t\tenable: true,\n\n\t\t\theight: 1080,\n\n\t\t\twidth: 1920,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone, Serialize)]\n\npub struct Subreddit {\n\n\tpub proper_name: String,\n\n\tpub nsfw: bool,\n", "file_path": "src/api/config/configuration.rs", "rank": 49, "score": 27198.211328620277 }, { "content": "pub mod config;\n\npub mod configuration;\n\npub mod server;\n", "file_path": "src/api/config/mod.rs", "rank": 50, "score": 27186.16297015795 }, { "content": "use std::fmt::Display;\n\n\n\nuse thiserror::Error;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Deserialize, Serialize, Clone, Debug, Error)]\n\npub struct RedditError {\n\n\tpub reason: String,\n\n\tpub message: String,\n\n\tpub error: u16,\n\n}\n\n\n\nimpl Display for RedditError {\n\n\tfn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n\t\twrite!(\n\n\t\t\tf,\n\n\t\t\tr#\"reddit returned [{}: {}] with reason \"{}\"\"#,\n\n\t\t\tself.error, self.message, self.reason\n\n\t\t)\n\n\t}\n\n}\n", "file_path": "src/api/reddit/models/error.rs", "rank": 71, "score": 26859.885262045213 }, { "content": "use serde::Deserialize;\n\n\n\nuse crate::api::config::config::Config;\n\n\n\nuse super::download_meta::DownloadMeta;\n\n\n\n#[derive(Deserialize)]\n\npub struct Listing {\n\n\tpub data: Data,\n\n}\n\n\n\nimpl Listing {\n\n\tpub fn into_download_metas(self, config: &Config) -> Vec<DownloadMeta> {\n\n\t\tlet mut result: Vec<DownloadMeta> = Vec::new();\n\n\t\tfor children in 
self.data.children.into_iter() {\n\n\t\t\tlet data = children.data;\n\n\t\t\tif data.is_video {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 72, "score": 26532.762389065585 }, { "content": "\n\n\t\t\tlet mut should_download = false;\n\n\n\n\t\t\tfor (profile, setting) in config.settings.iter() {\n\n\t\t\t\tif !meta.passed_checks(setting) {\n\n\t\t\t\t\tcontinue;\n\n\t\t\t\t}\n\n\t\t\t\tmeta.profile.push(profile.to_owned());\n\n\t\t\t\tshould_download = true;\n\n\t\t\t}\n\n\n\n\t\t\tif !should_download {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\n\n\t\t\tresult.push(meta);\n\n\t\t}\n\n\t\tresult\n\n\t}\n\n\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 73, "score": 26529.62624066416 }, { "content": "\t\t\t\tNone => (1, 1),\n\n\t\t\t};\n\n\n\n\t\t\tlet mut meta = DownloadMeta {\n\n\t\t\t\tsubreddit_name: data.subreddit,\n\n\t\t\t\tpost_link: format!(\"https://reddit.com{}\", data.permalink),\n\n\t\t\t\timage_width: width,\n\n\t\t\t\timage_height: height,\n\n\t\t\t\tfilename,\n\n\t\t\t\turl: data.url,\n\n\t\t\t\tnsfw: data.over_18,\n\n\t\t\t\ttitle: data.title,\n\n\t\t\t\tauthor: data.author,\n\n\t\t\t\tprofile: Vec::new(),\n\n\t\t\t};\n\n\n\n\t\t\tif sub.download_first {\n\n\t\t\t\tresult.push(meta);\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 74, "score": 26523.948816624248 }, { "content": "\t\t\tlet sub_name = &data.subreddit;\n\n\t\t\tlet sub = config\n\n\t\t\t\t.subreddits\n\n\t\t\t\t.get(&sub_name.to_lowercase())\n\n\t\t\t\t.unwrap_or_else(|| {\n\n\t\t\t\t\tpanic!(\"subreddit '{}' does not exist in configuration\", sub_name)\n\n\t\t\t\t});\n\n\n\n\t\t\tif data.over_18 && !sub.nsfw {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\n\n\t\t\tlet filename = match Listing::get_filename_from_url(&data.url) {\n\n\t\t\t\tSome(name) => name,\n\n\t\t\t\tNone => continue,\n\n\t\t\t};\n\n\n\n\t\t\tlet (width, height) = match data.get_image_size() {\n\n\t\t\t\tSome(s) => 
s,\n\n\t\t\t\t// return (1, 1) to prevent panic divide by 0\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 75, "score": 26521.823922782114 }, { "content": "\n\n#[derive(Deserialize)]\n\npub struct Preview {\n\n\tpub images: Vec<Image>,\n\n\tpub enabled: bool,\n\n}\n\n\n\nimpl Preview {\n\n\t/// tuple looks like this `(width, height)`\n\n\tpub fn get_image_size(&self) -> Option<(u32, u32)> {\n\n\t\tif let Some(img) = self.images.get(0) {\n\n\t\t\tlet source = &img.source;\n\n\t\t\treturn Some((source.width, source.height));\n\n\t\t}\n\n\t\tNone\n\n\t}\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Image {\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 76, "score": 26516.3098062174 }, { "content": "\t/// Returned tuple looks like this `(width, height)`\n\n\tpub fn get_image_size(&self) -> Option<(u32, u32)> {\n\n\t\tif let Some(preview) = &self.preview {\n\n\t\t\treturn preview.get_image_size();\n\n\t\t}\n\n\t\tNone\n\n\t}\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct MediaEmbed {}\n\n\n\n#[derive(Deserialize)]\n\npub struct SecureMediaEmbed {}\n\n\n\n#[derive(Deserialize)]\n\npub struct Gildings {\n\n\tpub gid1: Option<i64>,\n\n\tpub gid2: Option<i64>,\n\n}\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 77, "score": 26516.037364340697 }, { "content": "\tfn get_filename_from_url(url: &str) -> Option<String> {\n\n\t\tlet s: String = url.split(\"/\").last().unwrap().split(\"?\").take(1).collect();\n\n\t\tif let Some(ext) = s.split(\".\").last() {\n\n\t\t\tif ext.len() > 3 || (ext != \"jpg\" && ext != \"png\") {\n\n\t\t\t\treturn None;\n\n\t\t\t}\n\n\t\t\treturn Some(s);\n\n\t\t}\n\n\t\tNone\n\n\t}\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Data {\n\n\tpub children: Vec<Children>,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Children {\n\n\tpub data: ChildrenData,\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 78, "score": 26515.493502724643 }, { "content": "}\n\n\n\n#[derive(Deserialize)]\n\npub 
struct ChildrenData {\n\n\tpub subreddit: String,\n\n\tpub title: String,\n\n\tpub post_hint: Option<String>,\n\n\tpub created: f64,\n\n\tpub over_18: bool,\n\n\tpub preview: Option<Preview>,\n\n\tpub id: String,\n\n\tpub author: String,\n\n\tpub permalink: String,\n\n\tpub stickied: bool,\n\n\tpub url: String,\n\n\tpub is_video: bool,\n\n\tpub is_gallery: Option<bool>,\n\n}\n\n\n\nimpl ChildrenData {\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 79, "score": 26513.62350408202 }, { "content": "\tpub source: Source,\n\n\tpub resolutions: Vec<Resolution>,\n\n\tpub id: String,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Source {\n\n\tpub url: String,\n\n\tpub width: u32,\n\n\tpub height: u32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Resolution {\n\n\tpub url: String,\n\n\tpub width: i64,\n\n\tpub height: i64,\n\n}\n", "file_path": "src/api/reddit/models/listing.rs", "rank": 80, "score": 26507.96814431669 }, { "content": "\tpub fn set_finished(mut self) -> Self {\n\n\t\tself.finished = true;\n\n\t\tself\n\n\t}\n\n\n\n\tpub fn cli_label(&self) -> String {\n\n\t\tlet profiles = format!(\"{:?}\", self.profiles).pad_to_width(23);\n\n\t\tlet subreddit_name = (\"[\".to_string() + &self.subreddit_name + \"]\").pad_to_width(23);\n\n\t\tformat!(\n\n\t\t\t\"{} {} {}\",\n\n\t\t\tprofiles,\n\n\t\t\tsubreddit_name,\n\n\t\t\tself.url.with_exact_width(35)\n\n\t\t)\n\n\t}\n\n}\n\n\n\nimpl From<DownloadStatus> for ProtoDownloadStatus {\n\n\tfn from(ds: DownloadStatus) -> Self {\n\n\t\tProtoDownloadStatus {\n", "file_path": "src/api/reddit/models/download_status.rs", "rank": 81, "score": 25426.08792170128 }, { "content": "use crate::server::ridit_proto::DownloadStatus as ProtoDownloadStatus;\n\n\n\nuse pad::PadStr;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct DownloadStatus {\n\n\tpub subreddit_name: String,\n\n\tpub profiles: Vec<String>,\n\n\tpub download_length: u64,\n\n\tpub chunk_length: u64,\n\n\tpub finished: bool,\n\n\tpub error: 
Option<String>,\n\n\tpub url: String,\n\n}\n\n\n\nimpl DownloadStatus {\n\n\tpub fn new(\n\n\t\tsubreddit_name: String,\n\n\t\tprofiles: Vec<String>,\n\n\t\tdownload_length: u64,\n", "file_path": "src/api/reddit/models/download_status.rs", "rank": 82, "score": 25424.463004971487 }, { "content": "\t\tchunk_length: u64,\n\n\t\turl: String,\n\n\t) -> Self {\n\n\t\tSelf {\n\n\t\t\tsubreddit_name,\n\n\t\t\tprofiles,\n\n\t\t\tdownload_length,\n\n\t\t\tchunk_length,\n\n\t\t\turl,\n\n\t\t\tfinished: false,\n\n\t\t\terror: None,\n\n\t\t}\n\n\t}\n\n\n\n\t/// Givem error to self and set to finished\n\n\tpub fn with_error(mut self, error: String) -> Self {\n\n\t\tself.error = Some(error);\n\n\t\tself.set_finished()\n\n\t}\n\n\n", "file_path": "src/api/reddit/models/download_status.rs", "rank": 83, "score": 25423.81160238176 }, { "content": "\t\t\tsubreddit_name: ds.subreddit_name,\n\n\t\t\tprofiles: ds.profiles,\n\n\t\t\tdownload_length: ds.download_length,\n\n\t\t\tchunk_length: ds.chunk_length,\n\n\t\t\tfinished: ds.finished,\n\n\t\t\terror: ds.error,\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/api/reddit/models/download_status.rs", "rank": 84, "score": 25410.684271054284 }, { "content": "### Feature\n\n- **cli:** removed daemon subcmd and edited help text\n\n- **cli:** added aspect ratio implementation commands\n\n- **config:** moved from u32 to usize\n\n- **config:** removed active_daemon config\n\n- **config:** added write default if config does not exist handling\n\n- **config:** added error context to project dir\n\n- **listing:** moved check handler to meta so it can be handled elsewhere as well\n\n- **reddit:** removed backoff and retry_fn from dependency\n\n- **reddit:** get listing now uses unbounded channel\n\n- **reddit:** update repository\n\n- **reddit:** changed retry to tokio_retry\n\n- **reddit:** added download images\n\n- **reddit:** reddit now poke image size first to get image sizes if `download_first` is set\n\n- **sort:** implemented display\n\n\n\n### 
Fix\n\n- **reddit:** create_dir_all runs first before any storing is made\n\n\n\n\n\n<a name=\"v0.1.4\"></a>\n\n## [v0.1.4] - 2021-08-23\n\n### Doc\n\n- **config:** update modify config explanation\n\n\n\n### Feature\n\n- **listing:** added image size and minimum size check\n\n\n\n### Fix\n\n- **listing:** fix extension check\n\n\n\n### WIP\n\n- **config:** exposed configuration struct\n\n- **config:** added active daemon for subreddits that will be downloaded by daemon\n\n- **config:** added active settings\n\n- **config:** added modify config by profile\n\n- **config:** added write config and modify config api\n\n\n\n\n\n<a name=\"v0.1.3\"></a>\n\n## [v0.1.3] - 2021-08-23\n\n### Feature\n\n- **config:** added read config api\n\n\n\n### Fix\n\n- **config:** proper `Default` implementation and added deref/derefmut for custom type\n\n\n\n### Format\n\n- format files using rustfmt config\n\n- added rustfmt config\n\n\n\n### Refactor\n\n- **config:** removed deref for more ergonomic code\n\n\n\n### Update\n\n- **config:** added serialize derive\n\n\n\n### WIP\n\n- **config:** config now uses profile as top most key\n\n- **config:** added default implementations to config\n\n\n\n\n\n<a name=\"v0.1.2\"></a>\n\n## [v0.1.2] - 2021-08-21\n\n### Doc\n\n- create README.MD\n\n\n\n### License\n\n- create LICENSE\n\n\n\n### WIP\n\n- **config:** added config models\n\n\n\n\n\n<a name=\"v0.1.1\"></a>\n\n## [v0.1.1] - 2021-08-20\n", "file_path": "CHANGELOG.md", "rank": 85, "score": 20287.451739071596 }, { "content": "<a name=\"unreleased\"></a>\n\n## [Unreleased]\n\n\n\n\n\n<a name=\"v0.4.11\"></a>\n\n## [v0.4.11] - 2021-09-30\n\n### Feature\n\n- **server:** implemented download trigger\n\n\n\n\n\n<a name=\"v0.4.10\"></a>\n\n## [v0.4.10] - 2021-09-29\n\n### Feature\n\n- **cli-aspect-ratio:** added text response on configuration for minimum-size\n\n- **cli-aspect-ratio:** added text response on configuration edit\n\n- **config:** added profile command\n\n- **config:** added server 
port setting\n\n- **docker:** added dockerfile\n\n- **server:** added state status\n\n- **server:** added port and ip configuration\n\n- **server:** impl From for ProtoDownloadMeta from DownloadMeta\n\n\n\n### Fix\n\n- removed helloworld proto load from code\n\n- **config:** default config path now will not panic, but instead raw string paths only\n\n\n\n### Perf\n\n- **cli-start:** uses fasthash hasher to display cli bar more smoothly\n\n\n\n### Refactor\n\n- **cli:** display bars are prettier\n\n- **cli-start:** simplify bar prefix code\n\n- **download_meta:** removed unnecessary codes\n\n- **proto:** split profile and server into separate protos\n\n- **reddit:** now download status is handled via streams\n\n- **server.proto:** added download meta\n\n\n\n### Revert\n\n- **cargo.toml:** release profile will not prioritize size for speed reason\n\n\n\n### WIP\n\n- **proto:** added skeleton for ridit proto\n\n- **proto:** added ridit service and profile service\n\n\n\n### Wip\n\n- **grpc:** added grpc template\n\n\n\n\n\n<a name=\"v0.4.9\"></a>\n\n## [v0.4.9] - 2021-09-20\n\n### Feature\n\n- **pad:** added padding on bar text\n\n\n\n\n\n<a name=\"v0.4.8\"></a>\n\n## [v0.4.8] - 2021-09-20\n\n### Clean\n\n- **repository:** hardcoded username useragent now in static\n\n\n\n### Cleanup\n\n- **pkg:** removed pkg from app (unused codes)\n\n\n\n### Feature\n\n- **cargo.toml:** release profile optimized for binary size\n\n- **cli:** support for tty detection\n\n\n\n### Format\n\n- **cargo.toml:** format\n\n\n\n### Refactor\n\n- **reddit:** download listing text now dependend on Printout Enum\n\n\n\n\n\n<a name=\"v0.4.7\"></a>\n\n## [v0.4.7] - 2021-09-20\n\n### Feat\n\n- **download:** images in temp folder are deleted upon successful copy\n\n\n", "file_path": "CHANGELOG.md", "rank": 86, "score": 20280.518580922195 }, { "content": "### Fix\n\n- **download:** removed images should be temp file NOT the downloaded file\n\n\n\n### Version\n\n- bump to 0.4.7\n\n\n\n\n\n<a 
name=\"v0.4.6\"></a>\n\n## [v0.4.6] - 2021-09-20\n\n### Feature\n\n- **config:** added download threads support. (default to 4).\n\n\n\n### Fix\n\n- **config:** now config if field is not complete, will be filled with default value\n\n- **download_thread:** println prompt grammar is now proper\n\n\n\n### Revert\n\n- **download_thread:** default value is reverted back from 4 to 8\n\n\n\n### Version\n\n- bump to 0.4.6\n\n\n\n\n\n<a name=\"v0.4.5\"></a>\n\n## [v0.4.5] - 2021-09-20\n\n### Version\n\n- bump to 0.4.5 to match current git tag\n\n\n\n\n\n<a name=\"v0.4.4\"></a>\n\n## [v0.4.4] - 2021-09-20\n\n### Feature\n\n- **config:** added proper_name in subreddit key\n\n- **config:** config now does not lowercase subreddit but instead checks for proper casing on adding subs\n\n\n\n### Fix\n\n- **cli:** added write config after selecting remove subreddit\n\n- **reddit:** removed unnecessary fields in listing for json deserializing\n\n- **reqwest:** reqwest now using rustls-tls instead of openssl for tls matching\n\n- **user_agent:** renamed user_agent to proper specification of reddit\n\n\n\n### Refactor\n\n- **listing:** renamed variable to not blatantly shadows usual convention in glance reading\n\n- **reddit:** now download meta and download operation result is passed to the top level function\n\n\n\n\n\n<a name=\"v0.4.3\"></a>\n\n## [v0.4.3] - 2021-09-15\n\n### Feature\n\n- **config:** moved from hashmap to btreemap\n\n- **reddit:** increased poke image size from 512 bytes to 20kB for image signature\n\n- **reddit:** changed user agent to include repo name\n\n- **reddit:** progress_bar now only show on certain enums\n\n\n\n### Fix\n\n- **reddit:** now error from downloading images are properly reported\n\n\n\n\n\n<a name=\"v0.4.2\"></a>\n\n## [v0.4.2] - 2021-09-14\n\n### Feature\n\n- **cli:** added progress bar for downloading\n\n- **config:** adding subreddits check now handled in parallel\n\n\n\n\n\n<a name=\"v0.4.1\"></a>\n\n## [v0.4.1] - 2021-09-14\n", 
"file_path": "CHANGELOG.md", "rank": 87, "score": 20278.432242197447 }, { "content": "### Feature\n\n- **cli:** adding subreddits only checks the net if it is not registered yet\n\n- **config:** update default path to ~/Pictures/ridit for linux and default mobile range to 0.3\n\n- **whole:** downloads now start downloading from every subreddit for every profile\n\n\n\n### Refactor\n\n- **impl Subreddit:** for loop does not use name.to_owned() anymore\n\n\n\n\n\n<a name=\"v0.4.0\"></a>\n\n## [v0.4.0] - 2021-09-13\n\n### Feature\n\n- **cli:** finished manual downloading command\n\n\n\n\n\n<a name=\"v0.3.0\"></a>\n\n## [v0.3.0] - 2021-09-04\n\n### Doc\n\n- **cli:** removed profile specific configuration example for aspect-ratio\n\n\n\n### Feature\n\n- **cli:** mutable borrow fixes on config get mut\n\n- **cli:** added print cli\n\n\n\n### Refactor\n\n- **cli:** removed modify_config and it's derivative\n\n- **cli:** aspect ratio does not read config twice now\n\n- **cli:** removed unused imports\n\n\n\n\n\n<a name=\"v0.2.1\"></a>\n\n## [v0.2.1] - 2021-09-03\n\n### Feature\n\n- **cli:** more detailed reason for error to write configuration\n\n- **cli:** support for sort in adding subreddit\n\n- **cli:** added subreddit cli implementations\n\n- **config:** download timeout moved to top level\n\n- **pkg:** shorten on error definition\n\n- **subreddit:** guard for 0 subreddit and print added subreddits\n\n\n\n### Fix\n\n- **cli:** subcommand aspec handle now uses await\n\n\n\n\n\n<a name=\"v0.2.0\"></a>\n\n## [v0.2.0] - 2021-09-03\n", "file_path": "CHANGELOG.md", "rank": 88, "score": 20271.561687486123 }, { "content": "### Update\n\n- **cli:** added command for start and daemon\n\n\n\n### WIP\n\n- **reddit:** added reddit api models\n\n\n\n\n\n<a name=\"v0.1.0\"></a>\n\n## v0.1.0 - 2021-08-19\n\n### Doc\n\n- **cli:** added aliases for aspect ratio\n\n- **cli:** aliases now visible\n\n- **cli:** added doc for out format\n\n- **cli:** added name and about\n\n- **cli:** 
added docs for aspect ratio children commands\n\n- **cli:** added docs for aspect ratio subcommand\n\n\n\n### Feat\n\n- **cli:** added download settings command\n\n- **cli:** added subreddit cmd\n\n- **cli:** added StructOpt\n\n\n\n### Feature\n\n- **cli:** added list command\n\n- **cli:** added download_first argument\n\n\n\n### Refactor\n\n- **cli:** renamed app from ridit-rs to ridit\n\n- **cli:** moved function to static method\n\n- **cli:** moved functions to method\n\n\n\n\n\n[Unreleased]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.11...HEAD\n\n[v0.4.11]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.10...v0.4.11\n\n[v0.4.10]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.9...v0.4.10\n\n[v0.4.9]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.8...v0.4.9\n\n[v0.4.8]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.7...v0.4.8\n\n[v0.4.7]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.6...v0.4.7\n\n[v0.4.6]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.5...v0.4.6\n\n[v0.4.5]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.4...v0.4.5\n\n[v0.4.4]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.3...v0.4.4\n\n[v0.4.3]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.2...v0.4.3\n\n[v0.4.2]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.1...v0.4.2\n\n[v0.4.1]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.4.0...v0.4.1\n\n[v0.4.0]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.3.0...v0.4.0\n\n[v0.3.0]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.2.1...v0.3.0\n\n[v0.2.1]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.2.0...v0.2.1\n\n[v0.2.0]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.1.4...v0.2.0\n\n[v0.1.4]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.1.3...v0.1.4\n\n[v0.1.3]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.1.2...v0.1.3\n\n[v0.1.2]: 
https://github.com/tigorlazuardi/ridit-rs/compare/v0.1.1...v0.1.2\n\n[v0.1.1]: https://github.com/tigorlazuardi/ridit-rs/compare/v0.1.0...v0.1.1\n", "file_path": "CHANGELOG.md", "rank": 89, "score": 20250.77351169488 }, { "content": "# ridit-rs\n\nReddit Image Download written in Rust\n", "file_path": "README.md", "rank": 90, "score": 20249.31651757479 }, { "content": "\tlet repo = Repository::new(config);\n\n\n\n\tlet text = if atty::is(Stream::Stdout) {\n\n\t\tPrintOut::Bar\n\n\t} else {\n\n\t\tPrintOut::Text\n\n\t};\n\n\n\n\tlet (tx, rx) = mpsc::unbounded_channel();\n\n\n\n\tlet handle = tokio::spawn(async move { display(rx).await });\n\n\n\n\tfor (meta, operation) in repo.download(text, tx).await.into_iter() {\n\n\t\tif let Err(err) = operation {\n\n\t\t\tprintln!(\n\n\t\t\t\t\"{} {} {}\",\n\n\t\t\t\tmeta.padded_profiles(),\n\n\t\t\t\tmeta.padded_subreddit_name(),\n\n\t\t\t\terr\n\n\t\t\t);\n", "file_path": "src/cli/start.rs", "rank": 91, "score": 42.169063112687034 }, { "content": "#[structopt(name = \"ridit\", about = \"Reddit image downloader written in rust\", version = crate_version!(), author = crate_authors!())]\n\npub struct Opt {\n\n\t#[structopt(subcommand)]\n\n\tsubcmd: SubCommand,\n\n}\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub struct Format {\n\n\t#[structopt(short, long, default_value = \"toml\")]\n\n\t/// Sets otuput format. 
defaults to TOML.\n\n\tformat: OutFormat,\n\n}\n\n\n\nimpl Opt {\n\n\tpub async fn execute(&self) -> Result<()> {\n\n\t\tlet mut config = read_config().await?;\n\n\t\tmatch &self.subcmd {\n\n\t\t\tSubCommand::Profile(p) => p.handle(&mut config).await?,\n\n\t\t\tSubCommand::Subreddit(sub) => sub.handle(&mut config).await?,\n\n\t\t\tSubCommand::Download(dl) => dl.handle(&mut config).await?,\n", "file_path": "src/cli/mod.rs", "rank": 92, "score": 37.892629589504914 }, { "content": "\n\nimpl MinimumSize {\n\n\tpub async fn handle(&self, config: &mut Config) -> Result<()> {\n\n\t\tmatch self {\n\n\t\t\tMinimumSize::Enable => self.enable(config).await?,\n\n\t\t\tMinimumSize::Disable => self.disable(config).await?,\n\n\t\t\tMinimumSize::Height { input } => self.height(*input, config).await?,\n\n\t\t\tMinimumSize::Width { input } => self.width(*input, config).await?,\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn enable(&self, config: &mut Config) -> Result<()> {\n\n\t\tlet cfg = config.get_mut_configuration()?;\n\n\t\tcfg.minimum_size.enable = true;\n\n\t\twrite_config(config).await?;\n\n\t\tprintln!(\n\n\t\t\t\"minimum size check enabled for '{}'\",\n\n\t\t\tconfig.focused_profile\n\n\t\t);\n", "file_path": "src/cli/minimum_size.rs", "rank": 93, "score": 30.685728430705876 }, { "content": "\t#[structopt(visible_alias = \"w\")]\n\n\tWidth { input: u32 },\n\n}\n\n\n\nimpl AspectRatio {\n\n\tpub async fn handle(&self, config: &mut Config) -> Result<()> {\n\n\t\tmatch self {\n\n\t\t\tSelf::Enable => self.enable(config).await?,\n\n\t\t\tSelf::Disable => self.disable(config).await?,\n\n\t\t\t&Self::Height { input } => self.height(input, config).await?,\n\n\t\t\t&Self::Width { input } => self.width(input, config).await?,\n\n\t\t\t&Self::Range { input } => self.range(input, config).await?,\n\n\t\t};\n\n\t\tOk(())\n\n\t}\n\n\n\n\tasync fn enable(&self, config: &mut Config) -> Result<()> {\n\n\t\tlet cfg = config.get_mut_configuration()?;\n\n\t\tcfg.aspect_ratio.enable = 
true;\n\n\t\twrite_config(config).await?;\n", "file_path": "src/cli/aspect_ratio.rs", "rank": 94, "score": 30.44321183462735 }, { "content": "\t\t\tstate: Arc::new(Mutex::new(State::default())),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl Ridit for RiditController {\n\n\tasync fn state(&self, _: Request<EmptyMsg>) -> Result<Response<AppState>, Status> {\n\n\t\tOk(Response::new(self.state.lock().unwrap().to_owned().into()))\n\n\t}\n\n\n\n\ttype TriggerDownloadStream = UnboundedReceiverStream<Result<ProtoDownloadStatus, Status>>;\n\n\n\n\tasync fn trigger_download(\n\n\t\t&self,\n\n\t\t_request: Request<EmptyMsg>,\n\n\t) -> Result<Response<Self::TriggerDownloadStream>, Status> {\n\n\t\tlet config = read_config()\n\n\t\t\t.await\n\n\t\t\t.map_err(|err| Status::failed_precondition(err.to_string()))?;\n", "file_path": "src/server/ridit.rs", "rank": 95, "score": 30.029825813553252 }, { "content": "async fn display_text(mut rx: UnboundedReceiver<DownloadStatus>) {\n\n\tlet mut v_err: Vec<DownloadStatus> = Vec::new();\n\n\n\n\twhile let Some(status) = rx.recv().await {\n\n\t\tif status.error.is_some() {\n\n\t\t\tv_err.push(status);\n\n\t\t\tcontinue;\n\n\t\t}\n\n\t\tif status.finished {\n\n\t\t\tprintln!(\"{} finished\", status.cli_label());\n\n\t\t\tcontinue;\n\n\t\t}\n\n\n\n\t\tif status.chunk_length == 0 {\n\n\t\t\tprintln!(\"{} started\", status.cli_label());\n\n\t\t}\n\n\t}\n\n\n\n\tfor status in v_err {\n\n\t\teprintln!(\"{} {}\", status.cli_label(), status.error.unwrap());\n\n\t}\n\n}\n", "file_path": "src/cli/start.rs", "rank": 96, "score": 29.50018616866513 }, { "content": "\t\t}\n\n\t}\n\n\thandle.await.ok();\n\n\tOk(())\n\n}\n\n\n\nasync fn display(rx: UnboundedReceiver<DownloadStatus>) {\n\n\tif atty::is(Stream::Stdout) {\n\n\t\tdisplay_bar(rx).await;\n\n\t} else {\n\n\t\tdisplay_text(rx).await;\n\n\t}\n\n}\n\n\n\nasync fn display_bar(mut rx: UnboundedReceiver<DownloadStatus>) {\n\n\tlet mut mpb = Progress::new();\n\n\tlet s = 
RandomXxHashBuilder64::default();\n\n\tlet mut bars = HashMap::with_hasher(s);\n\n\twhile let Some(status) = rx.recv().await {\n\n\t\tif status.download_length == 0 {\n", "file_path": "src/cli/start.rs", "rank": 97, "score": 29.175918712426295 }, { "content": "use std::net::IpAddr;\n\n\n\nuse anyhow::Result;\n\nuse structopt::StructOpt;\n\n\n\nuse crate::{\n\n\tapi::config::config::{write_config, Config},\n\n\tserver,\n\n};\n\n\n\n#[derive(Debug, Clone, StructOpt)]\n\npub enum ServerCMD {\n\n\tStart,\n\n\tPort { port: u16 },\n\n\tIP { ip_addr: IpAddr },\n\n}\n\n\n\nimpl ServerCMD {\n\n\tpub async fn handle(&self, mut config: Config) -> Result<()> {\n\n\t\tmatch *self {\n", "file_path": "src/cli/server.rs", "rank": 98, "score": 28.891685260682358 }, { "content": "\t\t\tSubCommand::Start => start::start(&config).await?,\n\n\t\t\tSubCommand::Print(p) => p.print(&config)?,\n\n\t\t\tSubCommand::Server(cmd) => cmd.handle(config).await?,\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n\n#[derive(Debug, StructOpt, Clone)]\n\npub enum SubCommand {\n\n\t/// Add or remove subreddit(s) from subscription.\n\n\t///\n\n\t/// Example adding a subreddit: `ridit subreddit add wallpaper`\n\n\t///\n\n\t/// Example adding subreddits while filtering content rated as nsfw:\n\n\t/// `ridit subreddit add --no-nsfw wallpaper wallpapers`\n\n\tSubreddit(subreddit::Subreddit),\n\n\t/// Configures download settings.\n\n\tDownload(download::Download),\n\n\t/// Start the download manually\n", "file_path": "src/cli/mod.rs", "rank": 99, "score": 28.793884918131873 } ]
Rust
src/ed25519/mod.rs
acw/simple_crypto
82bb499be36f17957e7f5e14755f5e67a2931fb4
mod constants; mod fe; mod loads; mod point; mod scalars; use rand::Rng; use sha::{Hash,SHA512}; use self::scalars::{curve25519_scalar_mask,x25519_sc_muladd,x25519_sc_reduce}; use self::point::{Point,Point2}; #[cfg(test)] use testing::run_test; #[cfg(test)] use std::collections::HashMap; use super::KeyPair; #[derive(Debug,PartialEq)] pub struct ED25519KeyPair { pub public: ED25519Public, pub private: ED25519Private } impl KeyPair for ED25519KeyPair { type Public = ED25519Public; type Private = ED25519Private; fn new(pbl: ED25519Public, prv: ED25519Private) -> ED25519KeyPair { ED25519KeyPair { public: pbl, private: prv } } } impl ED25519KeyPair { pub fn generate<G: Rng>(rng: &mut G) -> ED25519KeyPair { let mut seed = [0; 32]; rng.fill_bytes(&mut seed); let private = ED25519Private::from_seed(&seed); let public = ED25519Public::from(&private); ED25519KeyPair::new(public, private) } pub fn from_seed(seed: &[u8]) -> ED25519KeyPair { let private = ED25519Private::from_seed(seed); let public = ED25519Public::from(&private); ED25519KeyPair{ public, private } } } #[derive(Debug,PartialEq)] pub struct ED25519Private { seed: [u8; 32], private: [u8; 32], prefix: [u8; 32], public: [u8; 32] } impl ED25519Private { pub fn from_seed(seed: &[u8]) -> ED25519Private { let mut result = ED25519Private { seed: [0; 32], private: [0; 32], prefix: [0; 32], public: [0; 32] }; result.seed.copy_from_slice(seed); let mut expanded = SHA512::hash(seed); let (private, prefix) = expanded.split_at_mut(32); result.private.copy_from_slice(private); result.prefix.copy_from_slice(prefix); curve25519_scalar_mask(&mut result.private); let a = Point::scalarmult_base(&result.private); result.public.copy_from_slice(&a.encode()); result } pub fn sign(&self, msg: &[u8]) -> Vec<u8> { let mut signature_s = [0u8; 32]; let mut ctx = SHA512::new(); ctx.update(&self.prefix); ctx.update(&msg); let nonce = digest_scalar(&ctx.finalize()); let r = Point::scalarmult_base(&nonce); let signature_r = r.encode(); let 
hram_digest = eddsa_digest(&signature_r, &self.public, &msg); let hram = digest_scalar(&hram_digest); x25519_sc_muladd(&mut signature_s, &hram, &self.private, &nonce); let mut result = Vec::with_capacity(64); result.extend_from_slice(&signature_r); result.extend_from_slice(&signature_s); result } pub fn to_bytes(&self) -> Vec<u8> { self.seed.to_vec() } } #[derive(Debug,PartialEq)] pub struct ED25519Public { bytes: [u8; 32], point: Point } impl<'a> From<&'a ED25519Private> for ED25519Public { fn from(x: &ED25519Private) -> ED25519Public { ED25519Public::new(&x.public).expect("Broke converting private ED25519 to public. (?!)") } } #[derive(Debug)] pub enum ED25519PublicImportError { WrongNumberOfBytes(usize), InvalidPublicPoint } impl ED25519Public { pub fn new(bytes: &[u8]) -> Result<ED25519Public,ED25519PublicImportError> { if bytes.len() != 32 { return Err(ED25519PublicImportError::WrongNumberOfBytes(bytes.len())); } match Point::from_bytes(&bytes) { None => Err(ED25519PublicImportError::InvalidPublicPoint), Some(a) => { let mut res = ED25519Public{ bytes: [0; 32], point: a }; res.bytes.copy_from_slice(&bytes); Ok(res) } } } pub fn verify(&self, msg: &[u8], sig: &[u8]) -> bool { assert_eq!(sig.len(), 64); let signature_r = &sig[..32]; let signature_s = &sig[32..]; if signature_s[31] & 0b11100000 != 0 { return false; } let ainv = self.point.invert(); let h_digest = eddsa_digest(signature_r, &self.bytes, msg); let h = digest_scalar(&h_digest); let r = Point2::double_scalarmult_vartime(&h, &ainv, &signature_s); let r_check = r.encode(); signature_r.to_vec() == r_check } pub fn to_bytes(&self) -> Vec<u8> { self.bytes.to_vec() } } fn eddsa_digest(signature_r: &[u8], public_key: &[u8], msg: &[u8]) -> Vec<u8> { let mut ctx = SHA512::new(); ctx.update(signature_r); ctx.update(public_key); ctx.update(msg); ctx.finalize() } fn digest_scalar(digest: &[u8]) -> Vec<u8> { assert_eq!(digest.len(), 512/8); let mut copy = [0; 512/8]; copy.copy_from_slice(digest); 
x25519_sc_reduce(&mut copy); copy[..32].to_vec() } #[cfg(test)] fn run_signing_testcase(case: HashMap<String,(bool,Vec<u8>)>) { let (negr, rbytes) = case.get("r").unwrap(); let (negu, ubytes) = case.get("u").unwrap(); let (negm, mbytes) = case.get("m").unwrap(); let (negs, sbytes) = case.get("s").unwrap(); assert!(!negr && !negu && !negm && !negs); let keypair = ED25519KeyPair::from_seed(rbytes); assert_eq!(ubytes, &keypair.public.bytes.to_vec()); let mut privpub = Vec::new(); privpub.append(&mut rbytes.clone()); privpub.append(&mut ubytes.clone()); let sig = keypair.private.sign(&mbytes); assert_eq!(sig.len(), sbytes.len()); assert!(sig.iter().eq(sbytes.iter())); assert!(keypair.public.verify(&mbytes, &sig)); } #[cfg(test)] #[test] fn rfc8072() { let fname = "testdata/ed25519/rfc8032.test"; run_test(fname.to_string(), 4, run_signing_testcase); } #[cfg(test)] #[test] fn signing() { let fname = "testdata/ed25519/sign.test"; run_test(fname.to_string(), 4, run_signing_testcase); }
mod constants; mod fe; mod loads; mod point; mod scalars; use rand::Rng; use sha::{Hash,SHA512}; use self::scalars::{curve25519_scalar_mask,x25519_sc_muladd,x25519_sc_reduce}; use self::point::{Point,Point2}; #[cfg(test)] use testing::run_test; #[cfg(test)] use std::collections::HashMap; use super::KeyPair; #[derive(Debug,PartialEq)] pub struct ED25519KeyPair { pub public: ED25519Public, pub private: ED25519Private } impl KeyPair for ED25519KeyPair { type Public = ED25519Public; type Private = ED25519Private; fn new(pbl: ED25519Public, prv: ED25519Private) -> ED25519KeyPair { ED25519KeyPair { public: pbl, private: prv } } } impl ED25519KeyPair { pub fn generate<G: Rng>(rng: &mut G) -> ED25519KeyPair { let mut seed = [0; 32]; rng.fill_bytes(&mut seed); let private = ED25519Private::from_seed(&seed); let public = ED25519Public::from(&private); ED25519KeyPair::new(public, private) } pub fn from_seed(seed: &[u8]) -> ED25519KeyPair { let private = ED25519Private::from_seed(seed); let public = ED25519Public::from(&private); ED25519KeyPair{ public, private } } } #[derive(Debug,PartialEq)] pub struct ED25519Private { seed: [u8; 32], private: [u8; 32], prefix: [u8; 32], public: [u8; 32] } impl ED25519Private { pub fn from_seed(seed: &[u8]) -> ED25519Private { let mut result = ED25519Private { seed: [0; 32], private: [0; 32], prefix: [0; 32], public: [0; 32] }; result.seed.copy_from_slice(seed); let mut expanded = SHA512::hash(seed); let (private, prefix) = expanded.split_at_mut(32); result.private.copy_from_slice(private); result.prefix.copy_from_slice(prefix); curve25519_scalar_mask(&mut result.private); let a = Point::scalarmult_base(&result.private); result.public.copy_from_slice(&a.encode()); result } pub fn sign(&self, msg: &[u8]) -> Vec<u8> { let mut signature_s = [0u8; 32]; let mut ctx = SHA512::new(); ctx.update(&self.prefix); ctx.update(&msg); let nonce = digest_scalar(&ctx.finalize()); let r = Point::scalarmult_base(&nonce); let signature_r = r.encode(); let 
hram_digest = eddsa_digest(&signature_r, &self.public, &msg); let hram = digest_scalar(&hram_digest); x25519_sc_muladd(&mut signature_s, &hram, &self.private, &nonce); let mut result = Vec::with_capacity(64); result.extend_from_slice(&signature_r); result.extend_from_slice(&signature_s); result } pub fn to_bytes(&self) -> Vec<u8> { self.seed.to_vec() } } #[derive(Debug,PartialEq)] pub struct ED25519Public { bytes: [u8; 32], point: Point } impl<'a> From<&'a ED25519Private> for ED25519Public { fn from(x: &ED25519Private) -> ED25519Public { ED25519Public::new(&x.public).expect("Broke converting private ED25519 to public. (?!)") } } #[derive(Debug)] pub enum ED25519PublicImportError { WrongNumberOfBytes(usize), InvalidPublicPoint } impl ED25519Public { pub fn new(bytes: &[u8]) -> Result<ED25519Public,ED25519PublicImportError> { if bytes.len() != 32 { return Err(ED25519PublicImportError::WrongNumberOfBytes(bytes.len())); } match Point::from_bytes(&bytes) { None => Err(ED25519PublicImportError::InvalidPublicPoint), Some(a) => { let mut res = ED25519Public{ bytes: [0; 32], point: a }; res.bytes.copy_from_slice(&bytes); Ok(res) } } } pub fn verify(&self, msg: &[u8], sig: &[u8]) -> bool { assert_eq!(sig.len(), 64); let signature_r = &sig[..32]; let signature_s = &sig[32..]; if signature_s[31] & 0b11100000 != 0 { return false; } let ainv = self.point.invert(); let h_digest = eddsa_digest(signature_r, &self.bytes, msg); let h = digest_scalar(&h_digest); let r = Point2::double_scalarmult_vartime(&h, &ainv, &signature_s); let r_check = r.encode(); signature_r.to_vec() == r_check } pub fn to_bytes(&self) -> Vec<u8> { self.bytes.to_vec() } }
fn digest_scalar(digest: &[u8]) -> Vec<u8> { assert_eq!(digest.len(), 512/8); let mut copy = [0; 512/8]; copy.copy_from_slice(digest); x25519_sc_reduce(&mut copy); copy[..32].to_vec() } #[cfg(test)] fn run_signing_testcase(case: HashMap<String,(bool,Vec<u8>)>) { let (negr, rbytes) = case.get("r").unwrap(); let (negu, ubytes) = case.get("u").unwrap(); let (negm, mbytes) = case.get("m").unwrap(); let (negs, sbytes) = case.get("s").unwrap(); assert!(!negr && !negu && !negm && !negs); let keypair = ED25519KeyPair::from_seed(rbytes); assert_eq!(ubytes, &keypair.public.bytes.to_vec()); let mut privpub = Vec::new(); privpub.append(&mut rbytes.clone()); privpub.append(&mut ubytes.clone()); let sig = keypair.private.sign(&mbytes); assert_eq!(sig.len(), sbytes.len()); assert!(sig.iter().eq(sbytes.iter())); assert!(keypair.public.verify(&mbytes, &sig)); } #[cfg(test)] #[test] fn rfc8072() { let fname = "testdata/ed25519/rfc8032.test"; run_test(fname.to_string(), 4, run_signing_testcase); } #[cfg(test)] #[test] fn signing() { let fname = "testdata/ed25519/sign.test"; run_test(fname.to_string(), 4, run_signing_testcase); }
fn eddsa_digest(signature_r: &[u8], public_key: &[u8], msg: &[u8]) -> Vec<u8> { let mut ctx = SHA512::new(); ctx.update(signature_r); ctx.update(public_key); ctx.update(msg); ctx.finalize() }
function_block-full_function
[ { "content": "pub fn curve25519_scalar_mask(a: &mut [u8])\n\n{\n\n assert_eq!(a.len(), 32);\n\n a[0] &= 248;\n\n a[31] &= 127;\n\n a[31] |= 64;\n\n}\n\n\n", "file_path": "src/ed25519/scalars.rs", "rank": 0, "score": 295214.22511977865 }, { "content": "pub fn x25519_sc_reduce(s: &mut [u8])\n\n{\n\n let mut s0 : i64 = 2097151 & load3(s) as i64;\n\n let mut s1 : i64 = 2097151 & (load4(&s[2..]) >> 5) as i64;\n\n let mut s2 : i64 = 2097151 & (load3(&s[5..]) >> 2) as i64;\n\n let mut s3 : i64 = 2097151 & (load4(&s[7..]) >> 7) as i64;\n\n let mut s4 : i64 = 2097151 & (load4(&s[10..]) >> 4) as i64;\n\n let mut s5 : i64 = 2097151 & (load3(&s[13..]) >> 1) as i64;\n\n let mut s6 : i64 = 2097151 & (load4(&s[15..]) >> 6) as i64;\n\n let mut s7 : i64 = 2097151 & (load3(&s[18..]) >> 3) as i64;\n\n let mut s8 : i64 = 2097151 & load3(&s[21..]) as i64;\n\n let mut s9 : i64 = 2097151 & (load4(&s[23..]) >> 5) as i64;\n\n let mut s10 : i64 = 2097151 & (load3(&s[26..]) >> 2) as i64;\n\n let mut s11 : i64 = 2097151 & (load4(&s[28..]) >> 7) as i64;\n\n let mut s12 : i64 = 2097151 & (load4(&s[31..]) >> 4) as i64;\n\n let mut s13 : i64 = 2097151 & (load3(&s[34..]) >> 1) as i64;\n\n let mut s14 : i64 = 2097151 & (load4(&s[36..]) >> 6) as i64;\n\n let mut s15 : i64 = 2097151 & (load3(&s[39..]) >> 3) as i64;\n\n let mut s16 : i64 = 2097151 & load3(&s[42..]) as i64;\n\n let mut s17 : i64 = 2097151 & (load4(&s[44..]) >> 5) as i64;\n", "file_path": "src/ed25519/scalars.rs", "rank": 1, "score": 284034.0464721384 }, { "content": "pub fn x25519_sc_muladd(s: &mut [u8], a: &[u8], b: &[u8], c: &[u8])\n\n{\n\n let a0 : i64 = 2097151 & load3(a) as i64;\n\n let a1 : i64 = 2097151 & (load4(&a[2..]) >> 5) as i64;\n\n let a2 : i64 = 2097151 & (load3(&a[5..]) >> 2) as i64;\n\n let a3 : i64 = 2097151 & (load4(&a[7..]) >> 7) as i64;\n\n let a4 : i64 = 2097151 & (load4(&a[10..]) >> 4) as i64;\n\n let a5 : i64 = 2097151 & (load3(&a[13..]) >> 1) as i64;\n\n let a6 : i64 = 2097151 & (load4(&a[15..]) >> 6) as 
i64;\n\n let a7 : i64 = 2097151 & (load3(&a[18..]) >> 3) as i64;\n\n let a8 : i64 = 2097151 & load3(&a[21..]) as i64;\n\n let a9 : i64 = 2097151 & (load4(&a[23..]) >> 5) as i64;\n\n let a10 : i64 = 2097151 & (load3(&a[26..]) >> 2) as i64;\n\n let a11 : i64 = (load4(&a[28..]) >> 7) as i64;\n\n let b0 : i64 = 2097151 & load3(b) as i64;\n\n let b1 : i64 = 2097151 & (load4(&b[2..]) >> 5) as i64;\n\n let b2 : i64 = 2097151 & (load3(&b[5..]) >> 2) as i64;\n\n let b3 : i64 = 2097151 & (load4(&b[7..]) >> 7) as i64;\n\n let b4 : i64 = 2097151 & (load4(&b[10..]) >> 4) as i64;\n\n let b5 : i64 = 2097151 & (load3(&b[13..]) >> 1) as i64;\n", "file_path": "src/ed25519/scalars.rs", "rank": 2, "score": 278432.2713230389 }, { "content": "#[cfg(test)]\n\npub fn test_from_bytes(x: &[u8]) -> FieldElement\n\n{\n\n let mut res = FieldElement::new();\n\n let mut helper = Cursor::new(x);\n\n helper.read_i32_into::<LittleEndian>(&mut res.value).unwrap();\n\n res\n\n}\n\n\n\n#[cfg(test)]\n\nquickcheck! {\n\n // this is somewhat self referential, given the definition of arbitrary,\n\n // but more testing is more good\n\n fn from_to_bytes_roundtrip(e: ValidFieldElement) -> bool {\n\n let bytes = e.values.to_bytes();\n\n let trans = FieldElement::from_bytes(&bytes);\n\n trans == e.values\n\n }\n\n}\n\n\n\nimpl<'a> AddAssign<&'a FieldElement> for FieldElement\n", "file_path": "src/ed25519/fe.rs", "rank": 4, "score": 260959.57559897273 }, { "content": "fn slide(r: &mut [i8], a: &[u8])\n\n{\n\n for i in 0..256 {\n\n r[i] = (1 & (a[i >> 3] >> (i & 7))) as i8;\n\n }\n\n \n\n for i in 0..256 {\n\n if r[i] != 0 {\n\n let mut b = 1;\n\n while (b <= 6) && ((i + b) < 256) {\n\n if r[i + b] != 0 {\n\n if r[i] + (r[i + b] << b) <= 15 {\n\n r[i] += r[i + b] << b;\n\n r[i + b] = 0;\n\n } else if r[i] - (r[i + b] << b) >= -15 {\n\n r[i] -= r[i + b] << b;\n\n for k in (i+b)..256 {\n\n if r[k] == 0 {\n\n r[k] = 1;\n\n break;\n", "file_path": "src/ed25519/point.rs", "rank": 5, "score": 257356.06855051537 }, { 
"content": "pub fn load4(x: &[u8]) -> u64\n\n{\n\n (x[0] as u64) | ((x[1] as u64) << 8) |\n\n ((x[2] as u64) << 16) | ((x[3] as u64) << 24)\n\n}\n\n\n", "file_path": "src/ed25519/loads.rs", "rank": 6, "score": 239593.66569849788 }, { "content": "pub fn load3(x: &[u8]) -> u64\n\n{\n\n (x[0] as u64) | ((x[1] as u64) << 8) | ((x[2] as u64) << 16)\n\n}\n\n\n", "file_path": "src/ed25519/loads.rs", "rank": 7, "score": 239593.66569849788 }, { "content": "/// Parse the magic header in an SSH key file.\n\npub fn parse_openssh_header<R: Read>(input: &mut R) -> Result<(),SSHKeyParseError>\n\n{\n\n let mut limited_input_header = input.take(OPENSSH_MAGIC_HEADER_LEN as u64);\n\n let mut header: [u8; OPENSSH_MAGIC_HEADER_LEN] = [0; OPENSSH_MAGIC_HEADER_LEN];\n\n\n\n assert_eq!(OPENSSH_MAGIC_HEADER.len(), OPENSSH_MAGIC_HEADER_LEN);\n\n limited_input_header.read_exact(&mut header)?;\n\n\n\n for (left, right) in OPENSSH_MAGIC_HEADER.bytes().zip(header.iter()) {\n\n if left != *right {\n\n return Err(SSHKeyParseError::NoOpenSSHMagicHeader)\n\n }\n\n\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 8, "score": 237175.05214602966 }, { "content": "/// Parse an X.590 certificate in memory into a generic certificate that can\n\n/// be used by a program.\n\npub fn parse_x509(buffer: &[u8]) -> Result<GenericCertificate,X509ParseError> {\n\n let blocks = from_der(&buffer[..])?;\n\n match blocks.first() {\n\n None =>\n\n Err(X509ParseError::NotEnoughData),\n\n Some(&ASN1Block::Sequence(_, _, ref x)) => {\n\n let cert = decode_certificate(&x[0])?;\n\n let cert_block_start = x[0].offset();\n\n let cert_block_end = x[1].offset();\n\n let cert_block = &buffer[cert_block_start..cert_block_end];\n\n let alginfo = decode_algorithm_ident(&x[1])?;\n\n let sig = decode_signature(&x[2])?;\n\n check_signature(&alginfo, &cert.subject_key, cert_block, sig)?;\n\n Ok(cert)\n\n }\n\n Some(_) =>\n\n Err(X509ParseError::IllFormedEverything)\n\n }\n\n}\n\n\n", "file_path": 
"src/x509/mod.rs", "rank": 10, "score": 217596.44347892425 }, { "content": "/// Once you've figured out the binary data you want to produce for an SSH key\n\n/// blob, use this routine to render it into its ASCII encoding.\n\npub fn render_ssh_private_key_data(bytes: &[u8]) -> String\n\n{\n\n let mut bytestr = encode(bytes);\n\n let mut output = String::new();\n\n\n\n output.push_str(OPENER);\n\n #[cfg(target_os=\"windows\")]\n\n output.push_str(\"\\r\");\n\n output.push_str(\"\\n\");\n\n while bytestr.len() > 70 {\n\n let rest = bytestr.split_off(70);\n\n output.push_str(&bytestr);\n\n #[cfg(target_os=\"windows\")]\n\n output.push_str(\"\\r\");\n\n output.push_str(\"\\n\");\n\n bytestr = rest;\n\n }\n\n output.push_str(&bytestr);\n\n #[cfg(target_os=\"windows\")]\n\n output.push_str(\"\\r\");\n", "file_path": "src/ssh/frame.rs", "rank": 11, "score": 214932.42135448134 }, { "content": "/// Read a buffer from the SSH key stream.\n\npub fn parse_openssh_buffer<I: Read>(input: &mut I) -> Result<Vec<u8>,SSHKeyParseError>\n\n{\n\n let length = parse_openssh_u32(input)?;\n\n let mut limited_input = input.take(length as u64);\n\n let mut res = Vec::with_capacity(length as usize);\n\n limited_input.read_to_end(&mut res)?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 12, "score": 207475.68576333538 }, { "content": "/// Render a buffer into the SSH key stream.\n\npub fn render_openssh_buffer<O: Write>(output: &mut O, b: &[u8]) -> Result<(),SSHKeyRenderError>\n\n{\n\n if b.len() > 0xFFFFFFFF {\n\n return Err(SSHKeyRenderError::BufferTooLarge);\n\n }\n\n\n\n render_openssh_u32(output, b.len() as u32)?;\n\n if b.len() > 0 {\n\n output.write_all(b)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 13, "score": 204182.1517809131 }, { "content": "/// Load all the public keys from a file into memory.\n\npub fn load_ssh_pubkeys<KP,P>(path: P) -> 
Result<Vec<(KP::Public, String)>,SSHKeyParseError>\n\n where\n\n KP: SSHKey,\n\n P: AsRef<Path>\n\n{\n\n let mut file = File::open(path)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n let mut result = Vec::new();\n\n\n\n for line in contents.lines() {\n\n result.push( decode_ssh_pubkey::<KP>(line)? );\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 14, "score": 203414.7808363215 }, { "content": "/// Given a string defining an ASCII SSH key blob (one that starts with\n\n/// \"--BEGIN...\"), decode the body of the blob and return it as binary\n\n/// data.\n\npub fn parse_ssh_private_key_data(s: &str) -> Result<Vec<u8>,SSHKeyParseError>\n\n{\n\n if s.starts_with(OPENER) {\n\n if let Some(endidx) = s.find(CLOSER) {\n\n let b64str: String = s[OPENER.len()..endidx].chars().filter(|x| !x.is_whitespace()).collect();\n\n let bytes = decode(&b64str)?;\n\n Ok(bytes)\n\n } else {\n\n Err(SSHKeyParseError::NoEndBannerFound)\n\n }\n\n } else {\n\n Err(SSHKeyParseError::NoBeginBannerFound)\n\n }\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 15, "score": 202319.33701135148 }, { "content": "pub fn drop0s(a: &[u8]) -> &[u8] {\n\n let mut idx = 0;\n\n\n\n while (idx < a.len()) && (a[idx] == 0) {\n\n idx = idx + 1;\n\n }\n\n\n\n &a[idx..]\n\n}\n\n\n", "file_path": "src/rsa/core.rs", "rank": 17, "score": 190273.6606077329 }, { "content": "fn negative(b: i8) -> u8\n\n{\n\n let mut x = b as u32;\n\n x >>= 31; /* 1: yes; 0: no */\n\n x as u8\n\n}\n\n\n\n\n\n/* k25519Precomp[i][j] = (j+1)*256^i*B */\n\npub const K25519_PRECOMP: [[Precomp; 8]; 32] = [\n\n [\n\n Precomp {\n\n yplusx: FieldElement{ value: [25967493, -14356035, 29566456, 3660896, -12694345, 4014787, 27544626, -11754271, -6079156, 2047605] },\n\n yminusx: FieldElement{ value: [-12545711, 934262, -2722910, 3049990, -727428, 9406986, 12720692, 5043384, 19500929, -15469378] },\n\n xy2d: FieldElement{ value: [-8738181, 4489570, 9688441, -14785194, 10184609, 
-12363380, 29287919, 11864899, -24514362, -4438546] },\n\n },\n\n Precomp {\n\n yplusx: FieldElement{ value: [-12815894, -12976347, -21581243, 11784320, -25355658, -2750717, -11717903, -3814571, -358445, -10211303] },\n\n yminusx: FieldElement{ value: [-21703237, 6903825, 27185491, 6451973, -29577724, -9554005, -15616551, 11189268, -26829678, -5319081] },\n\n xy2d: FieldElement{ value: [26966642, 11152617, 32442495, 15396054, 14353839, -12752335, -3128826, -9541118, -15472047, -4166697] },\n", "file_path": "src/ed25519/constants.rs", "rank": 18, "score": 189911.2936752628 }, { "content": "/// Decode a string containing a public key into an appropriate key type and\n\n/// the comment associated with it, usually an email address or similar.\n\npub fn decode_ssh_pubkey<KP: SSHKey>(s: &str) -> Result<(KP::Public, String),SSHKeyParseError>\n\n{\n\n let mut splitter = s.split_whitespace();\n\n\n\n match (splitter.next(), splitter.next(), splitter.next(), splitter.next()) {\n\n (Some(keytype), Some(keymaterial), Some(comment), None) => {\n\n if !KP::valid_keytype(keytype) {\n\n return Err(SSHKeyParseError::InvalidPublicKeyType);\n\n }\n\n\n\n let bytes = decode(keymaterial)?;\n\n let mut byte_cursor = Cursor::new(bytes);\n\n let key = KP::parse_ssh_public_info(&mut byte_cursor)?;\n\n\n\n Ok((key, comment.to_string()))\n\n }\n\n _ =>\n\n Err(SSHKeyParseError::BrokenPublicKeyLine)\n\n }\n\n}\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 19, "score": 184956.68282610044 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn from_to_bytes() {\n\n let fname = \"testdata/ed25519/bytes.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n assert!(!nega && !negb);\n\n let e = FieldElement::from_bytes(abytes);\n\n let mut target = FieldElement::new();\n\n let mut cursor = Cursor::new(bbytes);\n\n cursor.read_i32_into::<NativeEndian>(&mut target.value).unwrap();\n\n assert_eq!(e, 
target, \"from bytes\");\n\n let bytes = e.to_bytes();\n\n assert_eq!(&bytes, abytes, \"to bytes\");\n\n });\n\n}\n\n\n", "file_path": "src/ed25519/fe.rs", "rank": 20, "score": 183369.57019071738 }, { "content": "fn equal(b: i8, c: i8) -> bool\n\n{\n\n let ub = b;\n\n let uc = c;\n\n let x = ub ^ uc; /* 0: yes; 1..255: no */\n\n (x == 0)\n\n}\n\n\n", "file_path": "src/ed25519/constants.rs", "rank": 21, "score": 179252.38532580758 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn from_bytes_vartime() {\n\n let fname = \"testdata/ed25519/fbv.test\";\n\n run_test(fname.to_string(), 3, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc);\n\n let target = Point::load_test_value(&cbytes);\n\n let mine = Point::from_bytes(&abytes);\n\n if bbytes.len() < cbytes.len() {\n\n assert!(mine.is_none());\n\n } else {\n\n assert_eq!(target, mine.unwrap());\n\n }\n\n });\n\n}\n\n\n\n#[derive(Debug,PartialEq)]\n\npub struct Point2 {\n", "file_path": "src/ed25519/point.rs", "rank": 22, "score": 177669.1296958912 }, { "content": "/// Load an SSH private key file, returning the appropriate key type and the\n\n/// comment associated with it.\n\npub fn load_ssh_keyfile<KP,P>(path: P) -> Result<(KP, String),SSHKeyParseError>\n\n where\n\n KP: SSHKey,\n\n P: AsRef<Path>\n\n{\n\n let mut file = File::open(path)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n decode_ssh(&contents)\n\n}\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 23, "score": 174500.7884197542 }, { "content": "fn runhmac<H: Hash + Clone>(base: &HMAC<H>, m: &[u8]) -> Vec<u8>\n\n{\n\n let mut runner = base.clone();\n\n runner.update(&m);\n\n runner.finalize()\n\n}\n\n\n\n#[derive(Clone,Debug,PartialEq)]\n\npub enum DSADecodeError {\n\n ASN1Error(ASN1DecodeErr),\n\n NoSignatureFound,\n\n InvalidRValue,\n\n InvalidSValue\n\n}\n\n\n\nimpl 
From<ASN1DecodeErr> for DSADecodeError {\n\n fn from(a: ASN1DecodeErr) -> DSADecodeError {\n\n DSADecodeError::ASN1Error(a)\n\n }\n\n}\n", "file_path": "src/dsa/rfc6979.rs", "rank": 24, "score": 173425.95772258934 }, { "content": "/// Render the magic header in an SSH key file.\n\npub fn render_openssh_header<O: Write>(output: &mut O) -> Result<(),SSHKeyRenderError>\n\n{\n\n Ok(output.write_all(OPENSSH_MAGIC_HEADER.as_bytes())?)\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 25, "score": 172319.29034156073 }, { "content": "/// Parse a fixed-width number from the SSH key stream and return it.\n\npub fn parse_openssh_number<I,D>(input: &mut I) -> Result<D,SSHKeyParseError>\n\n where\n\n I: Read,\n\n D: Decoder\n\n{\n\n let mut buffer = parse_openssh_buffer(input)?;\n\n while buffer[0] == 0 { buffer.remove(0); }\n\n Ok(D::from_bytes(&buffer))\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 26, "score": 171465.33647725842 }, { "content": "/// Parse an unsigned u32 from the SSH key stream. (This does the appropriate\n\n/// conversion from network order to native order.)\n\npub fn parse_openssh_u32<I: Read>(input: &mut I) -> Result<u32,SSHKeyParseError>\n\n{\n\n let mut limited_input_header = input.take(4);\n\n let res = limited_input_header.read_u32::<BigEndian>()?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 27, "score": 171460.83486681935 }, { "content": "/// Parse a string from the SSH key stream. 
This does some validation to ensure\n\n/// that the data being read is actually in a form that Rust will recognize as\n\n/// being a valid string.\n\npub fn parse_openssh_string<I: Read>(input: &mut I) -> Result<String,SSHKeyParseError>\n\n{\n\n let length = parse_openssh_u32(input)?;\n\n let mut limited_input = input.take(length as u64);\n\n let mut result = String::new();\n\n limited_input.read_to_string(&mut result)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 28, "score": 171460.83486681935 }, { "content": "pub fn xor_vecs(a: &Vec<u8>, b: &Vec<u8>) -> Vec<u8> {\n\n a.iter().zip(b.iter()).map(|(a,b)| a^b).collect()\n\n}\n\n\n", "file_path": "src/rsa/core.rs", "rank": 29, "score": 168732.98629993337 }, { "content": "pub fn pkcs1_pad(ident: &[u8], hash: &[u8], keylen: usize) -> Vec<u8>\n\n{\n\n let mut idhash = Vec::new();\n\n idhash.extend_from_slice(ident);\n\n idhash.extend_from_slice(hash);\n\n let tlen = idhash.len();\n\n assert!(keylen > (tlen + 3));\n\n let mut padding = Vec::new();\n\n padding.resize(keylen - tlen - 3, 0xFF);\n\n let mut result = vec![0x00,0x01];\n\n result.append(&mut padding);\n\n result.push(0x00);\n\n result.append(&mut idhash);\n\n result\n\n}\n\n\n", "file_path": "src/rsa/core.rs", "rank": 30, "score": 163941.17266345723 }, { "content": "/// Render a string into the SSH key stream.\n\npub fn render_openssh_string<O: Write>(output: &mut O, v: &str) -> Result<(),SSHKeyRenderError>\n\n{\n\n let vbytes: Vec<u8> = v.bytes().collect();\n\n let len = vbytes.len();\n\n \n\n if len > 0xFFFFFFFF {\n\n return Err(SSHKeyRenderError::StringTooLong);\n\n }\n\n\n\n render_openssh_u32(output, vbytes.len() as u32)?;\n\n output.write_all(&vbytes)?;\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 31, "score": 162793.67571361666 }, { "content": "/// Render a fixed-width number into the SSH key stream.\n\npub fn 
render_openssh_number<O,D>(output: &mut O, n: &D) -> Result<(),SSHKeyRenderError>\n\n where\n\n O: Write,\n\n D: Encoder\n\n{\n\n let bytes = n.to_bytes();\n\n render_openssh_buffer(output, &bytes)\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nuse cryptonum::unsigned::{U192,U1024,U2048,U4096};\n\n\n\n#[cfg(test)]\n\nquickcheck! {\n\n fn bytes_roundtrip(x: Vec<u8>) -> bool {\n\n let rendered = render_ssh_private_key_data(&x);\n\n let returned = parse_ssh_private_key_data(&rendered).unwrap();\n\n returned == x\n", "file_path": "src/ssh/frame.rs", "rank": 32, "score": 162793.67571361666 }, { "content": "/// Render an unsigned u32 from the SSH key stream. (This does the appropriate\n\n/// conversion from network order to native order.)\n\npub fn render_openssh_u32<O: Write>(output: &mut O, val: u32) -> Result<(),SSHKeyRenderError>\n\n{\n\n Ok(output.write_u32::<BigEndian>(val)?)\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/ssh/frame.rs", "rank": 33, "score": 160291.19941870566 }, { "content": "fn into_encoded_point(x: &FieldElement, y: &FieldElement, z: &FieldElement) -> Vec<u8>\n\n{\n\n let recip = z.invert();\n\n let x_over_z = x * &recip;\n\n let y_over_z = y * &recip;\n\n let mut bytes = y_over_z.to_bytes();\n\n let sign_bit = if x_over_z.isnegative() { 1 } else { 0 };\n\n // The preceding computations must execute in constant time, but this\n\n // doesn't need to.\n\n bytes[31] ^= sign_bit << 7;\n\n bytes\n\n}\n", "file_path": "src/ed25519/point.rs", "rank": 34, "score": 159389.84956937312 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn loads() {\n\n let fname = \"testdata/ed25519/load.test\";\n\n run_test(fname.to_string(), 3, |case| {\n\n let (negx, xbytes) = case.get(\"x\").unwrap();\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n assert!(!negx && !nega && 
!negb);\n\n let res3 = u64::from(U192::from_bytes(abytes));\n\n let res4 = u64::from(U192::from_bytes(bbytes));\n\n assert_eq!(res3, load3(&xbytes), \"load3\");\n\n assert_eq!(res4, load4(&xbytes), \"load4\");\n\n });\n\n}\n\n\n", "file_path": "src/ed25519/loads.rs", "rank": 35, "score": 154444.90756086702 }, { "content": "/// Decode a string containing a private key into the appropriate key type and\n\n/// the comment associated with it, usually an email address or similar.\n\npub fn decode_ssh<KP: SSHKey>(x: &str) -> Result<(KP, String),SSHKeyParseError>\n\n{\n\n let bytes = parse_ssh_private_key_data(x)?;\n\n let data_size = bytes.len() as u64;\n\n let mut byte_cursor = Cursor::new(bytes);\n\n\n\n parse_openssh_header(&mut byte_cursor)?;\n\n let ciphername = parse_openssh_string(&mut byte_cursor)?;\n\n if ciphername != \"none\" {\n\n return Err(SSHKeyParseError::UnknownKeyCipher(ciphername));\n\n }\n\n let kdfname = parse_openssh_string(&mut byte_cursor)?;\n\n if kdfname != \"none\" {\n\n return Err(SSHKeyParseError::UnknownKeyCipher(kdfname));\n\n }\n\n let kdfoptions = parse_openssh_buffer(&mut byte_cursor)?;\n\n if kdfoptions.len() > 0 {\n\n return Err(SSHKeyParseError::UnexpectedKDFOptions);\n\n }\n\n let numkeys = parse_openssh_u32(&mut byte_cursor)?;\n", "file_path": "src/ssh/mod.rs", "rank": 36, "score": 151498.76652015996 }, { "content": "fn next_value_set(line: &str) -> (String, bool, Vec<u8>)\n\n{\n\n assert!(line.is_ascii());\n\n let mut items = line.split(\": \");\n\n let key = items.next().unwrap();\n\n let valbits = items.next().unwrap();\n\n let neg = valbits.contains('-');\n\n let valbitsnoneg = valbits.trim_start_matches(\"-\");\n\n\n\n let mut nibble_iter = valbitsnoneg.chars().rev();\n\n let mut val = Vec::new();\n\n\n\n while let Some(c1) = nibble_iter.next() {\n\n match nibble_iter.next() {\n\n None => {\n\n val.push( c1.to_digit(16).expect(&format!(\"Unexpected character: |{}|\", c1)) as u8 );\n\n }\n\n Some(c2) => {\n\n let b1 = 
c1.to_digit(16).expect(&format!(\"Unexpected character: |{}|\", c1)) as u8;\n\n let b2 = c2.to_digit(16).expect(&format!(\"Unexpected character: |{}|\", c2)) as u8;\n\n val.push( (b2 << 4) | b1 );\n\n }\n\n }\n\n }\n\n val.reverse();\n\n\n\n (key.to_string(), neg, val)\n\n}\n\n\n", "file_path": "src/testing.rs", "rank": 37, "score": 150167.10730603017 }, { "content": "#[derive(Debug,PartialEq)]\n\nstruct Cached {\n\n yplusx: FieldElement,\n\n yminusx: FieldElement,\n\n z: FieldElement,\n\n t2d: FieldElement\n\n}\n\n\n\nimpl Cached\n\n{\n\n fn new() -> Cached\n\n {\n\n Cached {\n\n yplusx: FieldElement::new(),\n\n yminusx: FieldElement::new(),\n\n z: FieldElement::new(),\n\n t2d: FieldElement::new()\n\n }\n\n }\n\n \n\n #[cfg(test)]\n", "file_path": "src/ed25519/point.rs", "rank": 38, "score": 149480.0947497687 }, { "content": "pub fn bits2int<X>(x: &[u8], qlen: usize) -> X\n\n where\n\n X: Decoder + Shr<usize,Output=X>\n\n{\n\n\n\n if qlen < (x.len() * 8) {\n\n let mut fixed_x = Vec::from(x);\n\n let qlen_bytes = (qlen + 7) / 8;\n\n let rounded_qlen = qlen_bytes * 8;\n\n fixed_x.resize(qlen_bytes, 0);\n\n X::from_bytes(&fixed_x) >> (rounded_qlen - qlen)\n\n } else {\n\n X::from_bytes(x)\n\n }\n\n}\n\n\n", "file_path": "src/dsa/rfc6979.rs", "rank": 39, "score": 148689.63932255935 }, { "content": "/// Encode a supported key into its ASCII SSH format, with the given comment.\n\npub fn encode_ssh<KP: SSHKey>(x: &KP, comment: &str) -> Result<String,SSHKeyRenderError>\n\n{\n\n let mut pubkeybin = Vec::with_capacity(8192);\n\n let mut privkeybin = Vec::with_capacity(8192);\n\n let mut binary = Vec::with_capacity(16384);\n\n\n\n // create the public key bits\n\n x.render_ssh_public_info(&mut pubkeybin)?;\n\n // create the private key bits\n\n render_openssh_u32(&mut privkeybin, 0xDEADBEEF)?; // FIXME: Any reason for this to be random?\n\n render_openssh_u32(&mut privkeybin, 0xDEADBEEF)?; // ditto\n\n x.render_ssh_private_info(&mut privkeybin)?;\n\n 
render_openssh_string(&mut privkeybin, comment)?;\n\n // add some padding (not quite sure why)\n\n let mut i = comment.len();\n\n while (i % 16) != 0 {\n\n privkeybin.write(&[(i - comment.len() + 1) as u8])?;\n\n i += 1;\n\n }\n\n // render a bunch of the framing stuff\n\n render_openssh_header(&mut binary)?;\n\n render_openssh_string(&mut binary, \"none\")?; // ciphername\n\n render_openssh_string(&mut binary, \"none\")?; // kdfname\n\n render_openssh_buffer(&mut binary, &[])?; // kdfoptions\n\n render_openssh_u32(&mut binary, 1)?; // numkeys\n\n render_openssh_buffer(&mut binary, &pubkeybin)?;\n\n render_openssh_buffer(&mut binary, &privkeybin)?;\n\n Ok(render_ssh_private_key_data(&binary))\n\n}\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 40, "score": 146457.71993248272 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn muladd() {\n\n let fname = \"testdata/ed25519/muladd.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && !negd);\n\n let mut mine = [0; 32];\n\n x25519_sc_muladd(&mut mine, abytes, bbytes, cbytes);\n\n for i in 0..32 {\n\n assert_eq!(&mine[i], &dbytes[i]);\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/ed25519/scalars.rs", "rank": 41, "score": 143898.54794493603 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn reduce() {\n\n let fname = \"testdata/ed25519/reduce.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n assert!(!nega && !negb);\n\n assert_eq!(abytes.len(), 64);\n\n assert_eq!(bbytes.len(), 32);\n\n let mut copy = abytes.clone();\n\n x25519_sc_reduce(&mut copy);\n\n assert_eq!(&copy[0..32], &bbytes[0..]);\n\n });\n\n}\n\n\n\n/* Input:\n\n * a[0]+256*a[1]+...+256^31*a[31] = a\n\n * 
b[0]+256*b[1]+...+256^31*b[31] = b\n\n * c[0]+256*c[1]+...+256^31*c[31] = c\n\n *\n\n * Output:\n\n * s[0]+256*s[1]+...+256^31*s[31] = (ab+c) mod l\n\n * where l = 2^252 + 27742317777372353535851937790883648493. */\n", "file_path": "src/ed25519/scalars.rs", "rank": 42, "score": 143898.54794493603 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn double() {\n\n let fname = \"testdata/ed25519/pt_double.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && !negd);\n\n let a = Point::load_test_value(abytes);\n\n let b = PointP1P1::load_test_value(bbytes);\n\n let c = Point2::load_test_value(cbytes);\n\n let d = PointP1P1::load_test_value(dbytes);\n\n\n\n let myb = a.double();\n\n assert_eq!(myb, b);\n\n let myd = c.double();\n\n assert_eq!(myd, d);\n\n });\n\n}\n", "file_path": "src/ed25519/point.rs", "rank": 43, "score": 143861.33512360672 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn conversion() {\n\n let fname = \"testdata/ed25519/conversion.test\";\n\n run_test(fname.to_string(), 6, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negt, tbytes) = case.get(\"t\").unwrap();\n\n let (nego, obytes) = case.get(\"o\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n let a = Point::load_test_value(&abytes);\n\n let c = Cached::load_test_value(&cbytes);\n\n let t = Point2::load_test_value(&tbytes);\n\n let o = PointP1P1::load_test_value(&obytes);\n\n let d = Point2::load_test_value(&dbytes);\n\n let b = Point::load_test_value(&bbytes);\n\n\n\n assert!(!nega && !negc && !negt && !nego && !negd && !negb);\n\n\n\n let myc = Cached::from(&a);\n", "file_path": "src/ed25519/point.rs", "rank": 44, 
"score": 143861.33512360672 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn maddsub() {\n\n let fname = \"testdata/ed25519/maddsub.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && !negd);\n\n let a = Point::load_test_value(abytes);\n\n let b = PointP1P1::load_test_value(bbytes);\n\n let c = Precomp::load_test_value(cbytes);\n\n let d = PointP1P1::load_test_value(dbytes);\n\n\n\n let myb = &a + &c;\n\n assert_eq!(myb, b);\n\n let myd = &a - &c;\n\n assert_eq!(myd, d);\n\n });\n\n}\n", "file_path": "src/ed25519/point.rs", "rank": 45, "score": 143861.33512360672 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn addsub() {\n\n let fname = \"testdata/ed25519/ptaddsub.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && !negd);\n\n let a = Point::load_test_value(abytes);\n\n let b = PointP1P1::load_test_value(bbytes);\n\n let c = Cached::load_test_value(cbytes);\n\n let d = PointP1P1::load_test_value(dbytes);\n\n\n\n let myb = &a + &c;\n\n assert_eq!(myb, b);\n\n let myd = &a - &c;\n\n assert_eq!(myd, d);\n\n });\n\n}\n", "file_path": "src/ed25519/point.rs", "rank": 46, "score": 143861.33512360672 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn addsub() {\n\n let fname = \"testdata/ed25519/addsub.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && 
!negd);\n\n let a = test_from_bytes(&abytes);\n\n let b = test_from_bytes(&bbytes);\n\n let c = test_from_bytes(&cbytes);\n\n let d = test_from_bytes(&dbytes);\n\n let r = &a + &b;\n\n let s = &a - &b;\n\n assert_eq!(r, c, \"field addition\");\n\n assert_eq!(s, d, \"field subtraction\");\n\n });\n\n}\n\n\n", "file_path": "src/ed25519/fe.rs", "rank": 47, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn square2() {\n\n let fname = \"testdata/ed25519/square2.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = a.sq2();\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn pow22523(&self) -> FieldElement\n\n {\n\n let mut t0 = self.square();\n\n let mut t1 = t0.square();\n\n for _ in 1..2 {\n", "file_path": "src/ed25519/fe.rs", "rank": 48, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn invert() {\n\n let fname = \"testdata/ed25519/invert.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = a.invert();\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl<'a> Neg for &'a FieldElement {\n\n type Output = FieldElement;\n\n\n\n fn neg(self) -> FieldElement\n\n {\n\n FieldElement {\n\n value: [ -self.value[0], -self.value[1],\n\n -self.value[2], -self.value[3],\n\n -self.value[4], -self.value[5],\n\n -self.value[6], -self.value[7],\n\n -self.value[8], -self.value[9], ]\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ed25519/fe.rs", "rank": 49, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn negate() {\n\n let fname = \"testdata/ed25519/negate.test\";\n\n 
run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = -&a;\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn cmov(&mut self, g: &FieldElement, bl: bool)\n\n {\n\n let b = if bl { -1 } else { 0 };\n\n for i in 0..10 {\n\n let mut x = self.value[i] ^ g.value[i];\n\n x &= b;\n\n self.value[i] ^= x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ed25519/fe.rs", "rank": 50, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn pow22523() {\n\n let fname = \"testdata/ed25519/pow22523.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = a.pow22523();\n\n assert_eq!(r, c);\n\n });\n\n}\n", "file_path": "src/ed25519/fe.rs", "rank": 51, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn is_tests() {\n\n let fname = \"testdata/ed25519/istests.test\";\n\n run_test(fname.to_string(), 3, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negz, zbytes) = case.get(\"z\").unwrap();\n\n let (negn, nbytes) = case.get(\"n\").unwrap();\n\n\n\n assert!(!nega && !negz && !negn);\n\n let a = test_from_bytes(&abytes);\n\n println!(\"a: {:?}\", a);\n\n let z = zbytes.len() > 1;\n\n let n = nbytes.len() > 1;\n\n assert_eq!(z, a.isnonzero());\n\n assert_eq!(n, a.isnegative());\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn sq2(&self) -> FieldElement\n\n {\n", "file_path": "src/ed25519/fe.rs", "rank": 52, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn cmov() {\n\n let fname = \"testdata/ed25519/cmov.test\";\n\n run_test(fname.to_string(), 3, |case| {\n\n let (nega, 
abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let b = bbytes.len() > 1;\n\n let c = test_from_bytes(&cbytes);\n\n let mut r = FieldElement::new();\n\n r.cmov(&a, b);\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn isnonzero(&self) -> bool\n\n {\n", "file_path": "src/ed25519/fe.rs", "rank": 53, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn mul() {\n\n let fname = \"testdata/ed25519/mul.test\";\n\n run_test(fname.to_string(), 3, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let b = test_from_bytes(&bbytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = &a * &b;\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn square_mut(&mut self)\n\n {\n\n let f0 : i32 = self.value[0];\n", "file_path": "src/ed25519/fe.rs", "rank": 54, "score": 143717.751266401 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn square() {\n\n let fname = \"testdata/ed25519/square.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n\n\n assert!(!nega && !negc);\n\n let a = test_from_bytes(&abytes);\n\n let c = test_from_bytes(&cbytes);\n\n let r = a.square();\n\n assert_eq!(r, c);\n\n });\n\n}\n\n\n\nimpl FieldElement {\n\n pub fn invert(&self) -> FieldElement\n\n {\n\n let mut t0 = self.square();\n\n let mut t1 = t0.square();\n\n for _ in 1..2 {\n", "file_path": "src/ed25519/fe.rs", "rank": 55, "score": 143717.751266401 }, { "content": "pub fn decode_biguint(b: &ASN1Block) -> Result<BigUint,RSAError> {\n\n match b {\n\n &ASN1Block::Integer(_, _, ref v) 
=> {\n\n match v.to_biguint() {\n\n Some(sn) => Ok(sn),\n\n _ => Err(RSAError::InvalidKey)\n\n }\n\n }\n\n _ =>\n\n Err(RSAError::ASN1DecodeErr(ASN1DecodeErr::EmptyBuffer))\n\n }\n\n}\n\n\n\n\n", "file_path": "src/rsa/core.rs", "rank": 56, "score": 143662.44256162646 }, { "content": "/// Encode a supported key into the given file, with the given comment.\n\npub fn write_ssh_keyfile<KP,P>(path: P, x: &KP, comment: &str) -> Result<(),SSHKeyRenderError>\n\n where\n\n KP: SSHKey,\n\n P: AsRef<Path>\n\n{\n\n let mut file = File::create(path)?;\n\n let contents = encode_ssh(x, comment)?;\n\n let bytes = contents.into_bytes();\n\n file.write_all(&bytes)?;\n\n file.sync_all()?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nuse dsa::{DSAKeyPair,DSAPublicKey,L1024N160};\n\n#[cfg(test)]\n\nuse ecdsa::ECDSAPair;\n\n#[cfg(test)]\n\nuse ed25519::ED25519KeyPair;\n\n#[cfg(test)]\n\nuse rsa::{RSAPair,RSAPublic,SIGNING_HASH_SHA256};\n\n#[cfg(test)]\n\nuse sha::SHA256;\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 59, "score": 141786.36419306387 }, { "content": "#[cfg(test)]\n\n#[derive(Clone,Debug)]\n\nstruct ValidFieldElement {\n\n values: FieldElement\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Arbitrary for ValidFieldElement {\n\n fn arbitrary<G: Gen>(g: &mut G) -> ValidFieldElement\n\n {\n\n let mut bytes = [0; 32];\n\n g.fill_bytes(&mut bytes);\n\n curve25519_scalar_mask(&mut bytes);\n\n ValidFieldElement{\n\n values: FieldElement::from_bytes(&bytes)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ed25519/fe.rs", "rank": 60, "score": 140892.14949255396 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn scalarmult_base() {\n\n let fname = \"testdata/ed25519/scalar_mult.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n assert!(!nega && !negb);\n\n let b = Point::load_test_value(bbytes);\n\n let mine = Point::scalarmult_base(&abytes);\n\n assert_eq!(mine, b);\n\n });\n\n}\n\n\n", "file_path": 
"src/ed25519/point.rs", "rank": 61, "score": 139507.72039316336 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn double_scalarmult() {\n\n let fname = \"testdata/ed25519/scalar_mult_gen.test\";\n\n run_test(fname.to_string(), 4, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n let (negc, cbytes) = case.get(\"c\").unwrap();\n\n let (negd, dbytes) = case.get(\"d\").unwrap();\n\n\n\n assert!(!nega && !negb && !negc && !negd);\n\n let b = Point::load_test_value(bbytes);\n\n let d = Point2::load_test_value(dbytes);\n\n let mine = Point2::double_scalarmult_vartime(&abytes, &b, &cbytes);\n\n assert_eq!(mine, d);\n\n });\n\n}\n\n\n", "file_path": "src/ed25519/point.rs", "rank": 62, "score": 139507.72039316336 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn helper_slide() {\n\n let fname = \"testdata/ed25519/slide.test\";\n\n run_test(fname.to_string(), 2, |case| {\n\n let (nega, abytes) = case.get(\"a\").unwrap();\n\n let (negb, bbytes) = case.get(\"b\").unwrap();\n\n\n\n assert!(!nega && !negb);\n\n let mut mine = [0; 256];\n\n slide(&mut mine, &abytes);\n\n for i in 0..256 {\n\n assert_eq!(mine[i], bbytes[i] as i8);\n\n }\n\n });\n\n}\n\n\n\nimpl Point2\n\n{\n\n /* r = a * A + b * B\n\n * where a = a[0]+256*a[1]+...+256^31 a[31].\n\n * and b = b[0]+256*b[1]+...+256^31 b[31].\n", "file_path": "src/ed25519/point.rs", "rank": 63, "score": 139507.72039316336 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn ed25519_examples() {\n\n let test_files = [\"ed25519-1\", \"ed25519-2\", \"ed25519-3\"];\n\n\n\n for file in test_files.iter() {\n\n let path = format!(\"testdata/ssh/{}\",file);\n\n match load_ssh_keyfile::<ED25519KeyPair,String>(path) {\n\n Err(e) =>\n\n assert!(false, \"SSH ED25519 parse error: {:?}\", e),\n\n Ok((keypair,comment)) => {\n\n // first see if this roundtrips\n\n let buffer = vec![0,1,2,4,5,6,9];\n\n let sig = keypair.private.sign(&buffer);\n\n assert!(keypair.public.verify(&buffer, &sig));\n\n 
match encode_ssh(&keypair, &comment) {\n\n Err(e) =>\n\n assert!(false, \"SSH ED25519 encoding error: {:?}\", e),\n\n Ok(coded) => {\n\n match decode_ssh(&coded) {\n\n Err(e) =>\n\n assert!(false, \"SSSH ECDSA redecoding error: {:?}\", e),\n", "file_path": "src/ssh/mod.rs", "rank": 64, "score": 139287.9669128421 }, { "content": "fn decode_rsa_key(x: &ASN1Block) -> Result<RSAPublic,X509ParseError>\n\n{\n\n if let &ASN1Block::BitString(_, _, _, ref bstr) = x {\n\n der_decode(bstr).map_err(|x| X509ParseError::RSAError(x))\n\n } else {\n\n Err(X509ParseError::NotEnoughData)\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n//\n\n// DSA Public Key encoding / decoding\n\n//\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/x509/publickey.rs", "rank": 65, "score": 124142.18239887651 }, { "content": "fn render_number<O,N>(bitlen: usize, out: &mut O, val: &N) -> Result<(),SSHKeyRenderError>\n\n where\n\n O: Write,\n\n N: Encoder\n\n{\n\n let mut outvec = Vec::new();\n\n outvec.write(&val.to_bytes())?;\n\n while outvec.len() < ((bitlen + 7) / 8) { outvec.insert(0,0); }\n\n while outvec.len() > ((bitlen + 7) / 8) { outvec.remove(0); }\n\n out.write(&outvec)?;\n\n Ok(())\n\n}\n", "file_path": "src/ssh/ecdsa.rs", "rank": 66, "score": 119568.86405816901 }, { "content": "fn encode_rsa_key(c: ASN1Class, x: &RSAPublic) -> Result<ASN1Block,ASN1EncodeErr>\n\n{\n\n let objoid = ASN1Block::ObjectIdentifier(c, 0, oid!(1,2,840,113549,1,1,1));\n\n let bstr = der_encode(x)?;\n\n let objkey = ASN1Block::BitString(c, 0, bstr.len() * 8, bstr);\n\n Ok(ASN1Block::Sequence(c, 0, vec![objoid, objkey]))\n\n}\n\n\n", "file_path": "src/x509/publickey.rs", "rank": 67, "score": 114915.47227471182 }, { "content": "fn encode_ecdsa_key(c: ASN1Class, x: &ECDSAPublic) -> Result<ASN1Block,ECDSAEncodeErr>\n\n{\n\n let objoid = ASN1Block::ObjectIdentifier(c, 0, oid!(1,2,840,10045,2,1));\n\n let 
(base_curve_oid, mut keyvec) = match x {\n\n ECDSAPublic::P192(k) => (oid!(1,2,840,10045,3,1,1), k.to_asn1_class(c)?),\n\n ECDSAPublic::P224(k) => (oid!(1,3,132,0,33), k.to_asn1_class(c)?),\n\n ECDSAPublic::P256(k) => (oid!(1,2,840,10045,3,1,7), k.to_asn1_class(c)?),\n\n ECDSAPublic::P384(k) => (oid!(1,3,132,0,34), k.to_asn1_class(c)?),\n\n ECDSAPublic::P521(k) => (oid!(1,3,132,0,35), k.to_asn1_class(c)?),\n\n };\n\n let curve_oid = ASN1Block::ObjectIdentifier(c, 0, base_curve_oid);\n\n let header = ASN1Block::Sequence(c, 0, vec![objoid, curve_oid]);\n\n keyvec.insert(0, header);\n\n Ok(ASN1Block::Sequence(c, 0, keyvec))\n\n}\n\n\n", "file_path": "src/x509/publickey.rs", "rank": 68, "score": 114915.47227471182 }, { "content": "fn encode_dsa_key(c: ASN1Class, x: &DSAPublic) -> Result<ASN1Block,ASN1EncodeErr>\n\n{\n\n let objoid = ASN1Block::ObjectIdentifier(c, 0, oid!(1,2,840,10040,4,1));\n\n let (mut objparams, bstr) = match x {\n\n DSAPublic::DSAPublicL1024N160(x) => (x.params.to_asn1_class(c)?, der_encode(x)?),\n\n DSAPublic::DSAPublicL2048N224(x) => (x.params.to_asn1_class(c)?, der_encode(x)?),\n\n DSAPublic::DSAPublicL2048N256(x) => (x.params.to_asn1_class(c)?, der_encode(x)?),\n\n DSAPublic::DSAPublicL3072N256(x) => (x.params.to_asn1_class(c)?, der_encode(x)?)\n\n };\n\n objparams.insert(0, objoid);\n\n let headinfo = ASN1Block::Sequence(c, 0, objparams);\n\n let objkey = ASN1Block::BitString(c, 0, bstr.len() * 8, bstr);\n\n Ok(ASN1Block::Sequence(c, 0, vec![headinfo, objkey]))\n\n}\n\n\n", "file_path": "src/x509/publickey.rs", "rank": 69, "score": 114915.47227471182 }, { "content": "/// A trait defining keys that can be parsed / rendered by this library. Note\n\n/// that you probably don't want to use these routines directly; they're mostly\n\n/// used by the internal functions. Perhaps the only reason to use them is to\n\n/// implement them, because you've got another kind of key you want to parse that\n\n/// isn't already part of the library. 
(In that case, though ... maybe send a\n\n/// patch?)\n\npub trait SSHKey: Sized + KeyPair {\n\n /// Return true if the given string is a valid key type identifier for this\n\n /// key type. (i.e., \"ssh-ed25519\" is the identifier for ED25519, and \"dss\"\n\n /// and \"ssh-dss\" are both valid identifiers for DSA keys.)\n\n fn valid_keytype(s: &str) -> bool;\n\n\n\n /// Parse the public blob info within an SSH blob. I strongly recommend\n\n /// using the functions in `ssh::frame` for this.\n\n fn parse_ssh_public_info<I: Read>(inp: &mut I) -> Result<Self::Public,SSHKeyParseError>;\n\n /// Parse the private blob info within an SSH blob. I strongly recommend\n\n /// using the functions in `ssh::frame` for this.\n\n fn parse_ssh_private_info<I: Read>(inp: &mut I) -> Result<(Self::Private,String),SSHKeyParseError>;\n\n\n\n /// Render the public blob info within an SSH blob. I strongly recommend\n\n /// using the functions in `ssh::frame` for this.\n\n fn render_ssh_public_info<O: Write>(&self, out: &mut O) -> Result<(),SSHKeyRenderError>;\n\n /// Render the private blob info within an SSH blob. 
I strongly recommend\n\n /// using the functions in `ssh::frame` for this.\n\n fn render_ssh_private_info<O: Write>(&self, out: &mut O) -> Result<(),SSHKeyRenderError>;\n\n}\n\n\n", "file_path": "src/ssh/mod.rs", "rank": 70, "score": 114865.60634656448 }, { "content": "pub fn decode_signature(x: &ASN1Block)\n\n -> Result<Vec<u8>,X509ParseError>\n\n{\n\n match x {\n\n &ASN1Block::BitString(_, _, size, ref sig) if size % 8 == 0 => {\n\n Ok(sig.to_vec())\n\n }\n\n _ =>\n\n Err(X509ParseError::SignatureNotFound)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use quickcheck::{Arbitrary,Gen};\n\n use rand::Rng;\n\n use rand::distributions::Uniform;\n\n use super::*;\n\n\n\n fn check_version_roundtrip(v: X509Version) {\n", "file_path": "src/x509/misc.rs", "rank": 71, "score": 113155.93091869683 }, { "content": "fn bits2octets<X>(x: &[u8], q: &X, qlen: usize) -> Vec<u8>\n\n where\n\n X: Clone + Decoder + Encoder + PartialOrd + Sub<Output=X> + Shr<usize,Output=X>\n\n{\n\n let z1: X = bits2int(x, qlen);\n\n let res = if &z1 > q { z1 - q.clone() } else { z1 };\n\n int2octets(&res, qlen)\n\n}\n\n\n", "file_path": "src/dsa/rfc6979.rs", "rank": 72, "score": 112791.39260740878 }, { "content": "pub fn decode_algorithm_ident(x: &ASN1Block)\n\n -> Result<AlgorithmIdentifier,X509ParseError>\n\n{\n\n // AlgorithmIdentifier ::= SEQUENCE {\n\n // algorithm OBJECT IDENTIFIER,\n\n // parameters ANY DEFINED BY algorithm OPTIONAL }\n\n match x {\n\n &ASN1Block::Sequence(_, _, ref v) if v.len() >= 1 => {\n\n match v[0] {\n\n ASN1Block::ObjectIdentifier(_, _, ref oid) => {\n\n if oid == oid!(1,2,840,113549,1,1,5) {\n\n return Ok(AlgorithmIdentifier {\n\n hash: HashAlgorithm::SHA1,\n\n algo: PublicKeyInfo::RSA\n\n });\n\n }\n\n if oid == oid!(1,2,840,113549,1,1,11) {\n\n return Ok(AlgorithmIdentifier {\n\n hash: HashAlgorithm::SHA256,\n\n algo: PublicKeyInfo::RSA\n", "file_path": "src/x509/algident.rs", "rank": 73, "score": 110947.3135989643 }, { "content": "fn decode_dsa_key(info: 
ASN1Block, key: &ASN1Block) -> Result<DSAPublic,X509ParseError>\n\n{\n\n if let ASN1Block::Sequence(_, _, pqg) = info {\n\n if pqg.len() != 3 { return Err(X509ParseError::InvalidDSAInfo); }\n\n\n\n let puint = decode_biguint(&pqg[0])?;\n\n let guint = decode_biguint(&pqg[1])?;\n\n let quint = decode_biguint(&pqg[2])?;\n\n\n\n if puint.bits() > 2048 {\n\n let p = U3072::from_num(&puint).ok_or(X509ParseError::InvalidDSAInfo)?;\n\n let q = U3072::from_num(&quint).ok_or(X509ParseError::InvalidDSAInfo)?;\n\n let g = U256::from_num(&guint).ok_or(X509ParseError::InvalidDSAInfo)?;\n\n let params = L3072N256::new(p, q, g);\n\n\n\n if let ASN1Block::BitString(_, _, _, ybstr) = key {\n\n let blocks = from_der(ybstr)?;\n\n let (iblk,_) = blocks.split_first().ok_or(X509ParseError::InvalidDSAKey)?;\n\n if let ASN1Block::Integer(_,_,ynum) = iblk {\n\n let y = U3072::from_num(ynum).ok_or(X509ParseError::InvalidDSAKey)?;\n", "file_path": "src/x509/publickey.rs", "rank": 74, "score": 110874.76520680508 }, { "content": "fn decode_ecdsa_key(info: ASN1Block, keybls: &[ASN1Block]) -> Result<ECDSAPublic,X509ParseError>\n\n{\n\n if let ASN1Block::ObjectIdentifier(_, _, oid) = info {\n\n if oid == oid!(1,2,840,10045,3,1,1) {\n\n let (res, _) = ECCPublicKey::<P192>::from_asn1(keybls)?;\n\n return Ok(ECDSAPublic::P192(res));\n\n }\n\n\n\n if oid == oid!(1,3,132,0,33) {\n\n let (res, _) = ECCPublicKey::<P224>::from_asn1(keybls)?;\n\n return Ok(ECDSAPublic::P224(res));\n\n }\n\n\n\n if oid == oid!(1,2,840,10045,3,1,7) {\n\n let (res, _) = ECCPublicKey::<P256>::from_asn1(keybls)?;\n\n return Ok(ECDSAPublic::P256(res));\n\n }\n\n\n\n if oid == oid!(1,3,132,0,34) {\n\n let (res, _) = ECCPublicKey::<P384>::from_asn1(keybls)?;\n", "file_path": "src/x509/publickey.rs", "rank": 75, "score": 110874.76520680508 }, { "content": "pub trait ECCPoint : Sized {\n\n type Curve: EllipticCurve;\n\n type Scale;\n\n\n\n fn default() -> Self;\n\n fn negate(&self) -> Self;\n\n fn double(&self) -> Self;\n\n fn 
add(&self, other: &Self) -> Self;\n\n fn scale(&self, amt: &Self::Scale) -> Self;\n\n fn double_scalar_mult(x1: &Self::Scale, p1: &Self, x2: &Self::Scale, p2: &Self) -> Self\n\n {\n\n // FIXME: Replace this with something not stupid.\n\n let big1 = p1.scale(x1);\n\n let big2 = p2.scale(x2);\n\n big1.add(&big2)\n\n }\n\n}\n\n\n\n#[derive(Debug,PartialEq)]\n\npub struct Point<T: EllipticCurve>\n", "file_path": "src/ecdsa/point.rs", "rank": 76, "score": 107474.04060277296 }, { "content": "fn next_test_case(contents: &mut Lines, lines: usize) ->\n\n Option<HashMap<String,(bool,Vec<u8>)>>\n\n{\n\n let mut res = HashMap::new();\n\n let mut count = 0;\n\n\n\n while count < lines {\n\n let line = contents.next()?;\n\n let (key, neg, val) = next_value_set(line);\n\n res.insert(key, (neg,val));\n\n count += 1;\n\n }\n\n\n\n Some(res)\n\n}\n\n\n", "file_path": "src/testing.rs", "rank": 77, "score": 103442.64868933224 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn ecdsa_examples() {\n\n let test_files = [\"ecdsa256-1\", \"ecdsa256-2\", \"ecdsa256-3\",\n\n \"ecdsa384-1\", \"ecdsa384-2\", \"ecdsa384-3\",\n\n \"ecdsa521-1\", \"ecdsa521-2\", \"ecdsa521-3\"];\n\n\n\n for file in test_files.iter() {\n\n let path = format!(\"testdata/ssh/{}\",file);\n\n match load_ssh_keyfile::<ECDSAPair,String>(path) {\n\n Err(e) =>\n\n assert!(false, \"SSH ECDSA parse error: {:?}\", e),\n\n Ok((keypair,comment)) => {\n\n // first see if this roundtrips\n\n let buffer = vec![0,1,2,4,5,6,9];\n\n match keypair {\n\n ECDSAPair::P192(_,_) =>\n\n assert!(false, \"Somehow got a P192 in read test\"),\n\n ECDSAPair::P224(_,_) =>\n\n assert!(false, \"Somehow got a P224 in read test\"),\n\n ECDSAPair::P256(ref pu, ref pr) => {\n\n let sig = pr.sign::<SHA256>(&buffer);\n", "file_path": "src/ssh/mod.rs", "rank": 78, "score": 102952.70653005694 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn rsa_examples() {\n\n let test_files = [\"rsa1024-1\", \"rsa1024-2\", \"rsa1024-3\",\n\n \"rsa2048-1\", \"rsa2048-2\", 
\"rsa2048-3\",\n\n \"rsa3072-1\", \"rsa3072-2\", \"rsa3072-3\",\n\n \"rsa4096-1\", \"rsa4096-2\", \"rsa4096-3\",\n\n \"rsa8192-1\", \"rsa8192-2\", \"rsa8192-3\"];\n\n\n\n for file in test_files.iter() {\n\n let path = format!(\"testdata/ssh/{}\",file);\n\n let mkeypair = load_ssh_keyfile::<RSAPair,String>(path);\n\n match mkeypair {\n\n Err(e) => assert!(false, format!(\"reading error: {:?}\", e)),\n\n Ok((keypair, comment)) => {\n\n let buffer = [0,1,2,3,4,6,2];\n\n let sig = keypair.sign(&SIGNING_HASH_SHA256, &buffer);\n\n assert!(keypair.verify(&SIGNING_HASH_SHA256, &buffer, &sig));\n\n match encode_ssh(&keypair, &comment) {\n\n Err(e2) => assert!(false, format!(\"render error: {:?}\", e2)),\n\n Ok(encodedstr) => {\n\n match decode_ssh(&encodedstr) {\n", "file_path": "src/ssh/mod.rs", "rank": 79, "score": 102952.70653005694 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn dsa_examples() {\n\n let test_files = [\"dsa1024-1\", \"dsa1024-2\", \"dsa1024-3\"];\n\n\n\n for file in test_files.iter() {\n\n let path = format!(\"testdata/ssh/{}\",file);\n\n let mkeypair = load_ssh_keyfile(path);\n\n match mkeypair {\n\n Err(e) => assert!(false, format!(\"reading error: {:?}\", e)),\n\n Ok((keypair, comment)) => {\n\n let buffer = [0,1,2,3,4,6,2];\n\n let _ : DSAKeyPair<L1024N160> = keypair;\n\n let sig = keypair.private.sign::<SHA256>(&buffer);\n\n assert!(keypair.public.verify::<SHA256>(&buffer, &sig));\n\n let buffer2 = [0,1,2,3,4,6,5];\n\n assert!(!keypair.public.verify::<SHA256>(&buffer2, &sig));\n\n match encode_ssh(&keypair, &comment) {\n\n Err(e2) => assert!(false, format!(\"render error: {:?}\", e2)),\n\n Ok(encodedstr) => {\n\n match decode_ssh(&encodedstr) {\n\n Err(e3) => assert!(false, format!(\"reparse error: {:?}\", e3)),\n", "file_path": "src/ssh/mod.rs", "rank": 80, "score": 102952.70653005694 }, { "content": "fn hash<T>(x: &T, len: usize) -> Vec<u8>\n\n where T: Encoder\n\n{\n\n let mut base = x.to_bytes();\n\n let bytelen = len / 8;\n\n while base.len() < 
bytelen {\n\n base.insert(0,0);\n\n }\n\n SHA256::hash(&base)\n\n}", "file_path": "src/dsa/params.rs", "rank": 81, "score": 98430.60198289879 }, { "content": "pub fn build_test_path(dir: &str, typename: &str) -> String\n\n{\n\n format!(\"testdata/{}/{}.test\", dir, typename)\n\n}\n\n\n", "file_path": "src/testing.rs", "rank": 82, "score": 98258.3257340132 }, { "content": "// Calculate the value `k` used in the padding for all the hashes, solving the\n\n// equation (l + 1 + k) mod b = a.\n\npub fn calculate_k(a: usize, b: usize, l: usize) -> usize\n\n{\n\n (a - (l + 1)) % b\n\n}\n\n\n\n#[cfg(test)]\n\nquickcheck!\n\n{\n\n fn maj_rewrite_ok(x: u64, y: u64, z: u64) -> bool\n\n {\n\n let orig = (x & y) ^ (x & z) ^ (y & z);\n\n maj!(x, y, z) == orig\n\n }\n\n\n\n // Note, these two laws hold because we hash with bytes as the atomic size,\n\n // not bits. If we hashed true bit streams, we'd be in trouble.\n\n fn sha1_k_plus_1_multiple_of_8(lbytes: usize) -> bool\n\n {\n\n let l = lbytes * 8;\n\n (calculate_k(448,512,l) + 1) % 8 == 0\n", "file_path": "src/sha/shared.rs", "rank": 83, "score": 98122.13649764382 }, { "content": "fn get_printable_val(a: &ASN1Block) -> Result<String,X509ParseError>\n\n{\n\n match a {\n\n &ASN1Block::PrintableString(_,_,ref v) => Ok(v.clone()),\n\n _ =>\n\n Err(X509ParseError::IllegalStringValue)\n\n }\n\n}\n\n\n", "file_path": "src/x509/atv.rs", "rank": 84, "score": 97519.02001006427 }, { "content": "fn get_ia5_val(a: &ASN1Block) -> Result<String,X509ParseError>\n\n{\n\n match a {\n\n &ASN1Block::IA5String(_,_,ref v) => Ok(v.clone()),\n\n _ =>\n\n Err(X509ParseError::IllegalStringValue)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use quickcheck::{Arbitrary,Gen};\n\n use rand::Rng;\n\n use rand::prelude::SliceRandom;\n\n use std::iter::FromIterator;\n\n use super::*;\n\n\n\n impl Arbitrary for X520Name {\n\n fn arbitrary<G: Gen>(g: &mut G) -> X520Name {\n\n let names = vec![X520Name::Name,\n", "file_path": "src/x509/atv.rs", "rank": 85, 
"score": 97519.02001006427 }, { "content": "fn get_string_val(a: &ASN1Block) -> Result<String,X509ParseError>\n\n{\n\n match a {\n\n &ASN1Block::TeletexString(_,_,ref v) => Ok(v.clone()),\n\n &ASN1Block::PrintableString(_,_,ref v) => Ok(v.clone()),\n\n &ASN1Block::UniversalString(_,_,ref v) => Ok(v.clone()),\n\n &ASN1Block::UTF8String(_,_,ref v) => Ok(v.clone()),\n\n &ASN1Block::BMPString(_,_,ref v) => Ok(v.clone()),\n\n _ =>\n\n Err(X509ParseError::IllegalStringValue)\n\n }\n\n}\n\n\n", "file_path": "src/x509/atv.rs", "rank": 86, "score": 97519.02001006427 }, { "content": "fn decode_biguint(b: &ASN1Block) -> Result<BigUint,X509ParseError> {\n\n match b {\n\n &ASN1Block::Integer(_, _, ref v) => {\n\n match v.to_biguint() {\n\n Some(sn) => Ok(sn),\n\n _ => Err(X509ParseError::InvalidDSAInfo)\n\n }\n\n }\n\n _ =>\n\n Err(X509ParseError::InvalidDSAInfo)\n\n }\n\n}", "file_path": "src/x509/publickey.rs", "rank": 87, "score": 97519.02001006427 }, { "content": "fn int2octets<X>(x: &X, qlen_bits: usize) -> Vec<u8>\n\n where X: Encoder\n\n{\n\n let qlen_bytes = (qlen_bits + 7) / 8;\n\n let mut base = x.to_bytes();\n\n\n\n while base.len() < qlen_bytes {\n\n base.insert(0,0);\n\n }\n\n\n\n while base.len() > qlen_bytes {\n\n base.remove(0);\n\n }\n\n\n\n base\n\n}\n\n\n", "file_path": "src/dsa/rfc6979.rs", "rank": 88, "score": 96485.57679642003 }, { "content": "/// A generic trait for defining what a key pair looks like. 
This is useful\n\n/// in a couple places in which we want to define code regardless of the\n\n/// kind of key it is, but is unlikely to be hugely useful to users of the\n\n/// library.\n\npub trait KeyPair {\n\n /// The type of the public key of this pair.\n\n type Public;\n\n /// The type of the private key of this pair.\n\n type Private;\n\n\n\n /// Generate a key pair given the provided public and private keys.\n\n fn new(pbl: Self::Public, prv: Self::Private) -> Self;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 89, "score": 96295.86448808179 }, { "content": "pub fn run_test<F>(fname: String, i: usize, f: F)\n\n where F: Fn(HashMap<String,(bool,Vec<u8>)>)\n\n{\n\n let mut file = File::open(fname).unwrap();\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).unwrap();\n\n let mut iter = contents.lines();\n\n\n\n while let Some(scase) = next_test_case(&mut iter, i) {\n\n f(scase);\n\n }\n\n}\n", "file_path": "src/testing.rs", "rank": 90, "score": 96044.47249072453 }, { "content": "fn decode_validity_data(bs: &ASN1Block) -> Result<Validity,X509ParseError> {\n\n // Validity ::= SEQUENCE {\n\n // notBefore Time,\n\n // notAfter Time }\n\n match bs {\n\n &ASN1Block::Sequence(_, _, ref valxs) => {\n\n if valxs.len() != 2 {\n\n return Err(X509ParseError::IllFormedValidity);\n\n }\n\n let nb = get_time(&valxs[0])?;\n\n let na = get_time(&valxs[1])?;\n\n Ok(Validity{ not_before: nb, not_after: na })\n\n }\n\n _ =>\n\n Err(X509ParseError::IllFormedValidity)\n\n }\n\n}\n\n\n\nimpl FromASN1 for Validity {\n\n type Error = X509ParseError;\n", "file_path": "src/x509/validity.rs", "rank": 91, "score": 95893.19613301542 }, { "content": "fn get_time(b: &ASN1Block) -> Result<DateTime<Utc>, X509ParseError> {\n\n match b {\n\n &ASN1Block::UTCTime(_, _, v) => Ok(v.clone()),\n\n &ASN1Block::GeneralizedTime(_, _, v) => Ok(v.clone()),\n\n _ =>\n\n Err(X509ParseError::IllFormedValidity)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
chrono::TimeZone;\n\n use chrono::offset::LocalResult;\n\n use quickcheck::{Arbitrary,Gen};\n\n use rand::Rng;\n\n use super::*;\n\n\n\n fn arbitrary_date<G: Gen>(g: &mut G) -> DateTime<Utc> {\n\n loop {\n\n let y = g.gen_range(1900,3000);\n", "file_path": "src/x509/validity.rs", "rank": 92, "score": 93376.95209300052 }, { "content": "fn decode_certificate(x: &ASN1Block)\n\n -> Result<GenericCertificate,X509ParseError>\n\n{\n\n //\n\n // TBSCertificate ::= SEQUENCE {\n\n // version [0] Version DEFAULT v1,\n\n // serialNumber CertificateSerialNumber,\n\n // signature AlgorithmIdentifier,\n\n // issuer Name,\n\n // validity Validity,\n\n // subject Name,\n\n // subjectPublicKeyInfo SubjectPublicKeyInfo,\n\n // issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,\n\n // -- If present, version MUST be v2 or v3\n\n // subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL,\n\n // -- If present, version MUST be v2 or v3\n\n // extensions [3] Extensions OPTIONAL\n\n // -- If present, version MUST be v3 -- }\n\n //\n\n match x {\n", "file_path": "src/x509/mod.rs", "rank": 93, "score": 92942.85477322218 }, { "content": "fn encode_attribute_type_value(c: ASN1Class, x: &AttributeTypeValue)\n\n -> Result<ASN1Block,ASN1EncodeErr>\n\n{\n\n let mut resvec = x.attrtype.to_asn1_class(c)?;\n\n let value = match x.attrtype {\n\n X520Name::CountryName =>\n\n ASN1Block::PrintableString(c,0,x.value.clone()),\n\n X520Name::SerialNumber =>\n\n ASN1Block::PrintableString(c,0,x.value.clone()),\n\n X520Name::DomainComponent =>\n\n ASN1Block::IA5String(c,0,x.value.clone()),\n\n X520Name::EmailAddress =>\n\n ASN1Block::IA5String(c,0,x.value.clone()),\n\n _ =>\n\n ASN1Block::UTF8String(c,0,x.value.clone())\n\n };\n\n resvec.push(value);\n\n Ok(ASN1Block::Sequence(c, 0, resvec))\n\n}\n\n\n", "file_path": "src/x509/atv.rs", "rank": 94, "score": 91152.75050521256 }, { "content": "fn check_signature(alg: &AlgorithmIdentifier,\n\n key: &X509PublicKey,\n\n block: &[u8],\n\n sig: Vec<u8>)\n\n -> 
Result<(),X509ParseError>\n\n{\n\n match (alg.algo, key) {\n\n (PublicKeyInfo::RSA, &X509PublicKey::RSA(ref key)) => {\n\n let sighash = match alg.hash {\n\n HashAlgorithm::SHA1 => &SIGNING_HASH_SHA1,\n\n HashAlgorithm::SHA224 => &SIGNING_HASH_SHA224,\n\n HashAlgorithm::SHA256 => &SIGNING_HASH_SHA256,\n\n HashAlgorithm::SHA384 => &SIGNING_HASH_SHA384,\n\n HashAlgorithm::SHA512 => &SIGNING_HASH_SHA512,\n\n };\n\n\n\n if !key.verify(sighash, block, &sig) {\n\n return Err(X509ParseError::RSASignatureWrong);\n\n }\n\n\n", "file_path": "src/x509/mod.rs", "rank": 95, "score": 90589.18380826051 }, { "content": "void GFp_x25519_public_from_private(uint8_t out_public_value[32],\n", "file_path": "test-generator/cbits/curve25519.c", "rank": 96, "score": 90071.06632518287 }, { "content": "fn decode_attribute_type_value(x: &ASN1Block)\n\n -> Result<AttributeTypeValue,X509ParseError>\n\n{\n\n // AttributeTypeAndValue ::= SEQUENCE {\n\n // type AttributeType,\n\n // value AttributeValue }\n\n match x {\n\n &ASN1Block::Sequence(_, _, ref xs) => {\n\n let (name, rest) = X520Name::from_asn1(xs)?;\n\n match rest.first() {\n\n None => Err(X509ParseError::NotEnoughData),\n\n Some(ref x) => {\n\n let atvstr = get_atv_string(name, x)?;\n\n Ok(AttributeTypeValue{\n\n attrtype: name,\n\n value: atvstr\n\n })\n\n }\n\n }\n\n }\n", "file_path": "src/x509/atv.rs", "rank": 97, "score": 88895.97103664314 }, { "content": "fn diff<T>(a: &T, b: &T) -> T\n\n where\n\n T: Clone + PartialOrd,\n\n T: Sub<T,Output=T>\n\n{\n\n if a > b {\n\n a.clone() - b.clone()\n\n } else {\n\n b.clone() - a.clone()\n\n }\n\n}\n\n\n\n/// An RSA key pair containing keys of the given size; keeping them in the\n\n/// type means you'll never forget which one you have.\n\n/// \n\n/// As an aside:\n\n/// * `U512` should only be used for testing\n\n/// * `U1024` should only be used to support old protocols or devices\n\n/// * `U2048` is probably your bare minimum\n\n/// * `U3072` is a very reasonable choice\n", "file_path": 
"src/rsa/mod.rs", "rank": 98, "score": 84681.24689799709 }, { "content": "#[cfg(test)]\n\nuse cryptonum::unsigned::{Decoder,U192};\n\n#[cfg(test)]\n\nuse testing::run_test;\n\n\n", "file_path": "src/ed25519/loads.rs", "rank": 99, "score": 80527.46622001874 } ]
Rust
src/parallelhash.rs
quininer/sp800-185
04472c21e6b92956983fff9178452ee38a9d9c80
use tiny_keccak::{ Keccak, XofReader }; use rayon::prelude::*; use ::cshake::CShake; use ::utils::{ left_encode, right_encode }; #[derive(Clone)] pub struct ParallelHash { inner: CShake, buf: Vec<u8>, n: u64, rate: usize, blocksize: usize } impl ParallelHash { #[inline] pub fn new_parallelhash128(custom: &[u8], blocksize: usize) -> Self { let mut hasher = ParallelHash { inner: CShake::new_cshake128(b"ParallelHash", custom), buf: Vec::new(), n: 0, rate: 128, blocksize }; hasher.init(); hasher } #[inline] pub fn new_parallelhash256(custom: &[u8], blocksize: usize) -> Self { let mut hasher = ParallelHash { inner: CShake::new_cshake256(b"ParallelHash", custom), buf: Vec::new(), n: 0, rate: 256, blocksize }; hasher.init(); hasher } fn init(&mut self) { let mut encbuf = [0; 9]; let pos = left_encode(&mut encbuf, self.blocksize as u64); self.inner.update(&encbuf[pos..]); } pub fn update(&mut self, buf: &[u8]) { let rate = self.rate; let pos = if !self.buf.is_empty() { let len = self.blocksize - self.buf.len(); if buf.len() < len { self.buf.extend_from_slice(buf); return; } else { let mut encbuf = vec![0; rate / 4]; let mut shake = Keccak::new(200 - rate / 4, 0x1f); shake.update(&self.buf); shake.update(&buf[..len]); shake.finalize(&mut encbuf); self.inner.update(&encbuf); self.buf.clear(); self.n += 1; } len } else { 0 }; let bufs = buf[pos..].par_chunks(self.blocksize) .map(|chunk| if chunk.len() < self.blocksize { (false, chunk.into()) } else { let mut encbuf = vec![0; rate / 4]; let mut shake = Keccak::new(200 - rate / 4, 0x1f); shake.update(chunk); shake.finalize(&mut encbuf); (true, encbuf) }) .collect::<Vec<_>>(); for (is_hashed, mut buf) in bufs { if is_hashed { self.inner.update(&buf); self.n += 1; } else { self.buf.append(&mut buf); } } } #[inline] pub fn finalize(mut self, buf: &mut [u8]) { self.with_bitlength(buf.len() as u64 * 8); self.inner.finalize(buf) } #[inline] pub fn xof(mut self) -> XofReader { self.with_bitlength(0); self.inner.xof() } #[inline] fn 
with_bitlength(&mut self, bitlength: u64) { if !self.buf.is_empty() { let mut encbuf = vec![0; self.rate / 4]; let mut shake = Keccak::new(200 - self.rate / 4, 0x1f); shake.update(&self.buf); shake.finalize(&mut encbuf); self.inner.update(&encbuf); self.buf.clear(); self.n += 1; } let mut encbuf = [0; 9]; let pos = right_encode(&mut encbuf, self.n); self.inner.update(&encbuf[pos..]); let pos = right_encode(&mut encbuf, bitlength); self.inner.update(&encbuf[pos..]); } }
use tiny_keccak::{ Keccak, XofReader }; use rayon::prelude::*; use ::cshake::CShake; use ::utils::{ left_encode, right_encode }; #[derive(Clone)] pub struct ParallelHash { inner: CShake, buf: Vec<u8>, n: u64, rate: usize, blocksize: usize } impl ParallelHash { #[inline] pub fn new_parallelhash128(custom: &[u8], blocksize: usize) -> Self { let mut hasher = ParallelHash { inner: CShake::new_cshake128(b"ParallelHash", custom), buf: Vec::new(), n: 0, rate: 128, blocksize }; hasher.init(); hasher } #[inline] pub fn new_parallelhash256(custom: &[u8], blocksize: usize) -> Self { let mut hasher = ParallelHash { inner: CShake::new_cshake256(b"ParallelHash", custom), buf: Vec::new(), n: 0, rate: 256, blocksize }; hasher.init(); hasher } fn init(&mut self) { let mut encbuf = [0; 9]; let pos = left_encode(&mut encbuf, self.blocksize as u64); self.inner.update(&encbuf[pos..]); } pub fn update(&mut self, buf: &[u8]) { let rate = self.rate; let pos = if !self.buf.is_empty() { let len = self.blocksize - self.buf.len(); if buf.len() < len { self.buf.extend_from_slice(buf); return; } else { let mut encbuf = vec![0; rate / 4]; let mut shake = Keccak::new(200 - rate / 4, 0x1f); shake.update(&self.buf); shake.update(&buf[..len]); shak
let mut encbuf = [0; 9]; let pos = right_encode(&mut encbuf, self.n); self.inner.update(&encbuf[pos..]); let pos = right_encode(&mut encbuf, bitlength); self.inner.update(&encbuf[pos..]); } }
e.finalize(&mut encbuf); self.inner.update(&encbuf); self.buf.clear(); self.n += 1; } len } else { 0 }; let bufs = buf[pos..].par_chunks(self.blocksize) .map(|chunk| if chunk.len() < self.blocksize { (false, chunk.into()) } else { let mut encbuf = vec![0; rate / 4]; let mut shake = Keccak::new(200 - rate / 4, 0x1f); shake.update(chunk); shake.finalize(&mut encbuf); (true, encbuf) }) .collect::<Vec<_>>(); for (is_hashed, mut buf) in bufs { if is_hashed { self.inner.update(&buf); self.n += 1; } else { self.buf.append(&mut buf); } } } #[inline] pub fn finalize(mut self, buf: &mut [u8]) { self.with_bitlength(buf.len() as u64 * 8); self.inner.finalize(buf) } #[inline] pub fn xof(mut self) -> XofReader { self.with_bitlength(0); self.inner.xof() } #[inline] fn with_bitlength(&mut self, bitlength: u64) { if !self.buf.is_empty() { let mut encbuf = vec![0; self.rate / 4]; let mut shake = Keccak::new(200 - self.rate / 4, 0x1f); shake.update(&self.buf); shake.finalize(&mut encbuf); self.inner.update(&encbuf); self.buf.clear(); self.n += 1; }
random
[ { "content": "/// `left_encode(x)` encodes the integer x as a byte string in a way that can be unambiguously parsed\n\n/// from the beginning of the string by inserting the length of the byte string before the byte string\n\n/// representation of x.\n\npub fn left_encode(buf: &mut [u8; 9], value: u64) -> usize {\n\n // ref https://cryptologie.net/article/388/shake-cshake-and-some-more-bit-ordering/\n\n\n\n buf.copy_from_slice(&[0; 9]);\n\n\n\n BigEndian::write_u64(&mut buf[1..], value.to_le());\n\n let offset = buf.iter()\n\n .enumerate()\n\n .find(|&(_, &v)| v != 0)\n\n .map(|(n, _)| n)\n\n .unwrap_or(8);\n\n\n\n buf[offset - 1] = (9 - offset) as u8;\n\n offset - 1\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 0, "score": 157213.4906102978 }, { "content": "/// `right_encode(x)` encodes the integer x as a byte string in a way that can be unambiguously parsed\n\n/// from the end of the string by inserting the length of the byte string after the byte string\n\n/// representation of x.\n\npub fn right_encode(buf: &mut [u8; 9], value: u64) -> usize {\n\n buf.copy_from_slice(&[0; 9]);\n\n\n\n BigEndian::write_u64(&mut buf[..8], value.to_le());\n\n let offset = buf.iter()\n\n .enumerate()\n\n .find(|&(_, &v)| v != 0)\n\n .map(|(n, _)| n)\n\n .unwrap_or(7);\n\n\n\n buf[8] = (8 - offset) as u8;\n\n offset\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 157213.4906102978 }, { "content": "#[test]\n\nfn test_cshake128() {\n\n let input = b\"\\x00\\x01\\x02\\x03\";\n\n let name = b\"\";\n\n let custom = b\"Email Signature\";\n\n let output = b\"\\xC1\\xC3\\x69\\x25\\xB6\\x40\\x9A\\x04\\xF1\\xB5\\x04\\xFC\\xBC\\xA9\\xD8\\x2B\\x40\\x17\\x27\\x7C\\xB5\\xED\\x2B\\x20\\x65\\xFC\\x1D\\x38\\x14\\xD5\\xAA\\xF5\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut cshake = CShake::new_cshake128(name, custom);\n\n cshake.update(input);\n\n cshake.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let input = 
b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n", "file_path": "tests/cshake.rs", "rank": 2, "score": 41570.387154592594 }, { "content": "#[test]\n\nfn test_cshake256() {\n\n let input = b\"\\x00\\x01\\x02\\x03\";\n\n let name = b\"\";\n\n let custom = b\"Email Signature\";\n\n let output = b\"\\xD0\\x08\\x82\\x8E\\x2B\\x80\\xAC\\x9D\\x22\\x18\\xFF\\xEE\\x1D\\x07\\x0C\\x48\\xB8\\xE4\\xC8\\x7B\\xFF\\x32\\xC9\\x69\\x9D\\x5B\\x68\\x96\\xEE\\xE0\\xED\\xD1\\\n\n \\x64\\x02\\x0E\\x2B\\xE0\\x56\\x08\\x58\\xD9\\xC0\\x0C\\x03\\x7E\\x34\\xA9\\x69\\x37\\xC5\\x61\\xA7\\x4C\\x41\\x2B\\xB4\\xC7\\x46\\x46\\x95\\x27\\x28\\x1C\\x8C\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut cshake = CShake::new_cshake256(name, custom);\n\n cshake.update(input);\n\n cshake.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let input = 
b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n", "file_path": "tests/cshake.rs", "rank": 3, "score": 41570.387154592594 }, { "content": "#[test]\n\nfn test_parallelhash128() {\n\n let x192 = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\";\n\n let s0 = b\"\";\n\n let s1 = b\"Parallel Data\";\n\n\n\n\n\n let output = b\"\\xBA\\x8D\\xC1\\xD1\\xD9\\x79\\x33\\x1D\\x3F\\x81\\x36\\x03\\xC6\\x7F\\x72\\x60\\x9A\\xB5\\xE4\\x4B\\x94\\xA0\\xB8\\xF9\\xAF\\x46\\x51\\x44\\x54\\xA2\\xB4\\xF5\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash128(s0, 8);\n\n hasher.update(x192);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n\n\n let output = b\"\\xFC\\x48\\x4D\\xCB\\x3F\\x84\\xDC\\xEE\\xDC\\x35\\x34\\x38\\x15\\x1B\\xEE\\x58\\x15\\x7D\\x6E\\xFE\\xD0\\x44\\x5A\\x81\\xF1\\x65\\xE4\\x95\\x79\\x5B\\x72\\x06\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = 
ParallelHash::new_parallelhash128(s1, 8);\n\n hasher.update(x192);\n\n hasher.finalize(&mut buf);\n", "file_path": "tests/parallelhash.rs", "rank": 4, "score": 40951.4184200785 }, { "content": "#[test]\n\nfn test_parallelhash256() {\n\n let x192 = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\";\n\n let s0 = b\"\";\n\n let s1 = b\"Parallel Data\";\n\n\n\n let output = b\"\\xBC\\x1E\\xF1\\x24\\xDA\\x34\\x49\\x5E\\x94\\x8E\\xAD\\x20\\x7D\\xD9\\x84\\x22\\x35\\xDA\\x43\\x2D\\x2B\\xBC\\x54\\xB4\\xC1\\x10\\xE6\\x4C\\x45\\x11\\x05\\x53\\\n\n \\x1B\\x7F\\x2A\\x3E\\x0C\\xE0\\x55\\xC0\\x28\\x05\\xE7\\xC2\\xDE\\x1F\\xB7\\x46\\xAF\\x97\\xA1\\xDD\\x01\\xF4\\x3B\\x82\\x4E\\x31\\xB8\\x76\\x12\\x41\\x04\\x29\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash256(s0, 8);\n\n hasher.update(x192);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\xCD\\xF1\\x52\\x89\\xB5\\x4F\\x62\\x12\\xB4\\xBC\\x27\\x05\\x28\\xB4\\x95\\x26\\x00\\x6D\\xD9\\xB5\\x4E\\x2B\\x6A\\xDD\\x1E\\xF6\\x90\\x0D\\xDA\\x39\\x63\\xBB\\\n\n \\x33\\xA7\\x24\\x91\\xF2\\x36\\x96\\x9C\\xA8\\xAF\\xAE\\xA2\\x9C\\x68\\x2D\\x47\\xA3\\x93\\xC0\\x65\\xB3\\x8E\\x29\\xFA\\xE6\\x51\\xA2\\x09\\x1C\\x83\\x31\\x10\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash256(s1, 8);\n\n hasher.update(x192);\n\n hasher.finalize(&mut buf);\n", "file_path": "tests/parallelhash.rs", "rank": 5, "score": 40951.4184200785 }, { "content": "#[test]\n\nfn test_right_encode() {\n\n let mut buf = [0; 9];\n\n let n = right_encode(&mut buf, 0);\n\n assert_eq!(&buf[n..], [0, 1]);\n\n\n\n let n = right_encode(&mut buf, 128);\n\n assert_eq!(&buf[n..], [128, 1]);\n\n\n\n let n = right_encode(&mut buf, 65536);\n\n assert_eq!(&buf[n..], [1, 0, 0, 3]);\n\n\n\n let n = right_encode(&mut buf, 4096);\n\n assert_eq!(&buf[n..], [16, 0, 2]);\n\n\n\n let n = 
right_encode(&mut buf, 18446744073709551615);\n\n assert_eq!(&buf[n..], [255, 255, 255, 255, 255, 255, 255, 255, 8]);\n\n\n\n let n = right_encode(&mut buf, 12345);\n\n assert_eq!(&buf[n..], [48, 57, 2]);\n\n}\n", "file_path": "src/utils.rs", "rank": 6, "score": 39709.77909358555 }, { "content": "#[test]\n\nfn test_left_encode() {\n\n let mut buf = [0; 9];\n\n let n = left_encode(&mut buf, 0);\n\n assert_eq!(&buf[n..], [1, 0]);\n\n\n\n let n = left_encode(&mut buf, 128);\n\n assert_eq!(&buf[n..], [1, 128]);\n\n\n\n let n = left_encode(&mut buf, 65536);\n\n assert_eq!(&buf[n..], [3, 1, 0, 0]);\n\n\n\n let n = left_encode(&mut buf, 4096);\n\n assert_eq!(&buf[n..], [2, 16, 0]);\n\n\n\n let n = left_encode(&mut buf, 18446744073709551615);\n\n assert_eq!(&buf[n..], [8, 255, 255, 255, 255, 255, 255, 255, 255]);\n\n\n\n let n = left_encode(&mut buf, 54321);\n\n assert_eq!(&buf[n..], [2, 212, 49]);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 7, "score": 39709.77909358555 }, { "content": "#[test]\n\nfn test_parallelhash128_xof() {\n\n let x192 = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\";\n\n let s0 = b\"\";\n\n let s1 = b\"Parallel Data\";\n\n\n\n\n\n let output = b\"\\xFE\\x47\\xD6\\x61\\xE4\\x9F\\xFE\\x5B\\x7D\\x99\\x99\\x22\\xC0\\x62\\x35\\x67\\x50\\xCA\\xF5\\x52\\x98\\x5B\\x8E\\x8C\\xE6\\x66\\x7F\\x27\\x27\\xC3\\xC8\\xD3\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash128(s0, 8);\n\n hasher.update(x192);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let output = b\"\\xEA\\x2A\\x79\\x31\\x40\\x82\\x0F\\x7A\\x12\\x8B\\x8E\\xB7\\x0A\\x94\\x39\\xF9\\x32\\x57\\xC6\\xE6\\xE7\\x9B\\x4A\\x54\\x0D\\x29\\x1D\\x6D\\xAE\\x70\\x98\\xD7\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash128(s1, 8);\n\n hasher.update(x192);\n\n let mut xof = 
hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/parallelhash.rs", "rank": 8, "score": 38857.418743673574 }, { "content": "#[test]\n\nfn test_parallelhash256_xof() {\n\n let x192 = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\";\n\n let s0 = b\"\";\n\n let s1 = b\"Parallel Data\";\n\n\n\n\n\n let output = b\"\\xC1\\x0A\\x05\\x27\\x22\\x61\\x46\\x84\\x14\\x4D\\x28\\x47\\x48\\x50\\xB4\\x10\\x75\\x7E\\x3C\\xBA\\x87\\x65\\x1B\\xA1\\x67\\xA5\\xCB\\xDD\\xFF\\x7F\\x46\\x66\\\n\n \\x75\\xFB\\xF8\\x4B\\xCA\\xE7\\x37\\x8A\\xC4\\x44\\xBE\\x68\\x1D\\x72\\x94\\x99\\xAF\\xCA\\x66\\x7F\\xB8\\x79\\x34\\x8B\\xFD\\xDA\\x42\\x78\\x63\\xC8\\x2F\\x1C\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash256(s0, 8);\n\n hasher.update(x192);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\x53\\x8E\\x10\\x5F\\x1A\\x22\\xF4\\x4E\\xD2\\xF5\\xCC\\x16\\x74\\xFB\\xD4\\x0B\\xE8\\x03\\xD9\\xC9\\x9B\\xF5\\xF8\\xD9\\x0A\\x2C\\x81\\x93\\xF3\\xFE\\x6E\\xA7\\\n\n \\x68\\xE5\\xC1\\xA2\\x09\\x87\\xE2\\xC9\\xC6\\x5F\\xEB\\xED\\x03\\x88\\x7A\\x51\\xD3\\x56\\x24\\xED\\x12\\x37\\x75\\x94\\xB5\\x58\\x55\\x41\\xDC\\x37\\x7E\\xFC\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash256(s1, 8);\n\n hasher.update(x192);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n", "file_path": "tests/parallelhash.rs", "rank": 9, "score": 38857.418743673574 }, { "content": "use byteorder::{ ByteOrder, BigEndian };\n\n\n\n\n\n/// `left_encode(x)` encodes the integer x as a byte string in a way that can be unambiguously parsed\n\n/// from the beginning of the string by inserting the length of the byte string before the byte string\n\n/// representation of x.\n", "file_path": 
"src/utils.rs", "rank": 10, "score": 22351.585093303747 }, { "content": "#[test]\n\nfn test_tuplehash128() {\n\n let te3 = b\"\\x00\\x01\\x02\";\n\n let te6 = b\"\\x10\\x11\\x12\\x13\\x14\\x15\";\n\n let te9 = b\"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\";\n\n let s0 = b\"\";\n\n let s1 = b\"My Tuple App\";\n\n\n\n\n\n let output = b\"\\xC5\\xD8\\x78\\x6C\\x1A\\xFB\\x9B\\x82\\x11\\x1A\\xB3\\x4B\\x65\\xB2\\xC0\\x04\\x8F\\xA6\\x4E\\x6D\\x48\\xE2\\x63\\x26\\x4C\\xE1\\x70\\x7D\\x3F\\xFC\\x8E\\xD1\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s0);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let output = b\"\\x75\\xCD\\xB2\\x0F\\xF4\\xDB\\x11\\x54\\xE8\\x41\\xD7\\x58\\xE2\\x41\\x60\\xC5\\x4B\\xAE\\x86\\xEB\\x8C\\x13\\xE7\\xF5\\xF4\\x0E\\xB3\\x55\\x88\\xE9\\x6D\\xFB\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s1);\n\n hasher.update(&[&te3[..], &te6[..]]);\n", "file_path": "tests/tuplehash.rs", "rank": 11, "score": 22180.56028736286 }, { "content": "#[test]\n\nfn test_kmac256() {\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x20\\xC5\\x70\\xC3\\x13\\x46\\xF7\\x03\\xC9\\xAC\\x36\\xC6\\x1C\\x03\\xCB\\x64\\xC3\\x97\\x0D\\x0C\\xFC\\x78\\x7E\\x9B\\x79\\x59\\x9D\\x27\\x3A\\x68\\xD2\\xF7\\\n\n \\xF6\\x9D\\x4C\\xC3\\xDE\\x9D\\x10\\x4A\\x35\\x16\\x89\\xF2\\x7C\\xF6\\xF5\\x95\\x1F\\x01\\x03\\xF3\\x3F\\x4F\\x24\\x87\\x10\\x24\\xD9\\xC2\\x77\\x73\\xA8\\xDD\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let key = 
b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n", "file_path": "tests/kmac.rs", "rank": 12, "score": 22180.56028736286 }, { "content": "#[test]\n\nfn test_tuplehash256() {\n\n let te3 = b\"\\x00\\x01\\x02\";\n\n let te6 = b\"\\x10\\x11\\x12\\x13\\x14\\x15\";\n\n let te9 = b\"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\";\n\n let s0 = b\"\";\n\n let s1 = b\"My Tuple App\";\n\n\n\n\n\n let output = b\"\\xCF\\xB7\\x05\\x8C\\xAC\\xA5\\xE6\\x68\\xF8\\x1A\\x12\\xA2\\x0A\\x21\\x95\\xCE\\x97\\xA9\\x25\\xF1\\xDB\\xA3\\xE7\\x44\\x9A\\x56\\xF8\\x22\\x01\\xEC\\x60\\x73\\\n\n \\x11\\xAC\\x26\\x96\\xB1\\xAB\\x5E\\xA2\\x35\\x2D\\xF1\\x42\\x3B\\xDE\\x7B\\xD4\\xBB\\x78\\xC9\\xAE\\xD1\\xA8\\x53\\xC7\\x86\\x72\\xF9\\xEB\\x23\\xBB\\xE1\\x94\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash256(s0);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = 
b\"\\x14\\x7C\\x21\\x91\\xD5\\xED\\x7E\\xFD\\x98\\xDB\\xD9\\x6D\\x7A\\xB5\\xA1\\x16\\x92\\x57\\x6F\\x5F\\xE2\\xA5\\x06\\x5F\\x3E\\x33\\xDE\\x6B\\xBA\\x9F\\x3A\\xA1\\\n\n \\xC4\\xE9\\xA0\\x68\\xA2\\x89\\xC6\\x1C\\x95\\xAA\\xB3\\x0A\\xEE\\x1E\\x41\\x0B\\x0B\\x60\\x7D\\xE3\\x62\\x0E\\x24\\xA4\\xE3\\xBF\\x98\\x52\\xA1\\xD4\\x36\\x7E\";\n\n let mut buf = vec![0; output.len()];\n", "file_path": "tests/tuplehash.rs", "rank": 13, "score": 22180.56028736286 }, { "content": "#[test]\n\nfn test_kmac128() {\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"\";\n\n let output = b\"\\xE5\\x78\\x0B\\x0D\\x3E\\xA6\\xF7\\xD3\\xA4\\x29\\xC5\\x70\\x6A\\xA4\\x3A\\x00\\xFA\\xDB\\xD7\\xD4\\x96\\x28\\x83\\x9E\\x31\\x87\\x24\\x3F\\x45\\x6E\\xE1\\x4E\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac128(key, custom);\n\n kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x3B\\x1F\\xBA\\x96\\x3C\\xD8\\xB0\\xB5\\x9E\\x8C\\x1A\\x6D\\x71\\x88\\x8B\\x71\\x43\\x65\\x1A\\xF8\\xBA\\x0A\\x70\\x70\\xC0\\x97\\x9E\\x28\\x11\\x32\\x4A\\xA5\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac128(key, custom);\n", "file_path": "tests/kmac.rs", "rank": 14, "score": 22180.56028736286 }, { "content": " #[inline]\n\n pub fn new_cshake256(name: &[u8], custom: &[u8]) -> Self {\n\n let mut cshake = CShake(Keccak::new(136, 0x04));\n\n cshake.init(name, custom, 136);\n\n cshake\n\n }\n\n\n\n fn init(&mut self, name: &[u8], custom: &[u8], rate: 
usize) {\n\n let mut encbuf = [0; 9];\n\n\n\n let pos = left_encode(&mut encbuf, rate as u64);\n\n self.0.absorb(&encbuf[pos..]); // left_encode(rate)\n\n\n\n let pos = left_encode(&mut encbuf, name.len() as u64 * 8);\n\n self.0.absorb(&encbuf[pos..]); // left_encode(len(N))\n\n self.0.absorb(name);\n\n\n\n let pos = left_encode(&mut encbuf, custom.len() as u64 * 8);\n\n self.0.absorb(&encbuf[pos..]); // left_encode(len(S))\n\n self.0.absorb(custom);\n", "file_path": "src/cshake.rs", "rank": 15, "score": 22057.728333239837 }, { "content": "use tiny_keccak::{ Keccak, XofReader };\n\nuse ::utils::left_encode;\n\n\n\n\n\n/// The customizable SHAKE function.\n\n///\n\n/// The two variants of `cSHAKE`—`cSHAKE128` and `cSHAKE256`—are defined in terms of the\n\n/// `SHAKE` and `KECCAK[c]` functions specified in FIPS 202. `cSHAKE128` provides a 128-bit\n\n/// security strength, while `cSHAKE256` provides a 256-bit security strength.\n\n#[derive(Clone)]\n\npub struct CShake(pub(crate) Keccak);\n\n\n\nimpl CShake {\n\n #[inline]\n\n pub fn new_cshake128(name: &[u8], custom: &[u8]) -> Self {\n\n let mut cshake = CShake(Keccak::new(168, 0x04));\n\n cshake.init(name, custom, 168);\n\n cshake\n\n }\n\n\n", "file_path": "src/cshake.rs", "rank": 16, "score": 22051.617658205752 }, { "content": "\n\n self.0.fill_block(); // pad zero\n\n }\n\n\n\n #[inline]\n\n pub fn update(&mut self, buf: &[u8]) {\n\n self.0.absorb(buf)\n\n }\n\n\n\n #[inline]\n\n pub fn finalize(&mut self, buf: &mut [u8]) {\n\n self.0.pad();\n\n self.0.keccakf();\n\n self.0.squeeze(buf);\n\n }\n\n\n\n #[inline]\n\n pub fn xof(self) -> XofReader {\n\n self.0.xof()\n\n }\n\n}\n", "file_path": "src/cshake.rs", "rank": 17, "score": 22040.649261443534 }, { "content": " let name = b\"\";\n\n let custom = b\"Email Signature\";\n\n let output = b\"\\xC5\\x22\\x1D\\x50\\xE4\\xF8\\x22\\xD9\\x6A\\x2E\\x88\\x81\\xA9\\x61\\x42\\x0F\\x29\\x4B\\x7B\\x24\\xFE\\x3D\\x20\\x94\\xBA\\xED\\x2C\\x65\\x24\\xCC\\x16\\x6B\";\n\n\n\n let 
mut buf = vec![0; output.len()];\n\n let mut cshake = CShake::new_cshake128(name, custom);\n\n cshake.update(input);\n\n cshake.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/cshake.rs", "rank": 18, "score": 22038.489376784037 }, { "content": " \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let name = b\"\";\n\n let custom = b\"Email Signature\";\n\n let output = b\"\\x07\\xDC\\x27\\xB1\\x1E\\x51\\xFB\\xAC\\x75\\xBC\\x7B\\x3C\\x1D\\x98\\x3E\\x8B\\x4B\\x85\\xFB\\x1D\\xEF\\xAF\\x21\\x89\\x12\\xAC\\x86\\x43\\x02\\x73\\x09\\x17\\\n\n \\x27\\xF4\\x2B\\x17\\xED\\x1D\\xF6\\x3E\\x8E\\xC1\\x18\\xF0\\x4B\\x23\\x63\\x3C\\x1D\\xFB\\x15\\x74\\xC8\\xFB\\x55\\xCB\\x45\\xDA\\x8E\\x25\\xAF\\xB0\\x92\\xBB\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut cshake = CShake::new_cshake256(name, custom);\n\n cshake.update(input);\n\n cshake.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n", "file_path": "tests/cshake.rs", "rank": 19, "score": 22036.204524207253 }, { "content": "extern crate sp800_185;\n\n\n\nuse sp800_185::CShake;\n\n\n\n\n\n#[test]\n", "file_path": "tests/cshake.rs", "rank": 20, "score": 22031.5105575869 }, { "content": " assert_eq!(buf, output);\n\n\n\n\n\n\n\n let output = b\"\\xBA\\x8D\\xC1\\xD1\\xD9\\x79\\x33\\x1D\\x3F\\x81\\x36\\x03\\xC6\\x7F\\x72\\x60\\x9A\\xB5\\xE4\\x4B\\x94\\xA0\\xB8\\xF9\\xAF\\x46\\x51\\x44\\x54\\xA2\\xB4\\xF5\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash128(s0, 8);\n\n hasher.update(&x192[..13]);\n\n hasher.update(&x192[13..]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/parallelhash.rs", "rank": 28, "score": 21332.289434375838 }, { "content": " assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\xBC\\x1E\\xF1\\x24\\xDA\\x34\\x49\\x5E\\x94\\x8E\\xAD\\x20\\x7D\\xD9\\x84\\x22\\x35\\xDA\\x43\\x2D\\x2B\\xBC\\x54\\xB4\\xC1\\x10\\xE6\\x4C\\x45\\x11\\x05\\x53\\\n\n 
\\x1B\\x7F\\x2A\\x3E\\x0C\\xE0\\x55\\xC0\\x28\\x05\\xE7\\xC2\\xDE\\x1F\\xB7\\x46\\xAF\\x97\\xA1\\xDD\\x01\\xF4\\x3B\\x82\\x4E\\x31\\xB8\\x76\\x12\\x41\\x04\\x29\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = ParallelHash::new_parallelhash256(s0, 8);\n\n hasher.update(&x192[..13]);\n\n hasher.update(&x192[13..]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n\n\n", "file_path": "tests/parallelhash.rs", "rank": 29, "score": 21331.014940483623 }, { "content": "#![cfg(feature = \"parallelhash\")]\n\n\n\nextern crate sp800_185;\n\n\n\nuse sp800_185::ParallelHash;\n\n\n\n\n\n#[test]\n", "file_path": "tests/parallelhash.rs", "rank": 30, "score": 21328.435596590243 }, { "content": "#[test]\n\nfn test_kmac256_xof() {\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x17\\x55\\x13\\x3F\\x15\\x34\\x75\\x2A\\xAD\\x07\\x48\\xF2\\xC7\\x06\\xFB\\x5C\\x78\\x45\\x12\\xCA\\xB8\\x35\\xCD\\x15\\x67\\x6B\\x16\\xC0\\xC6\\x64\\x7F\\xA9\\\n\n \\x6F\\xAA\\x7A\\xF6\\x34\\xA0\\xBF\\x8F\\xF6\\xDF\\x39\\x37\\x4F\\xA0\\x0F\\xAD\\x9A\\x39\\xE3\\x22\\xA7\\xC9\\x20\\x65\\xA6\\x4E\\xB1\\xFB\\x08\\x01\\xEB\\x2B\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n 
\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n", "file_path": "tests/kmac.rs", "rank": 31, "score": 21146.533344112144 }, { "content": "#[test]\n\nfn test_tuplehash128_xof() {\n\n let te3 = b\"\\x00\\x01\\x02\";\n\n let te6 = b\"\\x10\\x11\\x12\\x13\\x14\\x15\";\n\n let te9 = b\"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\";\n\n let s0 = b\"\";\n\n let s1 = b\"My Tuple App\";\n\n\n\n\n\n let output = b\"\\x2F\\x10\\x3C\\xD7\\xC3\\x23\\x20\\x35\\x34\\x95\\xC6\\x8D\\xE1\\xA8\\x12\\x92\\x45\\xC6\\x32\\x5F\\x6F\\x2A\\x3D\\x60\\x8D\\x92\\x17\\x9C\\x96\\xE6\\x84\\x88\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s0);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let output = b\"\\x3F\\xC8\\xAD\\x69\\x45\\x31\\x28\\x29\\x28\\x59\\xA1\\x8B\\x6C\\x67\\xD7\\xAD\\x85\\xF0\\x1B\\x32\\x81\\x5E\\x22\\xCE\\x83\\x9C\\x49\\xEC\\x37\\x4E\\x9B\\x9A\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s1);\n", "file_path": "tests/tuplehash.rs", "rank": 32, "score": 21146.533344112144 }, { "content": "#[test]\n\nfn test_kmac128_xof() {\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"\";\n\n let output = 
b\"\\xCD\\x83\\x74\\x0B\\xBD\\x92\\xCC\\xC8\\xCF\\x03\\x2B\\x14\\x81\\xA0\\xF4\\x46\\x0E\\x7C\\xA9\\xDD\\x12\\xB0\\x8A\\x0C\\x40\\x31\\x17\\x8B\\xAC\\xD6\\xEC\\x35\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac128(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x31\\xA4\\x45\\x27\\xB4\\xED\\x9F\\x5C\\x61\\x01\\xD1\\x1D\\xE6\\xD2\\x6F\\x06\\x20\\xAA\\x5C\\x34\\x1D\\xEF\\x41\\x29\\x96\\x57\\xFE\\x9D\\xF1\\xA3\\xB1\\x6C\";\n\n\n\n let mut buf = vec![0; output.len()];\n", "file_path": "tests/kmac.rs", "rank": 33, "score": 21146.533344112144 }, { "content": "#[test]\n\nfn test_tuplehash256_xof() {\n\n let te3 = b\"\\x00\\x01\\x02\";\n\n let te6 = b\"\\x10\\x11\\x12\\x13\\x14\\x15\";\n\n let te9 = b\"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\";\n\n let s0 = b\"\";\n\n let s1 = b\"My Tuple App\";\n\n\n\n\n\n let output = b\"\\x03\\xDE\\xD4\\x61\\x0E\\xD6\\x45\\x0A\\x1E\\x3F\\x8B\\xC4\\x49\\x51\\xD1\\x4F\\xBC\\x38\\x4A\\xB0\\xEF\\xE5\\x7B\\x00\\x0D\\xF6\\xB6\\xDF\\x5A\\xAE\\x7C\\xD5\\\n\n \\x68\\xE7\\x73\\x77\\xDA\\xF1\\x3F\\x37\\xEC\\x75\\xCF\\x5F\\xC5\\x98\\xB6\\x84\\x1D\\x51\\xDD\\x20\\x7C\\x99\\x1C\\xD4\\x5D\\x21\\x0B\\xA6\\x0A\\xC5\\x2E\\xB9\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash256(s0);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\x64\\x83\\xCB\\x3C\\x99\\x52\\xEB\\x20\\xE8\\x30\\xAF\\x47\\x85\\x85\\x1F\\xC5\\x97\\xEE\\x3B\\xF9\\x3B\\xB7\\x60\\x2C\\x0E\\xF6\\xA6\\x5D\\x74\\x1A\\xEC\\xA7\\\n\n 
\\xE6\\x3C\\x3B\\x12\\x89\\x81\\xAA\\x05\\xC6\\xD2\\x74\\x38\\xC7\\x9D\\x27\\x54\\xBB\\x1B\\x71\\x91\\xF1\\x25\\xD6\\x62\\x0F\\xCA\\x12\\xCE\\x65\\x8B\\x24\\x42\";\n", "file_path": "tests/tuplehash.rs", "rank": 34, "score": 21146.533344112144 }, { "content": "pub struct TupleHash(CShake);\n\n\n\nimpl TupleHash {\n\n #[inline]\n\n pub fn new_tuplehash128(custom: &[u8]) -> Self {\n\n TupleHash(CShake::new_cshake128(b\"TupleHash\", custom))\n\n }\n\n\n\n #[inline]\n\n pub fn new_tuplehash256(custom: &[u8]) -> Self {\n\n TupleHash(CShake::new_cshake256(b\"TupleHash\", custom))\n\n }\n\n\n\n pub fn update<T: AsRef<[u8]>>(&mut self, input: &[T]) {\n\n let mut encbuf = [0; 9];\n\n\n\n for buf in input {\n\n let buf = buf.as_ref();\n\n // encode_string(X[i])\n\n let pos = left_encode(&mut encbuf, buf.len() as u64 * 8);\n", "file_path": "src/tuplehash.rs", "rank": 35, "score": 26.74554575854358 }, { "content": " let mut kmac = KMac(CShake::new_cshake128(b\"KMAC\", custom));\n\n kmac.init(key, 168);\n\n kmac\n\n }\n\n\n\n #[inline]\n\n pub fn new_kmac256(key: &[u8], custom: &[u8]) -> Self {\n\n let mut kmac = KMac(CShake::new_cshake256(b\"KMAC\", custom));\n\n kmac.init(key, 136);\n\n kmac\n\n }\n\n\n\n fn init(&mut self, key: &[u8], rate: usize) {\n\n let mut encbuf = [0; 9];\n\n\n\n // bytepad(encode_string(k))\n\n let pos = left_encode(&mut encbuf, rate as u64);\n\n self.0.update(&encbuf[pos..]);\n\n\n\n let pos = left_encode(&mut encbuf, key.len() as u64 * 8);\n", "file_path": "src/kmac.rs", "rank": 36, "score": 24.99075326363779 }, { "content": "use tiny_keccak::XofReader;\n\nuse ::cshake::CShake;\n\nuse ::utils::{ left_encode, right_encode };\n\n\n\n\n\n/// KECCAK Message Authentication Code.\n\n///\n\n/// The KECCAK Message Authentication Code (KMAC) algorithm is a PRF and keyed hash\n\n/// function based on `KECCAK`. It provides variable-length output, and unlike `SHAKE` and `cSHAKE`,\n\n/// altering the requested output length generates a new, unrelated output. 
`KMAC` has two variants,\n\n/// `KMAC128` and `KMAC256`, built from `cSHAKE128` and `cSHAKE256`, respectively. The two\n\n/// variants differ somewhat in their technical security properties. Nonetheless, for most\n\n/// applications, both variants can support any security strength up to 256 bits of security, provided\n\n/// that a long enough key is used.\n\n#[derive(Clone)]\n\npub struct KMac(CShake);\n\n\n\nimpl KMac {\n\n #[inline]\n\n pub fn new_kmac128(key: &[u8], custom: &[u8]) -> Self {\n", "file_path": "src/kmac.rs", "rank": 37, "score": 19.24539279420515 }, { "content": " self.0.update(&encbuf[pos..]);\n\n self.0.update(buf);\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn finalize(mut self, buf: &mut [u8]) {\n\n self.with_bitlength(buf.len() as u64 * 8);\n\n self.0.finalize(buf)\n\n }\n\n\n\n /// A function on bit strings in which the output can be extended to any desired length.\n\n ///\n\n /// Some applications of `TupleHash` may not know the number of output bits they will need until\n\n /// after the outputs begin to be produced. 
For these applications, `TupleHash` can also be used as a\n\n /// XOF (i.e., the output can be extended to any desired length), which mimics the behavior of\n\n /// cSHAKE.\n\n #[inline]\n\n pub fn xof(mut self) -> XofReader {\n\n self.with_bitlength(0);\n", "file_path": "src/tuplehash.rs", "rank": 38, "score": 18.935540543376028 }, { "content": " self.0.update(&encbuf[pos..]);\n\n self.0.update(key);\n\n\n\n (self.0).0.fill_block();\n\n }\n\n\n\n #[inline]\n\n pub fn update(&mut self, buf: &[u8]) {\n\n self.0.update(buf)\n\n }\n\n\n\n #[inline]\n\n pub fn finalize(mut self, buf: &mut [u8]) {\n\n self.with_bitlength(buf.len() as u64 * 8);\n\n self.0.finalize(buf);\n\n }\n\n\n\n /// A function on bit strings in which the output can be extended to any desired length.\n\n ///\n\n /// Some applications of `KMAC` may not know the number of output bits they will need until after\n", "file_path": "src/kmac.rs", "rank": 39, "score": 18.16718650435856 }, { "content": "//! SHA-3 Derived Functions (SP800-185) Implementation in Rust.\n\n\n\n\n\n#![cfg_attr(not(feature = \"parallelhash\"), no_std)]\n\n\n\nextern crate byteorder;\n\nextern crate tiny_keccak;\n\n#[cfg(feature = \"parallelhash\")] extern crate rayon;\n\n\n\npub mod utils;\n\nmod cshake;\n\nmod kmac;\n\nmod tuplehash;\n\n\n\npub use cshake::CShake;\n\npub use kmac::KMac;\n\npub use tuplehash::TupleHash;\n\n\n\n#[cfg(feature = \"parallelhash\")] mod parallelhash;\n\n#[cfg(feature = \"parallelhash\")] pub use parallelhash::ParallelHash;\n", "file_path": "src/lib.rs", "rank": 40, "score": 16.225008675221275 }, { "content": " /// the outputs begin to be produced. 
For these applications, `KMAC` can also be used as a XOF (i.e.,\n\n /// the output can be extended to any desired length), which mimics the behavior of `cSHAKE`.\n\n #[inline]\n\n pub fn xof(mut self) -> XofReader {\n\n self.with_bitlength(0);\n\n self.0.xof()\n\n }\n\n\n\n #[inline]\n\n fn with_bitlength(&mut self, bitlength: u64) {\n\n let mut encbuf = [0; 9];\n\n\n\n // right_encode(L)\n\n let pos = right_encode(&mut encbuf, bitlength);\n\n self.0.update(&encbuf[pos..]);\n\n }\n\n}\n", "file_path": "src/kmac.rs", "rank": 41, "score": 15.266350644749355 }, { "content": " self.0.xof()\n\n }\n\n\n\n #[inline]\n\n fn with_bitlength(&mut self, bitlength: u64) {\n\n let mut encbuf = [0; 9];\n\n\n\n // right_encode(L)\n\n let pos = right_encode(&mut encbuf, bitlength);\n\n self.0.update(&encbuf[pos..]);\n\n }\n\n}\n", "file_path": "src/tuplehash.rs", "rank": 42, "score": 10.610967799832997 }, { "content": " hasher.update(&[&te3[..], &te6[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let output = b\"\\x90\\x0F\\xE1\\x6C\\xAD\\x09\\x8D\\x28\\xE7\\x4D\\x63\\x2E\\xD8\\x52\\xF9\\x9D\\xAA\\xB7\\xF7\\xDF\\x4D\\x99\\xE7\\x75\\x65\\x78\\x85\\xB4\\xBF\\x76\\xD6\\xF8\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s1);\n\n hasher.update(&[&te3[..], &te6[..], &te9[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/tuplehash.rs", "rank": 43, "score": 9.358152289763975 }, { "content": " hasher.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let output = b\"\\xE6\\x0F\\x20\\x2C\\x89\\xA2\\x63\\x1E\\xDA\\x8D\\x4C\\x58\\x8C\\xA5\\xFD\\x07\\xF3\\x9E\\x51\\x51\\x99\\x8D\\xEC\\xCF\\x97\\x3A\\xDB\\x38\\x04\\xBB\\x6E\\x84\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash128(s1);\n\n hasher.update(&[&te3[..], &te6[..], &te9[..]]);\n\n hasher.finalize(&mut 
buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/tuplehash.rs", "rank": 44, "score": 9.221846520276419 }, { "content": " let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash256(s1);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\x0C\\x59\\xB1\\x14\\x64\\xF2\\x33\\x6C\\x34\\x66\\x3E\\xD5\\x1B\\x2B\\x95\\x0B\\xEC\\x74\\x36\\x10\\x85\\x6F\\x36\\xC2\\x8D\\x1D\\x08\\x8D\\x8A\\x24\\x46\\x28\\\n\n \\x4D\\xD0\\x98\\x30\\xA6\\xA1\\x78\\xDC\\x75\\x23\\x76\\x19\\x9F\\xAE\\x93\\x5D\\x86\\xCF\\xDE\\xE5\\x91\\x3D\\x49\\x22\\xDF\\xD3\\x69\\xB6\\x6A\\x53\\xC8\\x97\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash256(s1);\n\n hasher.update(&[&te3[..], &te6[..], &te9[..]]);\n\n let mut xof = hasher.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n", "file_path": "tests/tuplehash.rs", "rank": 45, "score": 9.18614321893069 }, { "content": " let mut hasher = TupleHash::new_tuplehash256(s1);\n\n hasher.update(&[&te3[..], &te6[..]]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let output = b\"\\x45\\x00\\x0B\\xE6\\x3F\\x9B\\x6B\\xFD\\x89\\xF5\\x47\\x17\\x67\\x0F\\x69\\xA9\\xBC\\x76\\x35\\x91\\xA4\\xF0\\x5C\\x50\\xD6\\x88\\x91\\xA7\\x44\\xBC\\xC6\\xE7\\\n\n \\xD6\\xD5\\xB5\\xE8\\x2C\\x01\\x8D\\xA9\\x99\\xED\\x35\\xB0\\xBB\\x49\\xC9\\x67\\x8E\\x52\\x6A\\xBD\\x8E\\x85\\xC1\\x3E\\xD2\\x54\\x02\\x1D\\xB9\\xE7\\x90\\xCE\";\n\n let mut buf = vec![0; output.len()];\n\n let mut hasher = TupleHash::new_tuplehash256(s1);\n\n hasher.update(&[&te3[..], &te6[..], &te9[..]]);\n\n hasher.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n\n\n", "file_path": "tests/tuplehash.rs", "rank": 46, "score": 8.470389814759706 }, { "content": "# 
SP800-185\n\n[![travis-ci](https://api.travis-ci.org/quininer/sp800-185.svg)](https://travis-ci.org/quininer/sp800-185)\n\n[![crates](https://img.shields.io/crates/v/sp800-185.svg)](https://crates.io/crates/sp800-185)\n\n[![license](https://img.shields.io/github/license/quininer/sp800-185.svg)](https://github.com/quininer/sp800-185/blob/master/LICENSE)\n\n[![docs.rs](https://docs.rs/sp800-185/badge.svg)](https://docs.rs/sp800-185/)\n\n\n\nSHA-3 Derived Functions (SP800-185) Implementation in Rust.\n\n\n\n* [SHA-3 Derived Functions: cSHAKE, KMAC, TupleHash, and ParallelHash](https://www.nist.gov/publications/sha-3-derived-functions-cshake-kmac-tuplehash-and-parallelhash)\n\n* [KeccakCodePackage](https://github.com/gvanas/KeccakCodePackage)\n\n* [SHAKE, cSHAKE and some more bit ordering April 2017](https://cryptologie.net/article/388/shake-cshake-and-some-more-bit-ordering/)\n", "file_path": "README.md", "rank": 47, "score": 7.227989009834222 }, { "content": " \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\xB5\\x86\\x18\\xF7\\x1F\\x92\\xE1\\xD5\\x6C\\x1B\\x8C\\x55\\xDD\\xD7\\xCD\\x18\\x8B\\x97\\xB4\\xCA\\x4D\\x99\\x83\\x1E\\xB2\\x69\\x9A\\x83\\x7D\\xA2\\xE4\\xD9\\\n\n \\x70\\xFB\\xAC\\xFD\\xE5\\x00\\x33\\xAE\\xA5\\x85\\xF1\\xA2\\x70\\x85\\x10\\xC3\\x2D\\x07\\x88\\x08\\x01\\xBD\\x18\\x28\\x98\\xFE\\x47\\x68\\x76\\xFC\\x89\\x65\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n}\n\n\n", "file_path": "tests/kmac.rs", "rank": 48, "score": 6.839945674259082 }, { "content": "use tiny_keccak::XofReader;\n\nuse ::cshake::CShake;\n\nuse ::utils::{ left_encode, right_encode };\n\n\n\n\n\n/// Tuple Hash.\n\n///\n\n/// `TupleHash` is a SHA-3-derived hash function with variable-length output that is designed to\n\n/// simply hash a tuple of input strings, any or all of which may be 
empty strings, in an\n\n/// unambiguous way. Such a tuple may consist of any number of strings, including zero, and is\n\n/// represented as a sequence of strings or variables in parentheses like (“a”, “b”, “c”,...,“z”) in this\n\n/// document.\n\n/// `TupleHash` is designed to provide a generic, misuse-resistant way to combine a sequence of\n\n/// strings for hashing such that, for example, a `TupleHash` computed on the tuple (\"abc\" ,\"d\") will\n\n/// produce a different hash value than a `TupleHash` computed on the tuple (\"ab\",\"cd\"), even though\n\n/// all the remaining input parameters are kept the same, and the two resulting concatenated strings,\n\n/// without string encoding, are identical.\n\n/// `TupleHash` supports two security strengths: 128 bits and 256 bits. Changing any input to the\n\n/// function, including the requested output length, will almost certainly change the final output.\n\n#[derive(Clone)]\n", "file_path": "src/tuplehash.rs", "rank": 49, "score": 6.593686207153567 }, { "content": " \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\xD5\\xBE\\x73\\x1C\\x95\\x4E\\xD7\\x73\\x28\\x46\\xBB\\x59\\xDB\\xE3\\xA8\\xE3\\x0F\\x83\\xE7\\x7A\\x4B\\xFF\\x44\\x59\\xF2\\xF1\\xC2\\xB4\\xEC\\xEB\\xB8\\xCE\\\n\n \\x67\\xBA\\x01\\xC6\\x2E\\x8A\\xB8\\x57\\x8D\\x2D\\x49\\x9B\\xD1\\xBB\\x27\\x67\\x68\\x78\\x11\\x90\\x02\\x0A\\x30\\x6A\\x97\\xDE\\x28\\x1D\\xCC\\x30\\x30\\x5D\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n 
assert_eq!(buf, &output[..]);\n\n}\n", "file_path": "tests/kmac.rs", "rank": 50, "score": 5.496995595263696 }, { "content": " let mut kmac = KMac::new_kmac128(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x47\\x02\\x6C\\x7C\\xD7\\x93\\x08\\x4A\\xA0\\x28\\x3C\\x25\\x3E\\xF6\\x58\\x49\\x0C\\x0D\\xB6\\x14\\x38\\xB8\\x32\\x6F\\xE9\\xBD\\xDF\\x28\\x1B\\x83\\xAE\\x0F\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac128(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": 
"tests/kmac.rs", "rank": 51, "score": 4.644992439597018 }, { "content": " kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"My Tagged Application\";\n\n let output = b\"\\x1F\\x5B\\x4E\\x6C\\xCA\\x02\\x20\\x9E\\x0D\\xCB\\x5C\\xA6\\x35\\xB8\\x9A\\x15\\xE2\\x71\\xEC\\xC7\\x60\\x07\\x1D\\xFD\\x80\\x5F\\xAA\\x38\\xF9\\x72\\x92\\x30\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac128(key, custom);\n\n kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, output);\n\n}\n\n\n", "file_path": "tests/kmac.rs", "rank": 52, "score": 4.003659322185824 }, { "content": " 
\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"\";\n\n let output = b\"\\xFF\\x7B\\x17\\x1F\\x1E\\x8A\\x2B\\x24\\x68\\x3E\\xED\\x37\\x83\\x0E\\xE7\\x97\\x53\\x8B\\xA8\\xDC\\x56\\x3F\\x6D\\xA1\\xE6\\x67\\x39\\x1A\\x75\\xED\\xC0\\x2C\\\n\n \\xA6\\x33\\x07\\x9F\\x81\\xCE\\x12\\xA2\\x5F\\x45\\x61\\x5E\\xC8\\x99\\x72\\x03\\x1D\\x18\\x33\\x73\\x31\\xD2\\x4C\\xEB\\x8F\\x8C\\xA8\\xE6\\xA1\\x9F\\xD9\\x8B\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n let mut xof = kmac.xof();\n\n xof.squeeze(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n", "file_path": "tests/kmac.rs", "rank": 53, "score": 3.3861916222556077 }, { "content": " 
\\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n\n \\xC0\\xC1\\xC2\\xC3\\xC4\\xC5\\xC6\\xC7\";\n\n let custom = b\"\";\n\n let output = b\"\\x75\\x35\\x8C\\xF3\\x9E\\x41\\x49\\x4E\\x94\\x97\\x07\\x92\\x7C\\xEE\\x0A\\xF2\\x0A\\x3F\\xF5\\x53\\x90\\x4C\\x86\\xB0\\x8F\\x21\\xCC\\x41\\x4B\\xCF\\xD6\\x91\\\n\n \\x58\\x9D\\x27\\xCF\\x5E\\x15\\x36\\x9C\\xBB\\xFF\\x8B\\x9A\\x4C\\x2E\\xB1\\x78\\x00\\x85\\x5D\\x02\\x35\\xFF\\x63\\x5D\\xA8\\x25\\x33\\xEC\\x6B\\x75\\x9B\\x69\";\n\n\n\n let mut buf = vec![0; output.len()];\n\n let mut kmac = KMac::new_kmac256(key, custom);\n\n kmac.update(data);\n\n kmac.finalize(&mut buf);\n\n assert_eq!(buf, &output[..]);\n\n\n\n\n\n let key = b\"\\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\";\n\n let data = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F\\\n\n \\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2A\\x2B\\x2C\\x2D\\x2E\\x2F\\x30\\x31\\x32\\x33\\x34\\x35\\x36\\x37\\x38\\x39\\x3A\\x3B\\x3C\\x3D\\x3E\\x3F\\\n\n \\x40\\x41\\x42\\x43\\x44\\x45\\x46\\x47\\x48\\x49\\x4A\\x4B\\x4C\\x4D\\x4E\\x4F\\x50\\x51\\x52\\x53\\x54\\x55\\x56\\x57\\x58\\x59\\x5A\\x5B\\x5C\\x5D\\x5E\\x5F\\\n\n \\x60\\x61\\x62\\x63\\x64\\x65\\x66\\x67\\x68\\x69\\x6A\\x6B\\x6C\\x6D\\x6E\\x6F\\x70\\x71\\x72\\x73\\x74\\x75\\x76\\x77\\x78\\x79\\x7A\\x7B\\x7C\\x7D\\x7E\\x7F\\\n\n \\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F\\x90\\x91\\x92\\x93\\x94\\x95\\x96\\x97\\x98\\x99\\x9A\\x9B\\x9C\\x9D\\x9E\\x9F\\\n\n \\xA0\\xA1\\xA2\\xA3\\xA4\\xA5\\xA6\\xA7\\xA8\\xA9\\xAA\\xAB\\xAC\\xAD\\xAE\\xAF\\xB0\\xB1\\xB2\\xB3\\xB4\\xB5\\xB6\\xB7\\xB8\\xB9\\xBA\\xBB\\xBC\\xBD\\xBE\\xBF\\\n", "file_path": 
"tests/kmac.rs", "rank": 54, "score": 3.034385094376675 }, { "content": "extern crate sp800_185;\n\n\n\nuse sp800_185::TupleHash;\n\n\n\n\n\n#[test]\n", "file_path": "tests/tuplehash.rs", "rank": 55, "score": 2.653513228924179 }, { "content": "extern crate sp800_185;\n\n\n\nuse sp800_185::KMac;\n\n\n\n\n\n#[test]\n", "file_path": "tests/kmac.rs", "rank": 56, "score": 2.653513228924179 } ]
Rust
src/mesh/model.rs
OllieBerzs/tegne-rs
be614c6b5091ffe476ffbe44ef4ea5b62d998078
use std::collections::HashSet; use std::slice::Iter; use super::Mesh; use crate::math::Mat4; use crate::pipeline::Descriptor; use crate::pipeline::Material; use crate::resources::Handle; pub struct Model { pub nodes: Vec<ModelNode>, } #[derive(Clone)] pub struct ModelNode { pub meshes: Vec<Handle<Mesh>>, pub materials: Vec<Handle<Material>>, pub matrix: Mat4, pub children: Vec<Self>, } struct ChildIter<'a> { stack: Vec<Iter<'a, ModelNode>>, } impl Model { pub fn fix_color_space(&mut self) { let mut fixed = HashSet::new(); self.nodes .iter_mut() .for_each(|n| n.fix_color_space(&mut fixed)); } pub fn meshes(&self) -> impl Iterator<Item = &Handle<Mesh>> { self.nodes.iter().map(|node| node.meshes()).flatten() } pub fn materials(&self) -> impl Iterator<Item = &Handle<Material>> { self.nodes.iter().map(|node| node.materials()).flatten() } } impl ModelNode { pub(crate) fn orders(&self) -> impl Iterator<Item = (&Handle<Mesh>, &Handle<Material>)> { self.meshes.iter().zip(self.materials.iter()) } fn fix_color_space(&mut self, fixed: &mut HashSet<Descriptor>) { for mat in &mut self.materials { let mut m = mat.write(); if !fixed.contains(&m.descriptor()) { m.a[0] = to_linear(m.a[0]); m.a[1] = to_linear(m.a[1]); m.a[2] = to_linear(m.a[2]); fixed.insert(m.descriptor()); } } self.children .iter_mut() .for_each(|c| c.fix_color_space(fixed)); } fn meshes(&self) -> impl Iterator<Item = &Handle<Mesh>> { self.meshes .iter() .chain(self.child_iter().map(|node| node.meshes.iter()).flatten()) } fn materials(&self) -> impl Iterator<Item = &Handle<Material>> { self.materials.iter().chain( self.child_iter() .map(|node| node.materials.iter()) .flatten(), ) } fn child_iter(&self) -> ChildIter<'_> { ChildIter { stack: vec![self.children.iter()], } } } impl<'a> Iterator for ChildIter<'a> { type Item = &'a ModelNode; fn next(&mut self) -> Option<Self::Item> { loop { if let Some(mut top_iter) = self.stack.pop() { if let Some(node) = top_iter.next() { self.stack.push(top_iter); 
self.stack.push(node.children.iter()); return Some(&node); } } else { return None; } } } } fn to_linear(value: f32) -> f32 { let s = clamp(value, 0.0, 1.0); let cutoff = 0.04045; let gamma = 2.2; if s <= cutoff { s / 12.92 } else { ((s + 0.055) / 1.055).powf(gamma) } } fn clamp(value: f32, min: f32, max: f32) -> f32 { if value < min { min } else if value > max { max } else { value } }
use std::collections::HashSet; use std::slice::Iter; use super::Mesh; use crate::math::Mat4; use crate::pipeline::Descriptor; use crate::pipeline::Material; use crate::resources::Handle; pub struct Model { pub nodes: Vec<ModelNode>, } #[derive(Clone)] pub struct ModelNode { pub meshes: Vec<Handle<Mesh>>, pub materials: Vec<Handle<Material>>, pub matrix: Mat4, pub children: Vec<Self>, } struct ChildIter<'a> { stack: Vec<Iter<'a, ModelNode>>, } impl Model {
pub fn meshes(&self) -> impl Iterator<Item = &Handle<Mesh>> { self.nodes.iter().map(|node| node.meshes()).flatten() } pub fn materials(&self) -> impl Iterator<Item = &Handle<Material>> { self.nodes.iter().map(|node| node.materials()).flatten() } } impl ModelNode { pub(crate) fn orders(&self) -> impl Iterator<Item = (&Handle<Mesh>, &Handle<Material>)> { self.meshes.iter().zip(self.materials.iter()) } fn fix_color_space(&mut self, fixed: &mut HashSet<Descriptor>) { for mat in &mut self.materials { let mut m = mat.write(); if !fixed.contains(&m.descriptor()) { m.a[0] = to_linear(m.a[0]); m.a[1] = to_linear(m.a[1]); m.a[2] = to_linear(m.a[2]); fixed.insert(m.descriptor()); } } self.children .iter_mut() .for_each(|c| c.fix_color_space(fixed)); } fn meshes(&self) -> impl Iterator<Item = &Handle<Mesh>> { self.meshes .iter() .chain(self.child_iter().map(|node| node.meshes.iter()).flatten()) } fn materials(&self) -> impl Iterator<Item = &Handle<Material>> { self.materials.iter().chain( self.child_iter() .map(|node| node.materials.iter()) .flatten(), ) } fn child_iter(&self) -> ChildIter<'_> { ChildIter { stack: vec![self.children.iter()], } } } impl<'a> Iterator for ChildIter<'a> { type Item = &'a ModelNode; fn next(&mut self) -> Option<Self::Item> { loop { if let Some(mut top_iter) = self.stack.pop() { if let Some(node) = top_iter.next() { self.stack.push(top_iter); self.stack.push(node.children.iter()); return Some(&node); } } else { return None; } } } } fn to_linear(value: f32) -> f32 { let s = clamp(value, 0.0, 1.0); let cutoff = 0.04045; let gamma = 2.2; if s <= cutoff { s / 12.92 } else { ((s + 0.055) / 1.055).powf(gamma) } } fn clamp(value: f32, min: f32, max: f32) -> f32 { if value < min { min } else if value > max { max } else { value } }
pub fn fix_color_space(&mut self) { let mut fixed = HashSet::new(); self.nodes .iter_mut() .for_each(|n| n.fix_color_space(&mut fixed)); }
function_block-full_function
[ { "content": "struct Cache {\n\n shader: Option<Handle<Shader>>,\n\n material: Option<Handle<Material>>,\n\n font: Option<Handle<Font>>,\n\n\n\n // colors\n\n background: Rgb,\n\n fill: Rgb,\n\n stroke: Rgb,\n\n tint: Rgb,\n\n\n\n // shadows\n\n shadows: bool,\n\n\n\n // other\n\n matrix: Mat4,\n\n stroke_weight: f32,\n\n font_size: u32,\n\n shape_mode: ShapeMode,\n\n border_mode: BorderMode,\n", "file_path": "src/renderer/target.rs", "rank": 8, "score": 53117.41939910229 }, { "content": "struct Sphere {\n\n center: Vec3,\n\n radius: f32,\n\n}\n\n\n\nimpl ShadowRenderer {\n\n pub(crate) fn new(\n\n device: &Device,\n\n uniforms: &mut Uniforms,\n\n map_size: u32,\n\n target_count: u32,\n\n ) -> Result<Self> {\n\n let shader = Shader::from_spirv_bytes(\n\n device,\n\n uniforms,\n\n Msaa::Disabled,\n\n include_bytes!(\"../../shaders/shadow.spirv\"),\n\n )\n\n .expect(\"bad shader\");\n\n\n", "file_path": "src/renderer/shadow.rs", "rank": 9, "score": 53117.41939910229 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Frame {\n\n offset: Vec2,\n\n size: Vec2,\n\n}\n\n\n", "file_path": "examples/2d-examples/run_animation.rs", "rank": 10, "score": 51223.537626112 }, { "content": "#[derive(Debug)]\n\nstruct Defines {\n\n values: HashMap<String, String>,\n\n}\n\n\n\npub(crate) fn compile(src: &str) -> Result<(Vec<u8>, Vec<u8>, [u8; 4])> {\n\n let defines = Defines::new(src);\n\n\n\n let bytes = [\n\n match defines.get(\"DEPTH\") {\n\n \"test\" => 0,\n\n \"write\" => 1,\n\n \"test_and_write\" => 2,\n\n \"disabled\" => 3,\n\n \"\" => {\n\n return Err(Error::InvalidGlsl(\n\n \"depth mode not set. 
set with '#define DEPTH <mode>'\".to_string(),\n\n ))\n\n }\n\n s => {\n\n return Err(Error::InvalidGlsl(format!(\n", "file_path": "src/features/glsl_compiler.rs", "rank": 11, "score": 51223.537626112 }, { "content": "struct TargetResources {\n\n world_descriptor: Descriptor,\n\n world_buffer: Buffer<ShaderWorld>,\n\n text_mesh: Mesh,\n\n line_mesh: Mesh,\n\n shape_mesh: Mesh,\n\n}\n\n\n\nimpl ForwardRenderer {\n\n pub(crate) fn new(\n\n device: &Device,\n\n uniforms: &mut Uniforms,\n\n shadow_map_size: u32,\n\n target_count: u32,\n\n ) -> Result<Self> {\n\n let shadow_renderer = ShadowRenderer::new(device, uniforms, shadow_map_size, target_count)?;\n\n let target_resources: Vec<_> = (0..target_count)\n\n .map(|_| TargetResources::new(device, uniforms))\n\n .collect::<Result<_>>()?;\n\n\n", "file_path": "src/renderer/forward.rs", "rank": 12, "score": 51223.537626112 }, { "content": "struct TargetResources {\n\n world_descriptors: [Descriptor; SHADOW_SPLIT_COUNT],\n\n world_buffers: [Buffer<ShaderWorld>; SHADOW_SPLIT_COUNT],\n\n shadow_descriptor: Descriptor,\n\n shadow_maps: [Canvas; SHADOW_SPLIT_COUNT],\n\n}\n\n\n", "file_path": "src/renderer/shadow.rs", "rank": 13, "score": 51223.537626112 }, { "content": "struct Animation {\n\n frames: Vec<Frame>,\n\n current: f32,\n\n speed: f32,\n\n}\n\n\n", "file_path": "examples/2d-examples/run_animation.rs", "rank": 14, "score": 51223.537626112 }, { "content": "/// Trait to get color values in-between two.\n\npub trait Mix {\n\n /// Calculate color between `from` and `to`\n\n /// at point `p` in range 0 to 1\n\n fn mix(from: Self, to: Self, p: f32) -> Self;\n\n}\n\n\n\npub(crate) fn mixf(from: f32, to: f32, p: f32) -> f32 {\n\n from + (to - from) * p\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::mixf;\n\n\n\n #[test]\n\n fn simple_mix() {\n\n assert_eq_delta!(mixf(0.0, 1.0, 0.5), 0.5);\n\n assert_eq_delta!(mixf(1.0, 1.0, 0.5), 1.0);\n\n assert_eq_delta!(mixf(0.0, 0.0, 0.5), 0.0);\n\n }\n\n}\n", "file_path": 
"src/color/mix.rs", "rank": 15, "score": 47396.12426236768 }, { "content": "// Utils\n\npub fn check(result: Result) {\n\n match result {\n\n SUCCESS | NOT_READY | TIMEOUT | EVENT_SET | EVENT_RESET | INCOMPLETE => (),\n\n ERROR_OUT_OF_HOST_MEMORY => panic!(\"out of host memory\"),\n\n ERROR_OUT_OF_DEVICE_MEMORY => panic!(\"out of device memory\"),\n\n ERROR_INITIALIZATION_FAILED => panic!(\"initialization failed\"),\n\n ERROR_DEVICE_LOST => panic!(\"device has been lost\"),\n\n ERROR_MEMORY_MAP_FAILED => panic!(\"memory mapping failed\"),\n\n ERROR_LAYER_NOT_PRESENT => panic!(\"layer not present\"),\n\n ERROR_EXTENSION_NOT_PRESENT => panic!(\"extension not present\"),\n\n ERROR_FEATURE_NOT_PRESENT => panic!(\"feature not present\"),\n\n ERROR_INCOMPATIBLE_DRIVER => panic!(\"requested Vulkan version is not supported\"),\n\n ERROR_TOO_MANY_OBJECTS => panic!(\"too many objects of type have been created\"),\n\n ERROR_FORMAT_NOT_SUPPORTED => panic!(\"requested format is not supported\"),\n\n ERROR_FRAGMENTED_POOL => panic!(\"pool allocation failed due to fragmentation\"),\n\n n => panic!(\"unknowned error has occured: {}\", n),\n\n }\n\n}\n", "file_path": "src/vk.rs", "rank": 16, "score": 41884.72680257005 }, { "content": " pub d: Vec4,\n\n /// parameter E\n\n pub e: Vec4,\n\n /// parameter F\n\n pub f: Vec4,\n\n /// parameter G\n\n pub g: Vec4,\n\n /// parameter H\n\n pub h: Vec4,\n\n /// texture storage for that are used\n\n /// in the material\n\n pub textures: Vec<Handle<Texture>>,\n\n\n\n descriptor: Descriptor,\n\n buffer: Buffer<ShaderMaterial>,\n\n}\n\n\n\nimpl Material {\n\n pub(crate) fn new(device: &Device, uniforms: &mut Uniforms) -> Result<Self> {\n\n let buffer = Buffer::dynamic(device, BufferUsage::Uniform, 1);\n", "file_path": "src/pipeline/material.rs", "rank": 17, "score": 40209.509493620986 }, { "content": "/// ```no_run\n\n/// # use duku::Duku;\n\n/// # let (mut duku, _) = Duku::windowed(1, 1).unwrap();\n\n/// let material = 
duku.create_material_pbr().unwrap();\n\n/// material.write().roughness(0.5);\n\n///\n\n/// # duku.draw(None, |t| {\n\n/// // when drawing\n\n/// t.material(&material);\n\n/// t.cube([1.0, 1.0, 1.0]);\n\n/// # });\n\n/// ```\n\npub struct Material {\n\n /// parameter A\n\n pub a: Vec4,\n\n /// parameter B\n\n pub b: Vec4,\n\n /// parameter C\n\n pub c: Vec4,\n\n /// parameter D\n", "file_path": "src/pipeline/material.rs", "rank": 18, "score": 40207.065119244886 }, { "content": " c: self.c,\n\n d: self.d,\n\n e: self.e,\n\n f: self.f,\n\n g: self.g,\n\n h: self.h,\n\n }]);\n\n }\n\n\n\n pub(crate) const fn descriptor(&self) -> Descriptor {\n\n self.descriptor\n\n }\n\n\n\n pub(crate) fn destroy(&self, device: &Device) {\n\n self.buffer.destroy(device);\n\n }\n\n}\n\n\n\nimpl PartialEq for Material {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.buffer == other.buffer\n\n }\n\n}\n", "file_path": "src/pipeline/material.rs", "rank": 19, "score": 40205.59395661313 }, { "content": "// Oliver Berzs\n\n// https://github.com/oberzs/duku\n\n\n\nuse super::Descriptor;\n\nuse super::ShaderMaterial;\n\nuse super::Uniforms;\n\nuse crate::buffer::Buffer;\n\nuse crate::buffer::BufferUsage;\n\nuse crate::color::Rgbf;\n\nuse crate::device::Device;\n\nuse crate::error::Result;\n\nuse crate::image::Canvas;\n\nuse crate::image::Texture;\n\nuse crate::math::Vec4;\n\nuse crate::resources::Handle;\n\n\n\n/// Material parameters to use in a shader.\n\n///\n\n/// # Examples\n\n///\n", "file_path": "src/pipeline/material.rs", "rank": 20, "score": 40203.11430941492 }, { "content": " let descriptor = uniforms.material_set(device, &buffer)?;\n\n\n\n Ok(Self {\n\n a: Vec4::default(),\n\n b: Vec4::default(),\n\n c: Vec4::default(),\n\n d: Vec4::default(),\n\n e: Vec4::default(),\n\n f: Vec4::default(),\n\n g: Vec4::default(),\n\n h: Vec4::default(),\n\n textures: vec![],\n\n buffer,\n\n descriptor,\n\n })\n\n }\n\n\n\n /// Set albedo color for the PBR and other various shaders\n\n pub fn 
albedo_color(&mut self, color: impl Into<Rgbf>) {\n\n let temp = self.a[3];\n", "file_path": "src/pipeline/material.rs", "rank": 21, "score": 40202.80866079668 }, { "content": " /// Set roughness factor for the PBR shader\n\n pub fn roughness(&mut self, value: f32) {\n\n self.b[1] = value;\n\n }\n\n\n\n /// Set emissive color for the PBR shader\n\n pub fn emissive(&mut self, color: impl Into<Rgbf>) {\n\n let temp = self.d[3];\n\n self.d = color.into().into();\n\n self.d[3] = temp;\n\n }\n\n\n\n /// Set metalness-roughness texture for the PBR shader\n\n pub fn metalness_roughness_texture(&mut self, texture: Handle<Texture>) {\n\n self.b[2] = texture.read().shader_index() as f32;\n\n self.textures.push(texture);\n\n }\n\n\n\n /// Set ambient occlusion texture for the PBR shader\n\n pub fn ambient_occlusion_texture(&mut self, texture: Handle<Texture>) {\n", "file_path": "src/pipeline/material.rs", "rank": 22, "score": 40200.13958989482 }, { "content": " self.b[3] = texture.read().shader_index() as f32;\n\n self.textures.push(texture);\n\n }\n\n\n\n /// Set normal texture for the PBR shader\n\n pub fn normal_texture(&mut self, texture: Handle<Texture>) {\n\n self.c[0] = texture.read().shader_index() as f32;\n\n self.textures.push(texture);\n\n }\n\n\n\n /// Set emissive texture for the PBR shader\n\n pub fn emissive_texture(&mut self, texture: Handle<Texture>) {\n\n self.c[1] = texture.read().shader_index() as f32;\n\n self.textures.push(texture);\n\n }\n\n\n\n pub(crate) fn update(&mut self) {\n\n self.buffer.copy_from_data(&[ShaderMaterial {\n\n a: self.a,\n\n b: self.b,\n", "file_path": "src/pipeline/material.rs", "rank": 23, "score": 40199.98995133173 }, { "content": " self.a = color.into().into();\n\n self.a[3] = temp;\n\n }\n\n\n\n /// Set albedo texture for the PBR and other various shaders\n\n pub fn albedo_texture(&mut self, texture: Handle<Texture>) {\n\n self.a[3] = texture.read().shader_index() as f32;\n\n self.textures.push(texture);\n\n }\n\n\n\n /// Set 
albedo canvas for the PBR and other various shaders\n\n pub fn albedo_canvas(&mut self, f: &Handle<Canvas>) {\n\n self.a[3] = f.read().shader_index() as f32;\n\n }\n\n\n\n /// Set metalness factor for the PBR shader\n\n pub fn metalness(&mut self, value: f32) {\n\n self.b[0] = value;\n\n }\n\n\n", "file_path": "src/pipeline/material.rs", "rank": 24, "score": 40196.78548419295 }, { "content": "// Oliver Berzs\n\n// https://github.com/oberzs/duku\n\n\n\nmod model;\n\nmod vertex;\n\n\n\nuse std::iter;\n\n\n\nuse crate::buffer::Buffer;\n\nuse crate::buffer::BufferUsage;\n\nuse crate::color::Rgbf;\n\nuse crate::device::Device;\n\nuse crate::math::Vec2;\n\nuse crate::math::Vec3;\n\nuse crate::vk;\n\n\n\npub(crate) use vertex::Vertex;\n\n\n\npub use model::Model;\n\npub use model::ModelNode;\n", "file_path": "src/mesh/mod.rs", "rank": 25, "score": 40129.64419662296 }, { "content": "// Oliver Berzs\n\n// https://github.com/oberzs/duku\n\n\n\n// Vertex - struct representing a vertex for a mesh\n\n\n\nuse std::mem;\n\n\n\nuse crate::math::Vec2;\n\nuse crate::math::Vec3;\n\nuse crate::math::Vec4;\n\nuse crate::vk;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\n#[repr(C)]\n\npub(crate) struct Vertex {\n\n pub(crate) in_local_position: Vec3,\n\n pub(crate) in_normal: Vec3,\n\n pub(crate) in_tangent: Vec3,\n\n pub(crate) in_uv: Vec2,\n\n pub(crate) in_color: Vec4,\n", "file_path": "src/mesh/vertex.rs", "rank": 26, "score": 40123.724534623856 }, { "content": " self.normals[c] += normal;\n\n }\n\n for norm in &mut self.normals {\n\n *norm = norm.unit();\n\n }\n\n }\n\n }\n\n\n\n /// Calculate vertex tangents automatically\n\n /// smoothing the values to achieve smooth\n\n /// shading.\n\n ///\n\n /// Should only be called if normal texture is\n\n /// used in material.\n\n pub fn calculate_tangents(&mut self) {\n\n self.tangents = vec![Vec3::default(); self.vertices.len()];\n\n\n\n if self.indices.len() % 3 == 0 {\n\n for tri in self.indices.chunks(3) {\n\n let a = tri[0] as usize;\n", 
"file_path": "src/mesh/mod.rs", "rank": 27, "score": 40120.500777209665 }, { "content": "///\n\n/// # duku.draw(None, |t| {\n\n/// // while rendering\n\n/// t.mesh(&mesh);\n\n/// # });\n\n/// ```\n\npub struct Mesh {\n\n /// vertex positions\n\n pub vertices: Vec<Vec3>,\n\n /// vertex UV coordinates\n\n pub uvs: Vec<Vec2>,\n\n /// vertex normal directions\n\n pub normals: Vec<Vec3>,\n\n /// vertex tangent directions\n\n pub tangents: Vec<Vec3>,\n\n /// vertex colors\n\n pub colors: Vec<Rgbf>,\n\n /// vertex texture indices\n\n pub textures: Vec<u32>,\n\n /// vertex indices\n", "file_path": "src/mesh/mod.rs", "rank": 28, "score": 40120.3424844407 }, { "content": "\n\n/// Shape collection for rendering\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use duku::Duku;\n\n/// # use duku::Vec3;\n\n/// # let (mut duku, _) = Duku::windowed(1, 1).unwrap();\n\n/// // setup a triangle\n\n/// let mesh = duku.create_mesh();\n\n/// {\n\n/// let mut m = mesh.write();\n\n/// m.vertices = vec![\n\n/// Vec3::new(-1.0, -1.0, 0.0),\n\n/// Vec3::new(0.0, 1.0, 0.0),\n\n/// Vec3::new(1.0, -1.0, 0.0),\n\n/// ];\n\n/// m.indices = vec![0, 1, 2];\n\n/// }\n", "file_path": "src/mesh/mod.rs", "rank": 29, "score": 40120.30175826297 }, { "content": " pub indices: Vec<u32>,\n\n\n\n vertex_buffer: Buffer<Vertex>,\n\n index_buffer: Buffer<u32>,\n\n index_count: usize,\n\n}\n\n\n\nimpl Mesh {\n\n pub(crate) fn new(device: &Device) -> Self {\n\n let vertex_buffer = Buffer::dynamic(device, BufferUsage::Vertex, 1);\n\n let index_buffer = Buffer::dynamic(device, BufferUsage::Index, 3);\n\n\n\n Self {\n\n vertices: vec![Vec3::default(); 1],\n\n uvs: vec![Vec2::default(); 1],\n\n normals: vec![Vec3::default(); 1],\n\n tangents: vec![Vec3::default(); 1],\n\n colors: vec![Rgbf::gray(1.0); 1],\n\n textures: vec![0; 1],\n\n indices: vec![0; 3],\n", "file_path": "src/mesh/mod.rs", "rank": 30, "score": 40119.83567445374 }, { "content": " index_count: 3,\n\n vertex_buffer,\n\n index_buffer,\n\n }\n\n 
}\n\n\n\n pub(crate) fn combine(device: &Device, meshes: &[&Self]) -> Self {\n\n let mut offset = 0;\n\n let mut indices = vec![];\n\n let mut vertices = vec![];\n\n let mut normals = vec![];\n\n let mut tangents = vec![];\n\n let mut uvs = vec![];\n\n let mut colors = vec![];\n\n let mut textures = vec![];\n\n\n\n for mesh in meshes {\n\n indices.extend(mesh.indices.iter().map(|t| t + offset));\n\n vertices.extend(&mesh.vertices);\n\n normals.extend(&mesh.normals);\n", "file_path": "src/mesh/mod.rs", "rank": 31, "score": 40118.698108091856 }, { "content": " pub(crate) in_texture: u32,\n\n}\n\n\n\nimpl Vertex {\n\n pub(crate) const fn binding_description() -> vk::VertexInputBindingDescription {\n\n vk::VertexInputBindingDescription {\n\n binding: 0,\n\n stride: mem::size_of::<Self>() as u32,\n\n input_rate: vk::VERTEX_INPUT_RATE_VERTEX,\n\n }\n\n }\n\n\n\n pub(crate) const fn attribute_descriptions() -> [vk::VertexInputAttributeDescription; 6] {\n\n let mut offsets = [0; 6];\n\n offsets[0] = 0;\n\n offsets[1] = offsets[0] + mem::size_of::<Vec3>() as u32;\n\n offsets[2] = offsets[1] + mem::size_of::<Vec3>() as u32;\n\n offsets[3] = offsets[2] + mem::size_of::<Vec3>() as u32;\n\n offsets[4] = offsets[3] + mem::size_of::<Vec2>() as u32;\n\n offsets[5] = offsets[4] + mem::size_of::<Vec4>() as u32;\n", "file_path": "src/mesh/vertex.rs", "rank": 32, "score": 40116.847613898906 }, { "content": " tangents.extend(&mesh.tangents);\n\n uvs.extend(&mesh.uvs);\n\n colors.extend(&mesh.colors);\n\n textures.extend(&mesh.textures);\n\n offset = vertices.len() as u32;\n\n }\n\n\n\n let mut result = Self::new(device);\n\n result.vertices = vertices;\n\n result.normals = normals;\n\n result.tangents = tangents;\n\n result.colors = colors;\n\n result.uvs = uvs;\n\n result.textures = textures;\n\n result.indices = indices;\n\n result.update(device);\n\n result\n\n }\n\n\n\n /// Calculate vertex normals automatically\n", "file_path": "src/mesh/mod.rs", "rank": 33, "score": 
40115.3943224845 }, { "content": " self.index_count = self.indices.len();\n\n }\n\n\n\n pub(crate) fn vertex_buffer(&self) -> vk::Buffer {\n\n self.vertex_buffer.handle()\n\n }\n\n\n\n pub(crate) fn index_buffer(&self) -> vk::Buffer {\n\n self.index_buffer.handle()\n\n }\n\n\n\n pub(crate) const fn index_count(&self) -> usize {\n\n self.index_count\n\n }\n\n\n\n pub(crate) fn destroy(&self, device: &Device) {\n\n self.vertex_buffer.destroy(device);\n\n self.index_buffer.destroy(device);\n\n }\n\n}\n", "file_path": "src/mesh/mod.rs", "rank": 34, "score": 40114.72091576522 }, { "content": " /// smoothing the values to achieve smooth\n\n /// shading.\n\n pub fn calculate_normals(&mut self) {\n\n self.normals = vec![Vec3::default(); self.vertices.len()];\n\n\n\n if self.indices.len() % 3 == 0 {\n\n for tri in self.indices.chunks(3) {\n\n let a = tri[0] as usize;\n\n let b = tri[1] as usize;\n\n let c = tri[2] as usize;\n\n\n\n // get vertices\n\n let vtx_a = self.vertices[a];\n\n let vtx_b = self.vertices[b];\n\n let vtx_c = self.vertices[c];\n\n\n\n // calculate normal\n\n let normal = (vtx_b - vtx_a).cross(vtx_c - vtx_a);\n\n self.normals[a] += normal;\n\n self.normals[b] += normal;\n", "file_path": "src/mesh/mod.rs", "rank": 35, "score": 40112.86928507171 }, { "content": " let tangent = (dp1 * du2.y - dp2 * du1.y) * r;\n\n\n\n self.tangents[a] += tangent;\n\n self.tangents[b] += tangent;\n\n self.tangents[c] += tangent;\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn update(&mut self, device: &Device) {\n\n let vertices: Vec<_> = self\n\n .vertices\n\n .iter()\n\n .zip(self.uvs.iter().chain(iter::repeat(&Vec2::default())))\n\n .zip(self.normals.iter().chain(iter::repeat(&Vec3::default())))\n\n .zip(self.tangents.iter().chain(iter::repeat(&Vec3::default())))\n\n .zip(self.colors.iter().chain(iter::repeat(&Rgbf::gray(1.0))))\n\n .zip(self.textures.iter().chain(iter::repeat(&0)))\n\n .map(|(((((pos, uv), normal), tangent), col), tex)| Vertex {\n\n in_local_position: *pos,\n", 
"file_path": "src/mesh/mod.rs", "rank": 36, "score": 40112.407600635706 }, { "content": " let b = tri[1] as usize;\n\n let c = tri[2] as usize;\n\n\n\n // get vertices\n\n let pos_a = self.vertices[a];\n\n let pos_b = self.vertices[b];\n\n let pos_c = self.vertices[c];\n\n\n\n // get uvs\n\n let uv_a = self.uvs.get(a).copied().unwrap_or_default();\n\n let uv_b = self.uvs.get(b).copied().unwrap_or_default();\n\n let uv_c = self.uvs.get(c).copied().unwrap_or_default();\n\n\n\n // calculate tangent\n\n let dp1 = pos_b - pos_a;\n\n let dp2 = pos_c - pos_a;\n\n let du1 = uv_b - uv_a;\n\n let du2 = uv_c - uv_a;\n\n\n\n let r = 1.0 / (du1.x * du2.y - du1.y * du2.x);\n", "file_path": "src/mesh/mod.rs", "rank": 37, "score": 40109.824148833 }, { "content": " format: vk::FORMAT_R32G32B32_SFLOAT,\n\n offset: offsets[2],\n\n },\n\n // in_uv\n\n vk::VertexInputAttributeDescription {\n\n location: 3,\n\n binding: 0,\n\n format: vk::FORMAT_R32G32_SFLOAT,\n\n offset: offsets[3],\n\n },\n\n // in_color\n\n vk::VertexInputAttributeDescription {\n\n location: 4,\n\n binding: 0,\n\n format: vk::FORMAT_R32G32B32A32_SFLOAT,\n\n offset: offsets[4],\n\n },\n\n // in_texture\n\n vk::VertexInputAttributeDescription {\n\n location: 5,\n\n binding: 0,\n\n format: vk::FORMAT_R32_UINT,\n\n offset: offsets[5],\n\n },\n\n ]\n\n }\n\n}\n", "file_path": "src/mesh/vertex.rs", "rank": 38, "score": 40109.824148833 }, { "content": "\n\n [\n\n // in_local_position\n\n vk::VertexInputAttributeDescription {\n\n location: 0,\n\n binding: 0,\n\n format: vk::FORMAT_R32G32B32_SFLOAT,\n\n offset: offsets[0],\n\n },\n\n // in_normal\n\n vk::VertexInputAttributeDescription {\n\n location: 1,\n\n binding: 0,\n\n format: vk::FORMAT_R32G32B32_SFLOAT,\n\n offset: offsets[1],\n\n },\n\n // in_tangent\n\n vk::VertexInputAttributeDescription {\n\n location: 2,\n\n binding: 0,\n", "file_path": "src/mesh/vertex.rs", "rank": 39, "score": 40109.824148833 }, { "content": " in_normal: *normal,\n\n in_tangent: *tangent,\n\n 
in_uv: *uv,\n\n in_color: (*col).into(),\n\n in_texture: *tex,\n\n })\n\n .collect();\n\n\n\n // resize buffers if needed\n\n if vertices.len() > self.vertex_buffer.len() {\n\n self.vertex_buffer.resize(device, vertices.len());\n\n }\n\n if self.indices.len() > self.index_buffer.len() {\n\n self.index_buffer.resize(device, self.indices.len());\n\n }\n\n\n\n if !vertices.is_empty() && !self.indices.is_empty() {\n\n self.vertex_buffer.copy_from_data(&vertices);\n\n self.index_buffer.copy_from_data(&self.indices);\n\n }\n", "file_path": "src/mesh/mod.rs", "rank": 40, "score": 40109.824148833 }, { "content": "impl Mat4 {\n\n /// Create matrix from column vectors\n\n pub fn columns(\n\n x: impl Into<Vec4>,\n\n y: impl Into<Vec4>,\n\n z: impl Into<Vec4>,\n\n w: impl Into<Vec4>,\n\n ) -> Self {\n\n Self {\n\n x: x.into(),\n\n y: y.into(),\n\n z: z.into(),\n\n w: w.into(),\n\n }\n\n }\n\n\n\n /// Create matrix from row vectors\n\n pub fn rows(\n\n x: impl Into<Vec4>,\n\n y: impl Into<Vec4>,\n", "file_path": "src/math/mat4.rs", "rank": 41, "score": 40096.75804982553 }, { "content": "/// ```\n\n/// # use duku::Vec3;\n\n/// # use duku::Mat4;\n\n/// let vector = Vec3::new(2.0, 0.0, 0.0);\n\n/// let matrix = Mat4::scale([5.0, 1.0, 1.0]);\n\n/// let scaled = matrix * vector;\n\n/// ```\n\n#[repr(C)]\n\n#[derive(Default, Debug, Copy, Clone, PartialEq)]\n\npub struct Mat4 {\n\n /// the X column\n\n pub x: Vec4,\n\n /// the Y column\n\n pub y: Vec4,\n\n /// the Z column\n\n pub z: Vec4,\n\n /// the W column\n\n pub w: Vec4,\n\n}\n\n\n", "file_path": "src/math/mat4.rs", "rank": 42, "score": 40096.67428307746 }, { "content": " [m[12], m[13], m[14], m[15]],\n\n )\n\n }\n\n}\n\n\n\nimpl Into<[f32; 16]> for Mat4 {\n\n fn into(self) -> [f32; 16] {\n\n [\n\n self.x.x, self.x.y, self.x.z, self.x.w, self.y.x, self.y.y, self.y.z, self.y.w,\n\n self.z.x, self.z.y, self.z.z, self.z.w, self.w.x, self.w.y, self.w.z, self.w.w,\n\n ]\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
super::Mat4;\n\n use super::Quat;\n\n use super::Vec3;\n\n use super::Vec4;\n", "file_path": "src/math/mat4.rs", "rank": 43, "score": 40092.42854934237 }, { "content": " v.y * v.z * (1.0 - cos) - v.x * sin,\n\n 0.0,\n\n ];\n\n let row_z = [\n\n v.x * v.z * (1.0 - cos) - v.y * sin,\n\n v.y * v.z * (1.0 - cos) + v.x * sin,\n\n v.z * v.z * (1.0 - cos) + cos,\n\n 0.0,\n\n ];\n\n let row_w = [0.0, 0.0, 0.0, 1.0];\n\n\n\n Self::rows(row_x, row_y, row_z, row_w)\n\n }\n\n\n\n /// Create rotation matrix to rotate towards direction\n\n ///\n\n /// `up` is used as a guide to try aligning to\n\n pub fn look_rotation(forward: impl Into<Vec3>, up: impl Into<Vec3>) -> Self {\n\n let f = forward.into().unit();\n\n let r = up.into().unit().cross(f).unit();\n", "file_path": "src/math/mat4.rs", "rank": 44, "score": 40092.1175898805 }, { "content": " [0.0, 1.0, 0.0, 0.0],\n\n [0.0, 0.0, 1.0, 0.0],\n\n [0.0, 0.0, 0.0, 1.0],\n\n )\n\n }\n\n\n\n /// Create translation matrix\n\n ///\n\n /// Translation matrix moves vectors around\n\n pub fn translation(vector: impl Into<Vec3>) -> Self {\n\n let v = vector.into();\n\n Self::rows(\n\n [1.0, 0.0, 0.0, v.x],\n\n [0.0, 1.0, 0.0, v.y],\n\n [0.0, 0.0, 1.0, v.z],\n\n [0.0, 0.0, 0.0, 1.0],\n\n )\n\n }\n\n\n\n /// Create scale matrix\n", "file_path": "src/math/mat4.rs", "rank": 45, "score": 40090.66336650695 }, { "content": " /// Oposite of [decompose](crate::math::Mat4::decompose).\n\n pub fn compose(position: Vec3, scale: Vec3, rotation: Quat) -> Self {\n\n Mat4::translation(position) * Mat4::from(rotation) * Mat4::scale(scale)\n\n }\n\n\n\n /// Calculate the inverse of the matrix\n\n pub fn inverse(&self) -> Option<Self> {\n\n let m: [f32; 16] = (*self).into();\n\n let mut inv = [0.0; 16];\n\n\n\n inv[0] = m[5] * m[10] * m[15] - m[5] * m[11] * m[14] - m[9] * m[6] * m[15]\n\n + m[9] * m[7] * m[14]\n\n + m[13] * m[6] * m[11]\n\n - m[13] * m[7] * m[10];\n\n\n\n inv[4] = -m[4] * m[10] * m[15] + m[4] * m[11] * m[14] + m[8] * m[6] * m[15]\n\n - m[8] 
* m[7] * m[14]\n\n - m[12] * m[6] * m[11]\n\n + m[12] * m[7] * m[10];\n\n\n", "file_path": "src/math/mat4.rs", "rank": 46, "score": 40090.644403445694 }, { "content": " ///\n\n /// Scale matrix scales vectors\n\n pub fn scale(vector: impl Into<Vec3>) -> Self {\n\n let v = vector.into();\n\n Self::rows(\n\n [v.x, 0.0, 0.0, 0.0],\n\n [0.0, v.y, 0.0, 0.0],\n\n [0.0, 0.0, v.z, 0.0],\n\n [0.0, 0.0, 0.0, 1.0],\n\n )\n\n }\n\n\n\n /// Create rotation matrix with euler angles\n\n ///\n\n /// This rotation's yaw, pitch and roll are z, y and x\n\n pub fn euler_rotation(x: f32, y: f32, z: f32) -> Self {\n\n let sin_a = z.to_radians().sin();\n\n let cos_a = z.to_radians().cos();\n\n let sin_b = y.to_radians().sin();\n\n let cos_b = y.to_radians().cos();\n", "file_path": "src/math/mat4.rs", "rank": 47, "score": 40089.28024891935 }, { "content": " z: impl Into<Vec4>,\n\n w: impl Into<Vec4>,\n\n ) -> Self {\n\n let rx = x.into();\n\n let ry = y.into();\n\n let rz = z.into();\n\n let rw = w.into();\n\n\n\n Self::columns(\n\n [rx.x, ry.x, rz.x, rw.x],\n\n [rx.y, ry.y, rz.y, rw.y],\n\n [rx.z, ry.z, rz.z, rw.z],\n\n [rx.w, ry.w, rz.w, rw.w],\n\n )\n\n }\n\n\n\n /// Create identity matrix\n\n pub fn identity() -> Self {\n\n Self::rows(\n\n [1.0, 0.0, 0.0, 0.0],\n", "file_path": "src/math/mat4.rs", "rank": 48, "score": 40088.768073121675 }, { "content": "\n\nimpl From<Quat> for Mat4 {\n\n fn from(q: Quat) -> Self {\n\n let angle = 2.0 * q.w.acos().to_degrees();\n\n let scale = (1.0 - q.w * q.w).sqrt();\n\n let axis = if scale < 0.001 {\n\n Vec3::new(1.0, 0.0, 0.0)\n\n } else {\n\n Vec3::new(q.x, q.y, q.z) / scale\n\n };\n\n Mat4::axis_rotation(axis, angle)\n\n }\n\n}\n\n\n\nimpl From<[f32; 16]> for Mat4 {\n\n fn from(m: [f32; 16]) -> Self {\n\n Self::columns(\n\n [m[0], m[1], m[2], m[3]],\n\n [m[4], m[5], m[6], m[7]],\n\n [m[8], m[9], m[10], m[11]],\n", "file_path": "src/math/mat4.rs", "rank": 49, "score": 40087.41376094017 }, { "content": " (position, scale, rotation)\n\n }\n\n\n\n 
/// Access the X row of the matrix\n\n pub const fn rx(&self) -> Vec4 {\n\n Vec4::new(self.x.x, self.y.x, self.z.x, self.w.x)\n\n }\n\n\n\n /// Access the Y row of the matrix\n\n pub const fn ry(&self) -> Vec4 {\n\n Vec4::new(self.x.y, self.y.y, self.z.y, self.w.y)\n\n }\n\n\n\n /// Access the Z row of the matrix\n\n pub const fn rz(&self) -> Vec4 {\n\n Vec4::new(self.x.z, self.y.z, self.z.z, self.w.z)\n\n }\n\n\n\n /// Access the W row of the matrix\n\n pub const fn rw(&self) -> Vec4 {\n", "file_path": "src/math/mat4.rs", "rank": 50, "score": 40087.38814431293 }, { "content": " )\n\n }\n\n\n\n /// Create rotation matrix around axis\n\n ///\n\n /// This rotates vectors around axis by the angle\n\n pub fn axis_rotation(axis: impl Into<Vec3>, angle: f32) -> Self {\n\n let v = axis.into();\n\n let sin = angle.to_radians().sin();\n\n let cos = angle.to_radians().cos();\n\n\n\n let row_x = [\n\n v.x * v.x * (1.0 - cos) + cos,\n\n v.x * v.y * (1.0 - cos) - v.z * sin,\n\n v.x * v.z * (1.0 - cos) + v.y * sin,\n\n 0.0,\n\n ];\n\n let row_y = [\n\n v.x * v.y * (1.0 - cos) + v.z * sin,\n\n v.y * v.y * (1.0 - cos) + cos,\n", "file_path": "src/math/mat4.rs", "rank": 51, "score": 40087.214306347894 }, { "content": " }\n\n}\n\n\n\nimpl Mul<Self> for Mat4 {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: Self) -> Self::Output {\n\n let x = self * rhs.x;\n\n let y = self * rhs.y;\n\n let z = self * rhs.z;\n\n let w = self * rhs.w;\n\n Self::columns(x, y, z, w)\n\n }\n\n}\n\n\n\nimpl MulAssign<Self> for Mat4 {\n\n fn mul_assign(&mut self, rhs: Self) {\n\n *self = *self * rhs;\n\n }\n\n}\n", "file_path": "src/math/mat4.rs", "rank": 52, "score": 40086.96142224897 }, { "content": " Vec4::new(self.x.w, self.y.w, self.z.w, self.w.w)\n\n }\n\n}\n\n\n\nimpl Index<usize> for Mat4 {\n\n type Output = Vec4;\n\n\n\n fn index(&self, index: usize) -> &Vec4 {\n\n match index {\n\n 0 => &self.x,\n\n 1 => &self.y,\n\n 2 => &self.z,\n\n 3 => &self.w,\n\n _ => panic!(\"index out of range {}\", 
index),\n\n }\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for Mat4 {\n\n fn index_mut(&mut self, index: usize) -> &mut Vec4 {\n", "file_path": "src/math/mat4.rs", "rank": 53, "score": 40086.84946713787 }, { "content": " }\n\n}\n\n\n\nimpl Mul<Vec4> for Mat4 {\n\n type Output = Vec4;\n\n\n\n fn mul(self, rhs: Vec4) -> Self::Output {\n\n let x = self.rx().dot(rhs);\n\n let y = self.ry().dot(rhs);\n\n let z = self.rz().dot(rhs);\n\n let w = self.rw().dot(rhs);\n\n Vec4::new(x, y, z, w)\n\n }\n\n}\n\n\n\nimpl Mul<Vec3> for Mat4 {\n\n type Output = Vec3;\n\n\n\n fn mul(self, rhs: Vec3) -> Self::Output {\n\n (self * Vec4::from((rhs, 1.0))).xyz()\n", "file_path": "src/math/mat4.rs", "rank": 54, "score": 40086.57982534364 }, { "content": "// Oliver Berzs\n\n// https://github.com/oberzs/duku\n\n\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\nuse std::ops::Mul;\n\nuse std::ops::MulAssign;\n\n\n\nuse super::Quat;\n\nuse super::Vec3;\n\nuse super::Vec4;\n\n\n\n/// 4x4 Matrix.\n\n///\n\n/// Used for transforming vectors\n\n///\n\n/// Is column-major\n\n///\n\n/// # Examples\n\n///\n", "file_path": "src/math/mat4.rs", "rank": 55, "score": 40086.43515404087 }, { "content": " match index {\n\n 0 => &mut self.x,\n\n 1 => &mut self.y,\n\n 2 => &mut self.z,\n\n 3 => &mut self.w,\n\n _ => panic!(\"index out of range {}\", index),\n\n }\n\n }\n\n}\n\n\n\nimpl Mul<f32> for Mat4 {\n\n type Output = Mat4;\n\n\n\n fn mul(self, rhs: f32) -> Self::Output {\n\n let mut m = self;\n\n m.x *= rhs;\n\n m.y *= rhs;\n\n m.z *= rhs;\n\n m.w *= rhs;\n\n m\n", "file_path": "src/math/mat4.rs", "rank": 56, "score": 40086.013277938844 }, { "content": " assert_eq_delta!(r.y.y, 1.0);\n\n assert_eq_delta!(r.z.z, 1.0);\n\n assert_eq_delta!(r.w.w, 1.0);\n\n }\n\n\n\n #[test]\n\n fn projection() {\n\n let matrix = Mat4::perspective(90.0, 16.0 / 9.0, 0.1, 10.0);\n\n let point = Vec4::new(0.0, 0.0, 10.0, 1.0);\n\n let r = matrix * point;\n\n assert_eq_delta!(r.x, 0.0);\n\n assert_eq_delta!(r.y, 0.0);\n\n 
assert_eq_delta!(r.z, 10.0);\n\n assert_eq_delta!(r.w, 10.0);\n\n }\n\n\n\n #[test]\n\n fn compose() {\n\n let position = Vec3::new(1.0, 2.0, 3.0);\n\n let scale = Vec3::new(1.0, 1.0, 1.0);\n", "file_path": "src/math/mat4.rs", "rank": 57, "score": 40085.28670472466 }, { "content": " /// Create orthographic projection matrix\n\n ///\n\n /// This is a left-handed matrix\n\n /// with Z in range of [0; 1]\n\n pub fn orthographic(width: f32, height: f32, near: f32, far: f32) -> Self {\n\n let x_scale = 2.0 / width;\n\n let y_scale = 2.0 / height;\n\n let z_scale = 1.0 / (far - near);\n\n let z_move = -near / (far - near);\n\n\n\n Self::rows(\n\n [x_scale, 0.0, 0.0, 0.0],\n\n [0.0, y_scale, 0.0, 0.0],\n\n [0.0, 0.0, z_scale, z_move],\n\n [0.0, 0.0, 0.0, 1.0],\n\n )\n\n }\n\n\n\n /// Create matrix from position, scale and rotation\n\n ///\n", "file_path": "src/math/mat4.rs", "rank": 58, "score": 40085.128239640566 }, { "content": "\n\n /// Separate translation, scale and rotation parts of the matrix\n\n pub fn decompose(mut self) -> (Vec3, Vec3, Quat) {\n\n let position = Vec3::new(self.w.x, self.w.y, self.w.z);\n\n\n\n let determinant = self.x.x * (self.y.y * self.z.z - self.z.y * self.y.z)\n\n - self.y.x * (self.x.y * self.z.z - self.z.y * self.x.z)\n\n + self.z.x * (self.x.y * self.y.z - self.y.y * self.x.z);\n\n\n\n let sx = self.x.xyz().length();\n\n let sy = self.y.xyz().length();\n\n let sz = self.z.xyz().length() * determinant.signum();\n\n let scale = Vec3::new(sx, sy, sz);\n\n\n\n self.x *= 1.0 / sx;\n\n self.y *= 1.0 / sy;\n\n self.z *= 1.0 / sz;\n\n\n\n let rotation = Quat::from(self);\n\n\n", "file_path": "src/math/mat4.rs", "rank": 59, "score": 40083.60514391786 }, { "content": " let u = f.cross(r).unit();\n\n\n\n // Self::rows(\n\n // [r.x, u.x, f.x, 0.0],\n\n // [r.y, u.y, f.y, 0.0],\n\n // [r.z, u.z, f.z, 0.0],\n\n // [0.0, 0.0, 0.0, 1.0],\n\n // )\n\n\n\n Self::rows(\n\n [r.x, r.y, r.z, 0.0],\n\n [u.x, u.y, u.z, 0.0],\n\n [f.x, f.y, f.z, 0.0],\n\n [0.0, 
0.0, 0.0, 1.0],\n\n )\n\n }\n\n\n\n /// Create perspective projection matrix\n\n ///\n\n /// This is a left-handed matrix\n", "file_path": "src/math/mat4.rs", "rank": 60, "score": 40082.25408059743 }, { "content": " let rotation = Quat::default();\n\n assert_eq!(\n\n Mat4::compose(position, scale, rotation),\n\n Mat4::translation([1.0, 2.0, 3.0])\n\n );\n\n }\n\n\n\n #[test]\n\n fn from_quaternion() {\n\n let v = Vec3::new(0.0, 0.0, 1.0);\n\n let mq = Mat4::from(Quat::euler_rotation(90.0, 0.0, 0.0));\n\n let m = Mat4::euler_rotation(90.0, 0.0, 0.0);\n\n\n\n let rq = mq * v;\n\n let r = m * v;\n\n\n\n assert_eq_delta!(rq.x, 0.0);\n\n assert_eq_delta!(rq.y, -1.0);\n\n assert_eq_delta!(rq.z, 0.0);\n\n\n\n assert_eq_delta!(r.x, 0.0);\n\n assert_eq_delta!(r.y, -1.0);\n\n assert_eq_delta!(r.z, 0.0);\n\n }\n\n}\n", "file_path": "src/math/mat4.rs", "rank": 61, "score": 40082.23510352316 }, { "content": " assert_eq!(m.rw(), Vec4::new(0.0, 0.0, 0.0, 1.0));\n\n }\n\n\n\n #[test]\n\n fn translation() {\n\n let m = Mat4::translation([3.0, 4.0, 5.0]);\n\n let v = Vec3::new(6.0, 7.0, 8.0);\n\n assert_eq!(m * v, Vec3::new(9.0, 11.0, 13.0));\n\n }\n\n\n\n #[test]\n\n fn scale() {\n\n let m = Mat4::scale([1.0, 2.0, 3.0]);\n\n let v = Vec3::new(3.0, 4.0, 5.0);\n\n assert_eq!(m * v, Vec3::new(3.0, 8.0, 15.0));\n\n }\n\n\n\n #[test]\n\n fn axis_rotation() {\n\n let m = Mat4::axis_rotation([1.0, 0.0, 0.0], 180.0);\n", "file_path": "src/math/mat4.rs", "rank": 62, "score": 40082.21625418901 }, { "content": " }\n\n\n\n #[test]\n\n fn mul_with_vector() {\n\n let m = Mat4::rows(\n\n [1.0, 2.0, 3.0, 4.0],\n\n [5.0, 6.0, 7.0, 8.0],\n\n [8.0, 7.0, 6.0, 5.0],\n\n [4.0, 3.0, 2.0, 1.0],\n\n );\n\n let v = Vec4::new(1.0, 2.0, 3.0, 4.0);\n\n assert_eq!(m * v, Vec4::new(30.0, 70.0, 60.0, 20.0));\n\n }\n\n\n\n #[test]\n\n fn mul_with_self() {\n\n let ma = Mat4::rows(\n\n [1.0, 2.0, 3.0, 4.0],\n\n [5.0, 6.0, 7.0, 8.0],\n\n [8.0, 7.0, 6.0, 5.0],\n", "file_path": "src/math/mat4.rs", "rank": 63, "score": 
40082.1137893053 }, { "content": "\n\n #[test]\n\n fn columns() {\n\n let m = Mat4::columns(\n\n [1.0, 2.0, 3.0, 4.0],\n\n [5.0, 6.0, 7.0, 8.0],\n\n [8.0, 7.0, 6.0, 5.0],\n\n [4.0, 3.0, 2.0, 1.0],\n\n );\n\n assert_eq!(m.x, Vec4::new(1.0, 2.0, 3.0, 4.0));\n\n assert_eq!(m.y, Vec4::new(5.0, 6.0, 7.0, 8.0));\n\n assert_eq!(m.z, Vec4::new(8.0, 7.0, 6.0, 5.0));\n\n assert_eq!(m.w, Vec4::new(4.0, 3.0, 2.0, 1.0));\n\n }\n\n\n\n #[test]\n\n fn rows() {\n\n let m = Mat4::rows(\n\n [1.0, 2.0, 3.0, 4.0],\n\n [5.0, 6.0, 7.0, 8.0],\n", "file_path": "src/math/mat4.rs", "rank": 64, "score": 40081.89719199663 }, { "content": " assert_eq_delta!(r.y, -1.0);\n\n assert_eq_delta!(r.z, 0.0);\n\n }\n\n\n\n #[test]\n\n fn euler_rotation_y() {\n\n let m = Mat4::euler_rotation(0.0, 90.0, 0.0);\n\n let v = Vec3::new(0.0, 0.0, 1.0);\n\n let r = m * v;\n\n assert_eq_delta!(r.x, 1.0);\n\n assert_eq_delta!(r.y, 0.0);\n\n assert_eq_delta!(r.z, 0.0);\n\n }\n\n\n\n #[test]\n\n fn euler_rotation_z() {\n\n let m = Mat4::euler_rotation(0.0, 0.0, 90.0);\n\n let v = Vec3::new(1.0, 0.0, 0.0);\n\n let r = m * v;\n\n assert_eq_delta!(r.x, 0.0);\n", "file_path": "src/math/mat4.rs", "rank": 65, "score": 40081.6154209401 }, { "content": " [4.0, 3.0, 2.0, 1.0],\n\n );\n\n let mb = Mat4::rows(\n\n [8.0, 7.0, 6.0, 5.0],\n\n [4.0, 3.0, 2.0, 1.0],\n\n [1.0, 2.0, 3.0, 4.0],\n\n [5.0, 6.0, 7.0, 8.0],\n\n );\n\n let r = ma * mb;\n\n assert_eq!(r.rx(), Vec4::new(39.0, 43.0, 47.0, 51.0));\n\n assert_eq!(r.ry(), Vec4::new(111.0, 115.0, 119.0, 123.0));\n\n assert_eq!(r.rz(), Vec4::new(123.0, 119.0, 115.0, 111.0));\n\n assert_eq!(r.rw(), Vec4::new(51.0, 47.0, 43.0, 39.0));\n\n }\n\n\n\n #[test]\n\n fn inverse() {\n\n let m = Mat4::orthographic(20.0, 20.0, 0.1, 50.0);\n\n let r = m * m.inverse().expect(\"no inverse\");\n\n assert_eq_delta!(r.x.x, 1.0);\n", "file_path": "src/math/mat4.rs", "rank": 66, "score": 40081.58886475132 }, { "content": " assert_eq_delta!(r.x, 0.0);\n\n assert_eq_delta!(r.y, 1.0);\n\n 
assert_eq_delta!(r.z, 0.0);\n\n }\n\n\n\n #[test]\n\n fn look_rotation_z() {\n\n let m = Mat4::look_rotation([0.0, 0.0, -1.0], Vec3::up());\n\n let r = m * Vec3::forward();\n\n assert_eq_delta!(r.x, 0.0);\n\n assert_eq_delta!(r.y, 0.0);\n\n assert_eq_delta!(r.z, -1.0);\n\n }\n\n\n\n #[test]\n\n fn euler_rotation_x() {\n\n let m = Mat4::euler_rotation(90.0, 0.0, 0.0);\n\n let v = Vec3::new(0.0, 0.0, 1.0);\n\n let r = m * v;\n\n assert_eq_delta!(r.x, 0.0);\n", "file_path": "src/math/mat4.rs", "rank": 67, "score": 40081.510836993715 }, { "content": " let v = Vec3::new(1.0, 1.0, 1.0);\n\n let r = m * v;\n\n assert_eq_delta!(r.x, 1.0);\n\n assert_eq_delta!(r.y, -1.0);\n\n assert_eq_delta!(r.z, -1.0);\n\n }\n\n\n\n #[test]\n\n fn look_rotation_x() {\n\n let m = Mat4::look_rotation(Vec3::new(1.0, 0.0, 0.0), Vec3::up());\n\n let r = m * Vec3::forward();\n\n assert_eq_delta!(r.x, -1.0);\n\n assert_eq_delta!(r.y, 0.0);\n\n assert_eq_delta!(r.z, 0.0);\n\n }\n\n\n\n #[test]\n\n fn look_rotation_y() {\n\n let m = Mat4::look_rotation([0.0, 1.0, 0.0], Vec3::forward());\n\n let r = m * Vec3::forward();\n", "file_path": "src/math/mat4.rs", "rank": 68, "score": 40081.43518309152 }, { "content": " assert_eq_delta!(r.y, 1.0);\n\n assert_eq_delta!(r.z, 0.0);\n\n }\n\n\n\n #[test]\n\n fn perspective() {\n\n let m = Mat4::perspective(90.0, 1.0, 0.0, 100.0);\n\n assert_eq!(m.rx(), Vec4::new(1.0, 0.0, 0.0, 0.0));\n\n assert_eq!(m.ry(), Vec4::new(0.0, 1.0, 0.0, 0.0));\n\n assert_eq!(m.rz(), Vec4::new(0.0, 0.0, 1.0, -0.0));\n\n assert_eq!(m.rw(), Vec4::new(0.0, 0.0, 1.0, 0.0));\n\n }\n\n\n\n #[test]\n\n fn orthographic() {\n\n let m = Mat4::orthographic(1.0, 1.0, 0.0, 1.0);\n\n assert_eq!(m.rx(), Vec4::new(2.0, 0.0, 0.0, 0.0));\n\n assert_eq!(m.ry(), Vec4::new(0.0, 2.0, 0.0, 0.0));\n\n assert_eq!(m.rz(), Vec4::new(0.0, 0.0, 1.0, -0.0));\n\n assert_eq!(m.rw(), Vec4::new(0.0, 0.0, 0.0, 1.0));\n", "file_path": "src/math/mat4.rs", "rank": 69, "score": 40081.13230143267 }, { "content": " [8.0, 
7.0, 6.0, 5.0],\n\n [4.0, 3.0, 2.0, 1.0],\n\n );\n\n assert_eq!(m.x, Vec4::new(1.0, 5.0, 8.0, 4.0));\n\n assert_eq!(m.y, Vec4::new(2.0, 6.0, 7.0, 3.0));\n\n assert_eq!(m.z, Vec4::new(3.0, 7.0, 6.0, 2.0));\n\n assert_eq!(m.w, Vec4::new(4.0, 8.0, 5.0, 1.0));\n\n\n\n assert_eq!(m.rx(), Vec4::new(1.0, 2.0, 3.0, 4.0));\n\n assert_eq!(m.ry(), Vec4::new(5.0, 6.0, 7.0, 8.0));\n\n assert_eq!(m.rz(), Vec4::new(8.0, 7.0, 6.0, 5.0));\n\n assert_eq!(m.rw(), Vec4::new(4.0, 3.0, 2.0, 1.0));\n\n }\n\n\n\n #[test]\n\n fn identity() {\n\n let m = Mat4::identity();\n\n assert_eq!(m.rx(), Vec4::new(1.0, 0.0, 0.0, 0.0));\n\n assert_eq!(m.ry(), Vec4::new(0.0, 1.0, 0.0, 0.0));\n\n assert_eq!(m.rz(), Vec4::new(0.0, 0.0, 1.0, 0.0));\n", "file_path": "src/math/mat4.rs", "rank": 70, "score": 40079.90777875302 }, { "content": " /// with Z in range of [0; 1]\n\n pub fn perspective(fov: f32, aspect: f32, near: f32, far: f32) -> Self {\n\n let half_fov = (fov / 2.0).to_radians();\n\n let zoom_len = 1.0 / half_fov.tan();\n\n\n\n let x_scale = zoom_len / aspect;\n\n let y_scale = zoom_len;\n\n let z_scale = far / (far - near);\n\n let z_move = -(near * far) / (far - near);\n\n\n\n let copy = 1.0;\n\n\n\n Self::rows(\n\n [x_scale, 0.0, 0.0, 0.0],\n\n [0.0, y_scale, 0.0, 0.0],\n\n [0.0, 0.0, z_scale, z_move],\n\n [0.0, 0.0, copy, 0.0],\n\n )\n\n }\n\n\n", "file_path": "src/math/mat4.rs", "rank": 71, "score": 40079.58254112997 }, { "content": " let sin_g = x.to_radians().sin();\n\n let cos_g = x.to_radians().cos();\n\n\n\n let xx = cos_a * cos_b;\n\n let xy = cos_a * sin_b * sin_g - sin_a * cos_g;\n\n let xz = cos_a * sin_b * cos_g + sin_a * sin_g;\n\n\n\n let yx = sin_a * cos_b;\n\n let yy = sin_a * sin_b * sin_g + cos_a * cos_g;\n\n let yz = sin_a * sin_b * cos_g - cos_a * sin_g;\n\n\n\n let zx = -sin_b;\n\n let zy = cos_b * sin_g;\n\n let zz = cos_b * cos_g;\n\n\n\n Self::rows(\n\n [xx, xy, xz, 0.0],\n\n [yx, yy, yz, 0.0],\n\n [zx, zy, zz, 0.0],\n\n [0.0, 0.0, 0.0, 1.0],\n", "file_path": 
"src/math/mat4.rs", "rank": 72, "score": 40076.537404891256 }, { "content": " inv[10] = m[0] * m[5] * m[15] - m[0] * m[7] * m[13] - m[4] * m[1] * m[15]\n\n + m[4] * m[3] * m[13]\n\n + m[12] * m[1] * m[7]\n\n - m[12] * m[3] * m[5];\n\n\n\n inv[14] = -m[0] * m[5] * m[14] + m[0] * m[6] * m[13] + m[4] * m[1] * m[14]\n\n - m[4] * m[2] * m[13]\n\n - m[12] * m[1] * m[6]\n\n + m[12] * m[2] * m[5];\n\n\n\n inv[3] = -m[1] * m[6] * m[11] + m[1] * m[7] * m[10] + m[5] * m[2] * m[11]\n\n - m[5] * m[3] * m[10]\n\n - m[9] * m[2] * m[7]\n\n + m[9] * m[3] * m[6];\n\n\n\n inv[7] = m[0] * m[6] * m[11] - m[0] * m[7] * m[10] - m[4] * m[2] * m[11]\n\n + m[4] * m[3] * m[10]\n\n + m[8] * m[2] * m[7]\n\n - m[8] * m[3] * m[6];\n\n\n", "file_path": "src/math/mat4.rs", "rank": 73, "score": 40076.537404891256 }, { "content": " inv[11] = -m[0] * m[5] * m[11] + m[0] * m[7] * m[9] + m[4] * m[1] * m[11]\n\n - m[4] * m[3] * m[9]\n\n - m[8] * m[1] * m[7]\n\n + m[8] * m[3] * m[5];\n\n\n\n inv[15] = m[0] * m[5] * m[10] - m[0] * m[6] * m[9] - m[4] * m[1] * m[10]\n\n + m[4] * m[2] * m[9]\n\n + m[8] * m[1] * m[6]\n\n - m[8] * m[2] * m[5];\n\n\n\n let mut det = m[0] * inv[0] + m[1] * inv[4] + m[2] * inv[8] + m[3] * inv[12];\n\n\n\n if det == 0.0 {\n\n return None;\n\n }\n\n\n\n det = 1.0 / det;\n\n\n\n Some(Self::from(inv) * det)\n\n }\n", "file_path": "src/math/mat4.rs", "rank": 74, "score": 40076.537404891256 }, { "content": " inv[9] = -m[0] * m[9] * m[15] + m[0] * m[11] * m[13] + m[8] * m[1] * m[15]\n\n - m[8] * m[3] * m[13]\n\n - m[12] * m[1] * m[11]\n\n + m[12] * m[3] * m[9];\n\n\n\n inv[13] = m[0] * m[9] * m[14] - m[0] * m[10] * m[13] - m[8] * m[1] * m[14]\n\n + m[8] * m[2] * m[13]\n\n + m[12] * m[1] * m[10]\n\n - m[12] * m[2] * m[9];\n\n\n\n inv[2] = m[1] * m[6] * m[15] - m[1] * m[7] * m[14] - m[5] * m[2] * m[15]\n\n + m[5] * m[3] * m[14]\n\n + m[13] * m[2] * m[7]\n\n - m[13] * m[3] * m[6];\n\n\n\n inv[6] = -m[0] * m[6] * m[15] + m[0] * m[7] * m[14] + m[4] * m[2] * m[15]\n\n - m[4] * m[3] * 
m[14]\n\n - m[12] * m[2] * m[7]\n\n + m[12] * m[3] * m[6];\n\n\n", "file_path": "src/math/mat4.rs", "rank": 75, "score": 40076.537404891256 }, { "content": " inv[8] = m[4] * m[9] * m[15] - m[4] * m[11] * m[13] - m[8] * m[5] * m[15]\n\n + m[8] * m[7] * m[13]\n\n + m[12] * m[5] * m[11]\n\n - m[12] * m[7] * m[9];\n\n\n\n inv[12] = -m[4] * m[9] * m[14] + m[4] * m[10] * m[13] + m[8] * m[5] * m[14]\n\n - m[8] * m[6] * m[13]\n\n - m[12] * m[5] * m[10]\n\n + m[12] * m[6] * m[9];\n\n\n\n inv[1] = -m[1] * m[10] * m[15] + m[1] * m[11] * m[14] + m[9] * m[2] * m[15]\n\n - m[9] * m[3] * m[14]\n\n - m[13] * m[2] * m[11]\n\n + m[13] * m[3] * m[10];\n\n\n\n inv[5] = m[0] * m[10] * m[15] - m[0] * m[11] * m[14] - m[8] * m[2] * m[15]\n\n + m[8] * m[3] * m[14]\n\n + m[12] * m[2] * m[11]\n\n - m[12] * m[3] * m[10];\n\n\n", "file_path": "src/math/mat4.rs", "rank": 76, "score": 40076.537404891256 }, { "content": "fn create_plane(device: &Device) -> Mesh {\n\n let mut mesh = Mesh::new(device);\n\n\n\n mesh.vertices = vec![\n\n Vec3::new(-0.5, 0.5, 0.0),\n\n Vec3::new(0.5, 0.5, 0.0),\n\n Vec3::new(0.5, -0.5, 0.0),\n\n Vec3::new(-0.5, -0.5, 0.0),\n\n ];\n\n mesh.uvs = vec![\n\n Vec2::new(0.0, 1.0),\n\n Vec2::new(1.0, 1.0),\n\n Vec2::new(1.0, 0.0),\n\n Vec2::new(0.0, 0.0),\n\n ];\n\n mesh.indices = vec![0, 1, 2, 0, 2, 3];\n\n\n\n mesh.calculate_normals();\n\n mesh.update(device);\n\n mesh\n", "file_path": "src/resources/builtins.rs", "rank": 77, "score": 38818.45549031684 }, { "content": "fn create_surface(device: &Device) -> Mesh {\n\n let mut mesh = Mesh::new(device);\n\n\n\n mesh.vertices = vec![\n\n Vec3::new(-1.0, 1.0, 0.0),\n\n Vec3::new(1.0, 1.0, 0.0),\n\n Vec3::new(1.0, -1.0, 0.0),\n\n Vec3::new(-1.0, -1.0, 0.0),\n\n ];\n\n mesh.uvs = vec![\n\n Vec2::new(0.0, 0.0),\n\n Vec2::new(1.0, 0.0),\n\n Vec2::new(1.0, 1.0),\n\n Vec2::new(0.0, 1.0),\n\n ];\n\n mesh.indices = vec![0, 1, 2, 0, 2, 3];\n\n\n\n mesh.calculate_normals();\n\n mesh.update(device);\n\n mesh\n\n}\n\n\n", "file_path": 
"src/resources/builtins.rs", "rank": 78, "score": 38818.45549031684 }, { "content": "// Oliver Berzs\n\n// https://github.com/oberzs/duku\n\n\n\n// This example loads and draws glTF ship model\n\n\n\nuse duku::window::Orbit;\n\nuse duku::Camera;\n\nuse duku::Duku;\n\nuse duku::Light;\n\nuse duku::Result;\n\n\n", "file_path": "examples/3d-examples/ship_model.rs", "rank": 79, "score": 38087.70773129575 }, { "content": " // start window loop\n\n window.while_open(move |events| {\n\n // update camera based on controller\n\n orbit.update(&mut camera, events, duku.delta_time());\n\n\n\n // start drawing on window\n\n duku.draw(Some(&camera), |t| {\n\n // setup scene\n\n t.light(light);\n\n t.ambient(\"#E6EDFF\", 0.03);\n\n t.debug_grid();\n\n\n\n // transform scene to\n\n // make ship in the center\n\n t.rotate_y(180.0);\n\n t.translate_x(4.0);\n\n\n\n // draw ship model\n\n t.model(&ship);\n\n });\n\n });\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/3d-examples/ship_model.rs", "rank": 80, "score": 38082.398890440025 }, { "content": "fn error(s: impl AsRef<str>, no_color: bool) -> ! 
{\n\n if no_color {\n\n eprintln!(\"error: {}\", s.as_ref());\n\n } else {\n\n eprintln!(\"\\x1b[91merror\\x1b[0m: {}\", s.as_ref());\n\n }\n\n process::exit(1);\n\n}\n\n\n", "file_path": "src/dc.rs", "rank": 81, "score": 36407.27837133016 }, { "content": "pub fn fira_mono() -> FontData<'static> {\n\n let mut font_data = FontData {\n\n height: 1.0,\n\n line_gap: 0.0,\n\n ascender: 0.77916664,\n\n descender: -0.22083332,\n\n char_data: HashMap::new(),\n\n texture_data: DATA,\n\n texture_width: 970,\n\n texture_height: 23,\n\n };\n\n\n\n font_data.char_data.insert(\n\n '!',\n\n CharData {\n\n uvs: Vec4::new(0.0010309279, 0.04347826, 0.0051546395, 0.6956522),\n\n bounds: Vec2::new(0.16666667, 0.625),\n\n bearing: Vec2::new(0.20833333, 0.041666668),\n\n advance: 0.5,\n\n },\n", "file_path": "src/font/fira_mono.rs", "rank": 82, "score": 36389.49540923765 }, { "content": "fn load_node(\n\n node: &Node<'_>,\n\n meshes: &HashMap<(usize, usize), Handle<Mesh>>,\n\n materials: &HashMap<usize, Handle<Material>>,\n\n) -> Result<ModelNode> {\n\n // get transform matrix\n\n let matrix = {\n\n let (t, r, s) = node.transform().decomposed();\n\n Mat4::translation([t[0], t[1], -t[2]])\n\n * Mat4::from(Quat::new(r[0], r[1], -r[2], -r[3]))\n\n * Mat4::scale(s)\n\n };\n\n\n\n // get mesh and material\n\n let mut ms = vec![];\n\n let mut mats = vec![];\n\n if let Some(mesh) = node.mesh() {\n\n for primitive in mesh.primitives() {\n\n let m = meshes\n\n .get(&(mesh.index(), primitive.index()))\n", "file_path": "src/features/gltf.rs", "rank": 83, "score": 36222.10697432249 }, { "content": "fn main() -> Result<()> {\n\n // create duku context and window\n\n let (mut duku, window) = Duku::windowed(500, 500)?;\n\n\n\n // create 3D camera with 90 fov and move it\n\n let mut camera = Camera::perspective(90);\n\n camera.move_by([8.0, 8.0, -8.0]);\n\n\n\n // create basic orbit mode camera controller\n\n let mut orbit = Orbit::new([0.0, 0.0, 0.0]);\n\n\n\n // load glTF ship model and fix it's 
color space\n\n // cause it was exported incorrectly\n\n let ship = duku.create_model_gltf(\"examples/models/ship_dark.gltf\", None)?;\n\n ship.write().fix_color_space();\n\n\n\n // create directional light\n\n let mut light = Light::directional(\"#FFFAEB\", [-0.7, -0.5, 1.0]);\n\n light.brightness = 2.0;\n\n\n", "file_path": "examples/3d-examples/ship_model.rs", "rank": 84, "score": 34470.36661933751 }, { "content": "fn record_meshes(cmd: &Commands, uniforms: &Uniforms, orders: Vec<ShaderOrder>) {\n\n for s_order in orders {\n\n // bind shader\n\n cmd.bind_shader(&s_order.shader.read());\n\n\n\n for m_order in &s_order.orders {\n\n // bind material\n\n cmd.bind_material(uniforms, &m_order.material.read());\n\n\n\n for order in &m_order.orders {\n\n cmd.push_constants(\n\n uniforms,\n\n ShaderConstants {\n\n local_to_world: order.matrix,\n\n tint_color: Rgbf::from(order.color).into(),\n\n sampler_index: order.sampler_index,\n\n },\n\n );\n\n {\n\n let m = order.mesh.read();\n\n cmd.bind_mesh(&m);\n\n cmd.draw(m.index_count(), 0);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/renderer/forward.rs", "rank": 85, "score": 25841.83657302589 }, { "content": "fn clear<T>(handles: &mut Vec<Handle<T>>, mut clear_fn: impl FnMut(&T)) {\n\n for h in handles.iter() {\n\n clear_fn(&h.read());\n\n }\n\n handles.clear();\n\n}\n", "file_path": "src/resources/mod.rs", "rank": 86, "score": 25813.50142648857 }, { "content": "fn clear_unused<T>(handles: &mut Vec<Handle<T>>, mut clear_fn: impl FnMut(&T)) {\n\n handles.retain(|h| {\n\n if h.count() == 0 {\n\n clear_fn(&h.read());\n\n false\n\n } else {\n\n true\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/resources/mod.rs", "rank": 87, "score": 25128.51619238417 }, { "content": "fn create_rectangle<V: Into<Vec3>>(device: &Device, p1: V, p2: V, p3: V, p4: V) -> Mesh {\n\n let mut mesh = Mesh::new(device);\n\n\n\n mesh.vertices = vec![p1.into(), p2.into(), p3.into(), p4.into()];\n\n mesh.uvs = vec![\n\n Vec2::new(0.0, 0.0),\n\n 
Vec2::new(1.0, 0.0),\n\n Vec2::new(1.0, 1.0),\n\n Vec2::new(0.0, 1.0),\n\n ];\n\n mesh.indices = vec![0, 1, 2, 0, 2, 3];\n\n mesh.calculate_normals();\n\n mesh.update(device);\n\n\n\n mesh\n\n}\n\n\n\npub(crate) fn create_ico_sphere(device: &Device, detail_level: u32) -> Mesh {\n\n let mut vertices = vec![];\n\n let mut indices = vec![];\n", "file_path": "src/resources/builtins.rs", "rank": 88, "score": 24515.72092187506 }, { "content": " self.shape_mode = cache.shape_mode;\n\n self.border_mode = cache.border_mode;\n\n self.filter = cache.filter;\n\n self.wrap = cache.wrap;\n\n }\n\n }\n\n\n\n fn model_node(&mut self, node: &ModelNode, parent: Mat4) {\n\n self.matrix = parent * node.matrix;\n\n // self.matrix = node.matrix * parent;\n\n\n\n for (mesh, material) in node.orders() {\n\n self.material(material);\n\n self.mesh(mesh);\n\n }\n\n\n\n for child in &node.children {\n\n self.push();\n\n self.model_node(child, self.matrix);\n\n self.pop();\n", "file_path": "src/renderer/target.rs", "rank": 89, "score": 35.20862985469449 }, { "content": "pub use image::CubemapSides;\n\npub use image::Filter;\n\npub use image::Format;\n\npub use image::Mips;\n\npub use image::Msaa;\n\npub use image::Texture;\n\npub use image::Wrap;\n\npub use math::Mat4;\n\npub use math::Quat;\n\npub use math::Vec2;\n\npub use math::Vec3;\n\npub use math::Vec4;\n\npub use mesh::Mesh;\n\npub use mesh::Model;\n\npub use mesh::ModelNode;\n\npub use pipeline::Material;\n\npub use pipeline::Shader;\n\npub use renderer::BorderMode;\n\npub use renderer::Camera;\n\npub use renderer::Light;\n", "file_path": "src/lib.rs", "rank": 90, "score": 29.656789141660827 }, { "content": " .cloned()\n\n .ok_or(Error::InvalidGltf)?;\n\n let mat = materials\n\n .get(&primitive.material().index().unwrap_or(0))\n\n .cloned()\n\n .ok_or(Error::InvalidGltf)?;\n\n ms.push(m);\n\n mats.push(mat);\n\n }\n\n }\n\n\n\n // get children\n\n let children: Vec<_> = node\n\n .children()\n\n .map(|n| load_node(&n, meshes, 
materials))\n\n .collect::<Result<_>>()?;\n\n Ok(ModelNode {\n\n meshes: ms,\n\n materials: mats,\n\n children,\n\n matrix,\n\n })\n\n}\n", "file_path": "src/features/gltf.rs", "rank": 91, "score": 29.076596195196455 }, { "content": "use crate::image::ColorSpace;\n\nuse crate::image::Mips;\n\nuse crate::image::Texture;\n\nuse crate::math::Mat4;\n\nuse crate::math::Quat;\n\nuse crate::math::Vec2;\n\nuse crate::math::Vec3;\n\nuse crate::mesh::Mesh;\n\nuse crate::mesh::Model;\n\nuse crate::mesh::ModelNode;\n\nuse crate::pipeline::Material;\n\nuse crate::resources::Handle;\n\n\n\n/// Y axis for the gltf model\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum YAxis {\n\n /// +Y is up\n\n Up,\n\n /// +Y is down\n\n Down,\n", "file_path": "src/features/gltf.rs", "rank": 92, "score": 28.442072782818983 }, { "content": "pub(crate) use builtins::Builtins;\n\npub use handle::Handle;\n\npub use handle::ReadGuard;\n\npub use handle::WriteGuard;\n\n\n\n#[derive(Default)]\n\npub(crate) struct Resources {\n\n shaders: Vec<Handle<Shader>>,\n\n fonts: Vec<Handle<Font>>,\n\n textures: Vec<Handle<Texture>>,\n\n cubemaps: Vec<Handle<Cubemap>>,\n\n canvases: Vec<Handle<Canvas>>,\n\n materials: Vec<Handle<Material>>,\n\n meshes: Vec<Handle<Mesh>>,\n\n models: Vec<Handle<Model>>,\n\n}\n\n\n\nimpl Resources {\n\n pub(crate) fn add_shader(&mut self, value: Shader) -> Handle<Shader> {\n\n add(&mut self.shaders, value)\n", "file_path": "src/resources/mod.rs", "rank": 93, "score": 25.82015231077812 }, { "content": " pub fn create_mesh_sphere_uv(&mut self, meridians: u32, parallels: u32) -> Handle<Mesh> {\n\n self.resources.add_mesh(resources::create_uv_sphere(\n\n &self.device,\n\n meridians,\n\n parallels,\n\n ))\n\n }\n\n\n\n /// Create a model\n\n pub fn create_model(&mut self) -> Handle<Model> {\n\n let model = Model { nodes: vec![] };\n\n self.resources.add_model(model)\n\n }\n\n\n\n /// Create a material\n\n pub fn create_material(&mut self) -> Result<Handle<Material>> 
{\n\n let mat = Material::new(&self.device, &mut self.uniforms)?;\n\n Ok(self.resources.add_material(mat))\n\n }\n\n\n", "file_path": "src/duku.rs", "rank": 94, "score": 24.674464837134344 }, { "content": " self.pop();\n\n }\n\n\n\n /// Draw a scaled cube\n\n pub fn cube(&mut self, scale: impl Into<Vec3>) {\n\n self.push();\n\n self.matrix *= Mat4::scale(scale);\n\n let mesh = self.builtins.cube_mesh.clone();\n\n self.mesh(&mesh);\n\n self.pop();\n\n }\n\n\n\n /// Draw a scaled uv sphere\n\n pub fn sphere_uv(&mut self, scale: impl Into<Vec3>) {\n\n self.push();\n\n self.matrix *= Mat4::scale(scale);\n\n let mesh = self.builtins.uv_sphere_mesh.clone();\n\n self.mesh(&mesh);\n\n self.pop();\n\n }\n", "file_path": "src/renderer/target.rs", "rank": 95, "score": 24.44716188648035 }, { "content": " /// Set tint color for meshes and textures\n\n pub fn tint(&mut self, color: impl Into<Rgb>) {\n\n self.tint = color.into();\n\n }\n\n\n\n /// Set material for meshes\n\n pub fn material(&mut self, m: &Handle<Material>) {\n\n self.material = Some(m.clone());\n\n }\n\n\n\n /// Use default material for meshes\n\n pub fn no_material(&mut self) {\n\n self.material = None;\n\n }\n\n\n\n /// Set shader for meshes\n\n pub fn shader(&mut self, s: &Handle<Shader>) {\n\n self.shader = Some(s.clone());\n\n }\n\n\n", "file_path": "src/renderer/target.rs", "rank": 96, "score": 24.194560274597112 }, { "content": "\n\n /// Draw a scaled ico sphere\n\n pub fn sphere_ico(&mut self, scale: impl Into<Vec3>) {\n\n self.push();\n\n self.matrix *= Mat4::scale(scale);\n\n let mesh = self.builtins.ico_sphere_mesh.clone();\n\n self.mesh(&mesh);\n\n self.pop();\n\n }\n\n\n\n /// Draw a scaled plane\n\n pub fn plane(&mut self, scale: impl Into<Vec2>) {\n\n self.push();\n\n self.matrix *= Mat4::scale(Vec3::from((scale.into(), 1.0)));\n\n let mesh = self.builtins.plane_mesh.clone();\n\n self.mesh(&mesh);\n\n self.pop();\n\n }\n\n\n\n /// Draw a surface with a custom shader\n", "file_path": 
"src/renderer/target.rs", "rank": 97, "score": 24.05764270383532 }, { "content": "use crate::mesh::ModelNode;\n\nuse crate::pipeline::Material;\n\nuse crate::pipeline::Shader;\n\nuse crate::resources::Builtins;\n\nuse crate::resources::Handle;\n\n\n\n/// Active render target.\n\n///\n\n/// Records rendering commands and settings.\n\n/// Received from the [draw](crate::duku::Duku::draw) or\n\n/// [draw_on_canvas](crate::duku::Duku::draw_on_canvas)\n\n/// functions.\n\npub struct Target {\n\n // colors\n\n fill: Rgb,\n\n stroke: Rgb,\n\n tint: Rgb,\n\n pub(crate) background: Rgb,\n\n\n\n // shadows\n", "file_path": "src/renderer/target.rs", "rank": 98, "score": 22.881271333708167 }, { "content": "use crate::pipeline::Material;\n\nuse crate::pipeline::Shader;\n\nuse crate::pipeline::Uniforms;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Builtins {\n\n // textures\n\n pub white_texture: Handle<Texture>,\n\n pub black_texture: Handle<Texture>,\n\n\n\n // cubemaps\n\n pub white_cubemap: Handle<Cubemap>,\n\n\n\n // materials\n\n pub white_material: Handle<Material>,\n\n\n\n // meshes\n\n pub surface_mesh: Handle<Mesh>,\n\n pub plane_mesh: Handle<Mesh>,\n\n pub cube_mesh: Handle<Mesh>,\n", "file_path": "src/resources/builtins.rs", "rank": 99, "score": 21.944595394685066 } ]
Rust
src/socket.rs
caizixian/streamdeck-rs
3848d18fa4bf48b95ccb77c6cf689d422145509e
use super::{Message, MessageOut}; use failure::Fail; use futures::prelude::*; use serde::{de, ser}; use serde_derive::Serialize; use std::marker::PhantomData; use std::pin::Pin; use std::task::{Context, Poll}; use tokio::net::TcpStream; use tokio_tungstenite::{self, WebSocketStream, MaybeTlsStream}; use url::Url; pub struct StreamDeckSocket<G, S, MI, MO> { inner: WebSocketStream<MaybeTlsStream<TcpStream>>, _g: PhantomData<G>, _s: PhantomData<S>, _mi: PhantomData<MI>, _mo: PhantomData<MO>, } impl<G, S, MI, MO> StreamDeckSocket<G, S, MI, MO> { pub async fn connect<A: Into<Address>>( address: A, event: String, uuid: String, ) -> Result<Self, ConnectError> { let address = address.into(); let (mut stream, _) = tokio_tungstenite::connect_async(address.url) .await .map_err(ConnectError::ConnectionError)?; let message = serde_json::to_string(&Registration { event: &event, uuid: &uuid, }) .unwrap(); stream .send(tungstenite::Message::Text(message)) .await .map_err(ConnectError::SendError)?; Ok(StreamDeckSocket { inner: stream, _g: PhantomData, _s: PhantomData, _mi: PhantomData, _mo: PhantomData, }) } fn pin_get_inner(self: Pin<&mut Self>) -> Pin<&mut WebSocketStream<MaybeTlsStream<TcpStream>>> { unsafe { self.map_unchecked_mut(|s| &mut s.inner) } } } #[derive(Debug, Fail)] pub enum StreamDeckSocketError { #[fail(display = "WebSocket error")] WebSocketError(#[fail(cause)] tungstenite::error::Error), #[fail(display = "Bad message")] BadMessage(#[fail(cause)] serde_json::Error), } impl<G, S, MI, MO> Stream for StreamDeckSocket<G, S, MI, MO> where G: de::DeserializeOwned, S: de::DeserializeOwned, MI: de::DeserializeOwned, { type Item = Result<Message<G, S, MI>, StreamDeckSocketError>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { let mut inner = self.pin_get_inner(); loop { match inner.as_mut().poll_next(cx) { Poll::Ready(Some(Ok(tungstenite::Message::Text(message)))) => { break match serde_json::from_str(&message) { Ok(message) => 
Poll::Ready(Some(Ok(message))), Err(error) => { Poll::Ready(Some(Err(StreamDeckSocketError::BadMessage(error)))) } }; } Poll::Ready(Some(Ok(_))) => {} Poll::Ready(Some(Err(error))) => { break Poll::Ready(Some(Err(StreamDeckSocketError::WebSocketError(error)))) } Poll::Ready(None) => break Poll::Ready(None), Poll::Pending => break Poll::Pending, } } } } impl<G, S, MI, MO> Sink<MessageOut<G, S, MO>> for StreamDeckSocket<G, S, MI, MO> where G: ser::Serialize, S: ser::Serialize, MO: ser::Serialize, { type Error = StreamDeckSocketError; fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_ready(cx) .map_err(StreamDeckSocketError::WebSocketError) } fn start_send(self: Pin<&mut Self>, item: MessageOut<G, S, MO>) -> Result<(), Self::Error> { let message = serde_json::to_string(&item).map_err(StreamDeckSocketError::BadMessage)?; self.pin_get_inner() .start_send(tungstenite::Message::Text(message)) .map_err(StreamDeckSocketError::WebSocketError) } fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_flush(cx) .map_err(StreamDeckSocketError::WebSocketError) } fn poll_close(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_close(cx) .map_err(StreamDeckSocketError::WebSocketError) } } pub struct Address { pub url: Url, } impl From<Url> for Address { fn from(value: Url) -> Self { Address { url: value } } } impl From<u16> for Address { fn from(value: u16) -> Self { let mut url = Url::parse("ws://localhost").unwrap(); url.set_port(Some(value)).unwrap(); Address { url } } } #[derive(Debug, Fail)] pub enum ConnectError { #[fail(display = "Websocket connection error")] ConnectionError(#[fail(cause)] tungstenite::error::Error), #[fail(display = "Send error")] SendError(#[fail(cause)] tungstenite::error::Error), } #[derive(Serialize)] struct Registration<'a> { event: &'a str, uuid: &'a str, }
use super::{Message, MessageOut}; use failure::Fail; use futures::prelude::*; use serde::{de, ser}; use serde_derive::Serialize; use std::marker::PhantomData; use std::pin::Pin; use std::task::{Context, Poll}; use tokio::net::TcpStream; use tokio_tungstenite::{self, WebSocketStream, MaybeTlsStream}; use url::Url; pub struct StreamDeckSocket<G, S, MI, MO> { inner: WebSocketStream<MaybeTlsStream<TcpStream>>, _g: PhantomData<G>, _s: PhantomData<S>, _mi: PhantomData<MI>, _mo: PhantomData<MO>, } impl<G, S, MI, MO> StreamDeckSocket<G, S, MI, MO> { pub async fn connect<A: Into<Address>>( address: A, event: String, uuid: String, ) -> Result<Self, ConnectError> { let address = address.into(); let (mut stream, _) = tokio_tungstenite::connect_async(address.url) .await .map_err(ConnectError::ConnectionError)?; let message = serde_json::to_string(&Registration { event: &event, uuid: &uuid, }) .unwrap(); stream .send(tungstenite::Message::Text(message)) .await .map_err(ConnectError::SendError)?; Ok(StreamDeckSocket { inner: stream, _g: PhantomData, _s: PhantomData, _mi: PhantomData, _mo: PhantomData, }) } fn pin_get_inner(self: Pin<&mut Self>) -> Pin<&mut WebSocketStream<MaybeTlsStream<TcpStream>>> { unsafe { self.map_unchecked_mut(|s| &mut s.inner) } } } #[derive(Debug, Fail)] pub enum StreamDeckSocketError { #[fail(display = "WebSocket error")] WebSocketError(#[fail(cause)] tungstenite::error::Error), #[fail(display = "Bad message")] BadMessage(#[fail(cause)] serde_json::Error), } impl<G, S, MI, MO> Stream for StreamDeckSocket<G, S, MI, MO> where G: de::DeserializeOwned, S: de::DeserializeOwned, MI: de::DeserializeOwned, { type Item = Result<Message<G, S, MI>, StreamDeckSocketError>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { let mut inner = self.pin_get_inner(); loop { match inner.as_mut().poll_next(cx) { Poll::Ready(Some(Ok(tungstenite::Message::Text(message)))) => { break match serde_json::from_str(&message) { Ok(message) => 
Poll::Ready(Some(Ok(message))), Err(error) => { Poll::Ready(Some(Err(StreamDeckSocketError::BadMessage(error)))) } }; } Poll::Ready(Some(Ok(_))) => {} Poll::Ready(Some(Err
} Poll::Ready(None) => break Poll::Ready(None), Poll::Pending => break Poll::Pending, } } } } impl<G, S, MI, MO> Sink<MessageOut<G, S, MO>> for StreamDeckSocket<G, S, MI, MO> where G: ser::Serialize, S: ser::Serialize, MO: ser::Serialize, { type Error = StreamDeckSocketError; fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_ready(cx) .map_err(StreamDeckSocketError::WebSocketError) } fn start_send(self: Pin<&mut Self>, item: MessageOut<G, S, MO>) -> Result<(), Self::Error> { let message = serde_json::to_string(&item).map_err(StreamDeckSocketError::BadMessage)?; self.pin_get_inner() .start_send(tungstenite::Message::Text(message)) .map_err(StreamDeckSocketError::WebSocketError) } fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_flush(cx) .map_err(StreamDeckSocketError::WebSocketError) } fn poll_close(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> { self.pin_get_inner() .poll_close(cx) .map_err(StreamDeckSocketError::WebSocketError) } } pub struct Address { pub url: Url, } impl From<Url> for Address { fn from(value: Url) -> Self { Address { url: value } } } impl From<u16> for Address { fn from(value: u16) -> Self { let mut url = Url::parse("ws://localhost").unwrap(); url.set_port(Some(value)).unwrap(); Address { url } } } #[derive(Debug, Fail)] pub enum ConnectError { #[fail(display = "Websocket connection error")] ConnectionError(#[fail(cause)] tungstenite::error::Error), #[fail(display = "Send error")] SendError(#[fail(cause)] tungstenite::error::Error), } #[derive(Serialize)] struct Registration<'a> { event: &'a str, uuid: &'a str, }
(error))) => { break Poll::Ready(Some(Err(StreamDeckSocketError::WebSocketError(error))))
function_block-random_span
[ { "content": "struct Serializer {\n\n stack: Vec<String>,\n\n}\n\n\n\nimpl slog::Serializer for Serializer {\n\n fn emit_none(&mut self, key: Key) -> slog::Result {\n\n self.stack.push(format!(\"{}: None\", key));\n\n Ok(())\n\n }\n\n fn emit_unit(&mut self, key: Key) -> slog::Result {\n\n self.stack.push(format!(\"{}: ()\", key));\n\n Ok(())\n\n }\n\n fn emit_bool(&mut self, key: Key, val: bool) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_char(&mut self, key: Key, val: char) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n", "file_path": "src/logging.rs", "rank": 0, "score": 20617.52926531425 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.5.0] - 2020-05-16\n\n### Changed\n\n- Now compatible with futures 0.3.\n\n\n\n## [0.4.0] - 2019-08-24\n\n### Fixed\n\n- `VisibilityPayload<S>::State` and `KeyPayload<S>::State` are now `Option<u8>` for compatibility with plugins that don't have states. [#3](https://github.com/mdonoughe/streamdeck-rs/issues/3)\n\n\n\n## [0.3.0] - 2019-06-15\n\n### Added\n\n- New events and properties from Stream Deck software version 4.3.\n\n\n\n### Changed\n\n- Unrecognized events are now reported as `Message::Unknown` instead of an error.\n\n\n\n## [0.2.0] - 2019-03-10\n\n### Added\n\n- Logging support via Slog.\n\n\n\n### Changed\n\n- Now for SDK version 2. You must update the plugin manifest file. 
See https://developer.elgato.com/documentation/stream-deck/sdk/changelog/#changes-in-stream-deck-41\n\n\n\n## 0.1.0 - 2019-01-14\n\n### Added\n\n- Command line parsing.\n\n- Protocol for registration and message handling.\n\n\n\n[Unreleased]: https://github.com/mdonoughe/streamdeck-rs/compare/v0.5.0...HEAD\n\n[0.5.0]: https://github.com/mdonoughe/streamdeck-rs/compare/v0.4.0...v0.5.0\n\n[0.4.0]: https://github.com/mdonoughe/streamdeck-rs/compare/v0.3.0...v0.4.0\n\n[0.3.0]: https://github.com/mdonoughe/streamdeck-rs/compare/v0.2.0...v0.3.0\n\n[0.2.0]: https://github.com/mdonoughe/streamdeck-rs/compare/v0.1.0...v0.2.0\n", "file_path": "CHANGELOG.md", "rank": 2, "score": 9326.883773984688 }, { "content": "# streamdeck-rs\n\n\n\n> Unofficial [Stream Deck](https://www.elgato.com/en/gaming/stream-deck) SDK for Rust\n\n\n\n[![Crates.io](https://img.shields.io/crates/v/streamdeck-rs.svg)](https://crates.io/crates/streamdeck-rs) ![License](https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg) [![Build status](https://travis-ci.org/mdonoughe/streamdeck-rs.svg)](https://travis-ci.org/mdonoughe/streamdeck-rs/) [![Docs.rs](https://docs.rs/streamdeck-rs/badge.svg)](https://docs.rs/streamdeck-rs)\n\n\n\nElgato's official [Stream Deck SDK](https://developer.elgato.com/documentation/stream-deck/sdk/overview/) works by launching plugins in their own processes and communicating via web sockets. This library provides the command line argument parsing and basic protocol details for creating a plugin using Rust.\n\n\n\nThis library is pretty basic for now. In the future it could provide a framework for instancing actions (keys) and routing messages to the appropriate instances.\n\n\n\n## Usage\n\n\n\n1. Create a binary executable project.\n\n2. Use `RegistrationParams` to get the information required to use `StreamDeckSocket`.\n\n3. 
See [the official documentation](https://developer.elgato.com/documentation/stream-deck/sdk/overview/) for information about [creating manifests](https://developer.elgato.com/documentation/stream-deck/sdk/manifest/) and [loading your plugin](https://developer.elgato.com/documentation/stream-deck/sdk/create-your-own-plugin/).\n", "file_path": "README.md", "rank": 3, "score": 9326.383432365603 }, { "content": " /// ```\n\n pub fn from_args<I: IntoIterator<Item = String>>(\n\n args: I,\n\n ) -> Result<RegistrationParams, RegistrationParamsError> {\n\n let mut iter = args.into_iter();\n\n let mut port = None;\n\n let mut uuid = None;\n\n let mut event = None;\n\n let mut info = None;\n\n\n\n loop {\n\n match iter.next().as_deref() {\n\n Some(\"-port\") => port = iter.next().map(|a| u16::from_str(&a)),\n\n Some(\"-pluginUUID\") => uuid = iter.next(),\n\n Some(\"-registerEvent\") => event = iter.next(),\n\n Some(\"-info\") => info = iter.next().map(|a| serde_json::from_str(&a)),\n\n Some(_) => {}\n\n None => break,\n\n }\n\n }\n", "file_path": "src/registration.rs", "rank": 11, "score": 20.64053575964615 }, { "content": " /// The web socket port listening for the plugin.\n\n pub port: u16,\n\n /// The uuid of the plugin.\n\n pub uuid: String,\n\n /// The event the plugin should send to register with the Stream Deck software.\n\n pub event: String,\n\n /// Information about the environment the plugin is being loaded into.\n\n pub info: RegistrationInfo,\n\n}\n\n\n\n/// An error that occurred while collecting the registration parameters.\n\n#[derive(Debug, Fail)]\n\npub enum RegistrationParamsError {\n\n /// The port number was not found.\n\n #[fail(display = \"port not provided\")]\n\n NoPort,\n\n /// The port number was found but could not be parsed.\n\n #[fail(display = \"port could not be parsed\")]\n\n BadPort(#[fail(cause)] std::num::ParseIntError),\n\n /// The uuid was not found.\n", "file_path": "src/registration.rs", "rank": 14, "score": 17.023734457992248 }, { 
"content": "#[cfg(feature = \"logging\")]\n\npub mod logging;\n\npub mod registration;\n\npub mod socket;\n\n\n\npub use crate::registration::RegistrationInfo;\n\npub use crate::socket::StreamDeckSocket;\n\n\n\nuse serde::{de, ser};\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse std::fmt;\n\n\n\n/// A message received from the Stream Deck software.\n\n///\n\n/// - `G` represents the global settings that are persisted within the Stream Deck software.\n\n/// - `S` represents the settings that are persisted within the Stream Deck software.\n\n/// - `M` represents the messages that are received from the property inspector.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/)\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/lib.rs", "rank": 15, "score": 15.569200665196862 }, { "content": "#[serde(tag = \"event\", rename_all = \"camelCase\")]\n\npub enum Message<G, S, M> {\n\n /// A key has been pressed.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#keydown)\n\n #[serde(rename_all = \"camelCase\")]\n\n KeyDown {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the key was pressed.\n\n device: String,\n\n /// Additional information about the key press.\n\n payload: KeyPayload<S>,\n\n },\n\n /// A key has been released.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#keyup)\n\n #[serde(rename_all = \"camelCase\")]\n", "file_path": "src/lib.rs", "rank": 16, "score": 15.175764089022058 }, { "content": "use crate::{LogMessagePayload, MessageOut};\n\nuse futures::channel::mpsc;\n\nuse slog::{Drain, Key, OwnedKVList, Record, KV};\n\nuse std::fmt::{self, Write};\n\nuse std::sync::Mutex;\n\n\n\npub struct StreamDeckDrain<G, S, M> {\n\n sink: 
Mutex<mpsc::UnboundedSender<MessageOut<G, S, M>>>,\n\n}\n\n\n\nimpl<G, S, M> StreamDeckDrain<G, S, M> {\n\n pub fn new(sink: mpsc::UnboundedSender<MessageOut<G, S, M>>) -> Self {\n\n Self {\n\n sink: Mutex::new(sink),\n\n }\n\n }\n\n}\n\n\n\nimpl<G, S, M> Drain for StreamDeckDrain<G, S, M> {\n\n type Ok = ();\n", "file_path": "src/logging.rs", "rank": 17, "score": 14.40042127666585 }, { "content": "use super::{DeviceSize, DeviceType};\n\nuse failure::Fail;\n\nuse serde::de;\n\nuse serde_derive::Deserialize;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n/// Information about a connected device.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/registration-procedure/#info-parameter)\n\n#[derive(Deserialize)]\n\npub struct RegistrationInfoDevice {\n\n /// The ID of the specific device.\n\n pub id: String,\n\n /// The user-specified name of the device.\n\n ///\n\n /// Added in Stream Deck software version 4.3.\n\n pub name: Option<String>,\n\n /// The size of the device.\n\n pub size: DeviceSize,\n", "file_path": "src/registration.rs", "rank": 18, "score": 14.047362835336147 }, { "content": " type Err = mpsc::TrySendError<MessageOut<G, S, M>>;\n\n\n\n fn log(&self, record: &Record, values: &OwnedKVList) -> Result<Self::Ok, Self::Err> {\n\n let mut message = format!(\"{} {}\", record.level().as_short_str(), record.msg());\n\n\n\n let mut serializer = Serializer { stack: Vec::new() };\n\n record.kv().serialize(record, &mut serializer).unwrap();\n\n values.serialize(record, &mut serializer).unwrap();\n\n\n\n let kv_len = serializer.stack.iter().fold(0, |a, b| a + b.len() + 2);\n\n message.reserve_exact(kv_len);\n\n while let Some(value) = serializer.stack.pop() {\n\n write!(message, \", {}\", value).unwrap()\n\n }\n\n\n\n self.sink\n\n .lock()\n\n .unwrap()\n\n .unbounded_send(MessageOut::LogMessage {\n\n payload: LogMessagePayload { message },\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/logging.rs", "rank": 19, 
"score": 13.31218171208737 }, { "content": " #[fail(display = \"uuid not provided\")]\n\n NoUuid,\n\n /// The registration event to send was not found.\n\n #[fail(display = \"event not provided\")]\n\n NoEvent,\n\n /// The registration environment info was not found.\n\n #[fail(display = \"info not provided\")]\n\n NoInfo,\n\n /// The registration environment info could not be parsed.\n\n #[fail(display = \"info could not be parsed\")]\n\n BadInfo(#[fail(cause)] serde_json::Error),\n\n}\n\n\n\nimpl RegistrationParams {\n\n /// Pull the registration parameters out of a command line.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// RegistrationParams::from_args(env::args())\n", "file_path": "src/registration.rs", "rank": 20, "score": 12.933642577025617 }, { "content": " },\n\n /// Send data to the property inspector.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#sendtopropertyinspector)\n\n #[serde(rename_all = \"camelCase\")]\n\n SendToPropertyInspector {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The message to send.\n\n payload: M,\n\n },\n\n /// Select a new profile.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#switchtoprofile)\n\n #[serde(rename_all = \"camelCase\")]\n\n SwitchToProfile {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n", "file_path": "src/lib.rs", "rank": 21, "score": 12.76899040600008 }, { "content": " },\n\n /// Store plugin settings.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setglobalsettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n SetGlobalSettings {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The settings to save.\n\n payload: 
G,\n\n },\n\n /// Write to the log.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#logmessage)\n\n #[serde(rename_all = \"camelCase\")]\n\n LogMessage {\n\n /// The message to log.\n\n payload: LogMessagePayload,\n\n },\n\n}\n", "file_path": "src/lib.rs", "rank": 22, "score": 12.733076266691402 }, { "content": " /// newer than the plugin, and it should be safe to ignore these.\n\n #[serde(other)]\n\n Unknown,\n\n}\n\n\n\n/// A message to be sent to the Stream Deck software.\n\n///\n\n/// - `G` represents the global settings that are persisted within the Stream Deck software.\n\n/// - `S` represents the action settings that are persisted within the Stream Deck software.\n\n/// - `M` represents the messages that are sent to the property inspector.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/)\n\n#[derive(Debug, Serialize)]\n\n#[serde(tag = \"event\", rename_all = \"camelCase\")]\n\npub enum MessageOut<G, S, M> {\n\n /// Set the title of an action instance.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#settitle)\n\n #[serde(rename_all = \"camelCase\")]\n\n SetTitle {\n", "file_path": "src/lib.rs", "rank": 23, "score": 11.87596318232286 }, { "content": " pub title_parameters: TitleParameters,\n\n}\n\n\n\n/// The new global settings.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct GlobalSettingsPayload<G> {\n\n /// The stored settings for the plugin.\n\n pub settings: G,\n\n}\n\n\n\n/// A log message.\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct LogMessagePayload {\n\n /// The log message text.\n\n pub message: String,\n\n}\n\n\n\n/// Information about a hardware device.\n", "file_path": "src/lib.rs", "rank": 24, "score": 11.707531705114906 }, { "content": "///\n\n/// [Official 
Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#SwitchToProfile)\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ProfilePayload {\n\n /// The name of the profile to activate.\n\n pub profile: String,\n\n}\n\n\n\n/// The URL to launch as part of a [OpenUrl](enum.MessageOut.html#variant.OpenUrl) message.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#openurl)\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UrlPayload {\n\n /// The URL to launch.\n\n pub url: String,\n\n}\n\n\n\n/// Additional information about the key pressed.\n", "file_path": "src/lib.rs", "rank": 25, "score": 11.66784240809874 }, { "content": " /// This message is sent in response to GetSettings, but also after the\n\n /// property inspector changes the settings.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#didreceivesettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n DidReceiveSettings {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action exists.\n\n device: String,\n\n /// The current settings for the action.\n\n payload: KeyPayload<S>,\n\n },\n\n /// The property inspector for an action has become visible.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#propertyinspectordidappear)\n\n #[serde(rename_all = \"camelCase\")]\n\n PropertyInspectorDidAppear {\n", "file_path": "src/lib.rs", "rank": 26, "score": 11.535641397085339 }, { "content": "///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#devicedidconnect)\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct 
DeviceInfo {\n\n /// The user-provided name of the device.\n\n ///\n\n /// Added in Stream Deck software version 4.3.\n\n pub name: Option<String>,\n\n /// The size of the device.\n\n pub size: DeviceSize,\n\n /// The type of the device, or None if the Stream Deck software is running with no device attached.\n\n #[serde(rename = \"type\")]\n\n pub _type: Option<DeviceType>,\n\n}\n\n\n\n/// Information about a monitored application that has launched or terminated.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ApplicationPayload {\n", "file_path": "src/lib.rs", "rank": 27, "score": 11.483966956300979 }, { "content": " let port = port\n\n .ok_or(RegistrationParamsError::NoPort)?\n\n .map_err(RegistrationParamsError::BadPort)?;\n\n let uuid = uuid.ok_or(RegistrationParamsError::NoUuid)?;\n\n let event = event.ok_or(RegistrationParamsError::NoEvent)?;\n\n let info = info\n\n .ok_or(RegistrationParamsError::NoInfo)?\n\n .map_err(RegistrationParamsError::BadInfo)?;\n\n\n\n Ok(RegistrationParams {\n\n port,\n\n uuid,\n\n event,\n\n info,\n\n })\n\n }\n\n}\n", "file_path": "src/registration.rs", "rank": 28, "score": 11.296233022625552 }, { "content": " /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action exists.\n\n device: String,\n\n },\n\n /// The property inspector for an action is no longer visible.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#propertyinspectordiddisappear)\n\n #[serde(rename_all = \"camelCase\")]\n\n PropertyInspectorDidDisappear {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action exists.\n\n device: String,\n\n },\n\n /// The application has sent settings for an action.\n", "file_path": 
"src/lib.rs", "rank": 29, "score": 11.171111287403484 }, { "content": " Target::Software => 2,\n\n })\n\n }\n\n}\n\n\n\n/// The title to set as part of a [SetTitle](enum.MessageOut.html#variant.SetTitle) message.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#settitle)\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TitlePayload {\n\n /// The new title.\n\n pub title: Option<String>,\n\n /// The target displays.\n\n pub target: Target,\n\n}\n\n\n\n/// The image to set as part of a [SetImage](enum.MessageOut.html#variant.SetImage) message.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setimage)\n", "file_path": "src/lib.rs", "rank": 30, "score": 11.02838989421348 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ImagePayload {\n\n /// An image in the form of a data URI.\n\n pub image: Option<String>,\n\n /// The target displays.\n\n pub target: Target,\n\n}\n\n\n\n/// The state to set as part of a [SetState](enum.MessageOut.html#variant.SetState) message.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setstate)\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct StatePayload {\n\n /// The new state.\n\n pub state: u8,\n\n}\n\n\n\n/// The profile to activate as part of a [SwitchToProfile](enum.MessageOut.html#variant.SwitchToProfile) message.\n", "file_path": "src/lib.rs", "rank": 31, "score": 10.806985759387924 }, { "content": " KeyUp {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the key was pressed.\n\n device: String,\n\n /// Additional information about the key press.\n\n payload: KeyPayload<S>,\n\n },\n\n /// An instance of the action 
has been added to the display.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#willappear)\n\n #[serde(rename_all = \"camelCase\")]\n\n WillAppear {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action will appear, or None if it does not appear on a device.\n", "file_path": "src/lib.rs", "rank": 32, "score": 10.720778381101951 }, { "content": " /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#titleparametersdidchange)\n\n #[serde(rename_all = \"camelCase\")]\n\n TitleParametersDidChange {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action is visible, or None if it is not on a device.\n\n device: Option<String>,\n\n /// Additional information about the new title.\n\n payload: TitleParametersPayload<S>,\n\n },\n\n /// A device has connected.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#devicedidconnect)\n\n #[serde(rename_all = \"camelCase\")]\n\n DeviceDidConnect {\n\n /// The ID of the device that has connected.\n\n device: String,\n\n /// Information about the device.\n", "file_path": "src/lib.rs", "rank": 33, "score": 10.420610681139813 }, { "content": " /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#applicationdidterminate)\n\n #[serde(rename_all = \"camelCase\")]\n\n ApplicationDidTerminate {\n\n /// Information about the terminated application.\n\n payload: ApplicationPayload,\n\n },\n\n /// The property inspector has sent data.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#sendtoplugin)\n\n 
#[serde(rename_all = \"camelCase\")]\n\n SendToPlugin {\n\n /// The uuid of the action.\n\n action: String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// Information sent from the property inspector.\n\n payload: M,\n\n },\n\n /// The application has sent settings for an action.\n\n ///\n", "file_path": "src/lib.rs", "rank": 34, "score": 10.28333285601018 }, { "content": "\n\n/// The target of a command.\n\n#[derive(Debug)]\n\npub enum Target {\n\n /// Both the device and a the display within the Stream Deck software.\n\n Both, // 0\n\n /// Only the device.\n\n Hardware, // 1\n\n /// Only the display within the Stream Deck software.\n\n Software, // 2\n\n}\n\n\n\nimpl ser::Serialize for Target {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: ser::Serializer,\n\n {\n\n serializer.serialize_u8(match self {\n\n Target::Both => 0,\n\n Target::Hardware => 1,\n", "file_path": "src/lib.rs", "rank": 35, "score": 9.930799741731354 }, { "content": " /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n },\n\n /// Temporarily overlay the key image with a checkmark.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#showok)\n\n #[serde(rename_all = \"camelCase\")]\n\n ShowOk {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n },\n\n /// Retrieve settings for an instance of an action via DidReceiveSettings.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#getsettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n GetSettings {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n },\n\n /// Store settings for an instance of an action.\n", "file_path": "src/lib.rs", "rank": 36, "score": 9.824727615991536 }, { "content": " ///\n\n /// [Official 
Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setsettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n SetSettings {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The settings to save.\n\n payload: S,\n\n },\n\n /// Set the state of an action.\n\n ///\n\n /// Normally, Stream Deck changes the state of an action automatically when the key is pressed.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setstate)\n\n #[serde(rename_all = \"camelCase\")]\n\n SetState {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The desired state.\n\n payload: StatePayload,\n", "file_path": "src/lib.rs", "rank": 37, "score": 9.507169955969559 }, { "content": " /// The type of the device.\n\n #[serde(rename = \"type\")]\n\n pub _type: Option<DeviceType>,\n\n}\n\n\n\n/// The language the Stream Deck software is running in.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/registration-procedure/#Info-parameter)\n\npub enum Language {\n\n English,\n\n French,\n\n German,\n\n Spanish,\n\n Japanese,\n\n /// Unlike the other lanuages which are not specifically localized to a country, Chinese is specifically zh-CN.\n\n ChineseChina,\n\n /// A language that was not documented in the 4.0.0 SDK.\n\n Unknown(String),\n\n}\n\n\n", "file_path": "src/registration.rs", "rank": 38, "score": 9.467803508515773 }, { "content": " device: Option<String>,\n\n /// Additional information about the action's appearance.\n\n payload: VisibilityPayload<S>,\n\n },\n\n /// An instance of the action has been removed from the display.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#willdisappear)\n\n #[serde(rename_all = \"camelCase\")]\n\n WillDisappear {\n\n /// The uuid of the action.\n\n action: 
String,\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The device where the action was visible, or None if it was not on a device.\n\n device: Option<String>,\n\n /// Additional information about the action's appearance.\n\n payload: VisibilityPayload<S>,\n\n },\n\n /// The title has changed for an instance of an action.\n\n ///\n", "file_path": "src/lib.rs", "rank": 39, "score": 9.389263580830399 }, { "content": " /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The title to set.\n\n payload: TitlePayload,\n\n },\n\n /// Set the image of an action instance.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#setimage)\n\n #[serde(rename_all = \"camelCase\")]\n\n SetImage {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n\n /// The image to set.\n\n payload: ImagePayload,\n\n },\n\n /// Temporarily overlay the key image with an alert icon.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#showalert)\n\n #[serde(rename_all = \"camelCase\")]\n\n ShowAlert {\n", "file_path": "src/lib.rs", "rank": 40, "score": 9.273578173453222 }, { "content": " E: de::Error,\n\n {\n\n Ok(match value {\n\n \"mac\" => Platform::Mac,\n\n \"windows\" => Platform::Windows,\n\n value => Platform::Unknown(value.to_string()),\n\n })\n\n }\n\n }\n\n\n\n deserializer.deserialize_str(Visitor)\n\n }\n\n}\n\n\n\n/// Information about the Stream Deck software.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/registration-procedure/#info-parameter)\n\n#[derive(Deserialize)]\n\npub struct RegistrationInfoApplication {\n\n pub language: Language,\n", "file_path": "src/registration.rs", "rank": 41, "score": 9.149544635034626 }, { "content": "impl<'de> de::Deserialize<'de> for Language {\n\n fn 
deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n struct Visitor;\n\n\n\n impl<'de> de::Visitor<'de> for Visitor {\n\n type Value = Language;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a string\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Language, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(match value {\n\n \"en\" => Language::English,\n", "file_path": "src/registration.rs", "rank": 42, "score": 8.862201141979213 }, { "content": " /// The device to change the profile of.\n\n device: String,\n\n /// The profile to activate.\n\n payload: ProfilePayload,\n\n },\n\n /// Open a URL in the default browser.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#openurl)\n\n #[serde(rename_all = \"camelCase\")]\n\n OpenUrl {\n\n /// The url to open.\n\n payload: UrlPayload,\n\n },\n\n /// Retrieve plugin settings for via DidReceiveGlobalSettings.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-sent/#getglobalsettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n GetGlobalSettings {\n\n /// The instance of the action (key or part of a multiaction).\n\n context: String,\n", "file_path": "src/lib.rs", "rank": 43, "score": 8.626419988463729 }, { "content": " ///\n\n /// This message is sent in response to GetGlobalSettings, but also after\n\n /// the property inspector changes the settings.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#didreceiveglobalsettings)\n\n #[serde(rename_all = \"camelCase\")]\n\n DidReceiveGlobalSettings {\n\n /// The current settings for the action.\n\n payload: GlobalSettingsPayload<G>,\n\n },\n\n /// The computer has resumed from sleep.\n\n ///\n\n /// Added in Stream Deck software version 4.3.\n\n ///\n\n /// [Official 
Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#systemdidwakeup)\n\n SystemDidWakeUp,\n\n /// An event from an unsupported version of the Stream Deck software.\n\n ///\n\n /// This occurs when the Stream Deck software sends an event that is not\n\n /// understood. Usually this will be because the Stream Deck software is\n", "file_path": "src/lib.rs", "rank": 44, "score": 8.599457726044996 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\npub enum Alignment {\n\n /// The title should appear at the top of the key.\n\n Top,\n\n /// The title should appear in the middle of the key.\n\n Middle,\n\n /// The title should appear at the bottom of the key.\n\n Bottom,\n\n}\n\n\n\n/// Style information for a title.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#titleparametersdidchange)\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TitleParameters {\n\n /// The name of the font family.\n\n pub font_family: String,\n\n /// The font size.\n\n pub font_size: u8,\n", "file_path": "src/lib.rs", "rank": 45, "score": 8.56907875201876 }, { "content": " Unknown(u64),\n\n}\n\n\n\nimpl<'de> de::Deserialize<'de> for DeviceType {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n struct Visitor;\n\n\n\n impl<'de> de::Visitor<'de> for Visitor {\n\n type Value = DeviceType;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"an integer\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<DeviceType, E>\n\n where\n\n E: de::Error,\n", "file_path": "src/lib.rs", "rank": 46, "score": 8.524689619899075 }, { "content": " {\n\n Ok(match value {\n\n 0 => DeviceType::StreamDeck,\n\n 1 => DeviceType::StreamDeckMini,\n\n value => DeviceType::Unknown(value),\n\n })\n\n }\n\n }\n\n\n\n 
deserializer.deserialize_u64(Visitor)\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 47, "score": 7.820975892900551 }, { "content": " /// The name of the application.\n\n pub application: String,\n\n}\n\n\n\n/// The location of a key on a device.\n\n///\n\n/// Locations are specified using zero-indexed values starting from the top left corner of the device.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Coordinates {\n\n /// The x coordinate of the key.\n\n pub column: u8,\n\n /// The y-coordinate of the key.\n\n pub row: u8,\n\n}\n\n\n\n/// The vertical alignment of a title.\n\n///\n\n/// Titles are always centered horizontally.\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/lib.rs", "rank": 48, "score": 7.578619084814305 }, { "content": " /// A platform not documented in the 4.0.0 SDK.\n\n Unknown(String),\n\n}\n\n\n\nimpl<'de: 'a, 'a> de::Deserialize<'de> for Platform {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n struct Visitor;\n\n\n\n impl<'de> de::Visitor<'de> for Visitor {\n\n type Value = Platform;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a string\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Platform, E>\n\n where\n", "file_path": "src/registration.rs", "rank": 49, "score": 7.241756453557083 }, { "content": " pub platform: Platform,\n\n pub version: String,\n\n}\n\n\n\n/// Information about the environment the plugin is being loaded into.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/registration-procedure/#info-parameter)\n\n#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct RegistrationInfo {\n\n pub application: RegistrationInfoApplication,\n\n pub device_pixel_ratio: u8,\n\n pub devices: Vec<RegistrationInfoDevice>,\n\n}\n\n\n\n/// Registration parameters provided to the plugin 
on startup.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/registration-procedure/#compiled-plugin-registration)\n\n#[derive(Deserialize)]\n\npub struct RegistrationParams {\n", "file_path": "src/registration.rs", "rank": 50, "score": 6.909502910790562 }, { "content": " device_info: DeviceInfo,\n\n },\n\n /// A device has disconnected.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#devicediddisconnect)\n\n #[serde(rename_all = \"camelCase\")]\n\n DeviceDidDisconnect {\n\n /// The ID of the device that has disconnected.\n\n device: String,\n\n },\n\n /// An application monitored by the manifest file has launched.\n\n ///\n\n /// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/events-received/#applicationdidlaunch)\n\n #[serde(rename_all = \"camelCase\")]\n\n ApplicationDidLaunch {\n\n /// Information about the launched application.\n\n payload: ApplicationPayload,\n\n },\n\n /// An application monitored by the manifest file has terminated.\n\n ///\n", "file_path": "src/lib.rs", "rank": 51, "score": 6.860913115862475 }, { "content": " /// Whether the font is bold and/or italic.\n\n pub font_style: String,\n\n /// Whether the font is underlined.\n\n pub font_underline: bool,\n\n /// Whether the title is displayed.\n\n pub show_title: bool,\n\n /// The vertical alignment of the title.\n\n pub title_alignment: Alignment,\n\n /// The color of the title.\n\n pub title_color: String,\n\n}\n\n\n\n/// The size of a device in keys.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct DeviceSize {\n\n /// The number of key columns on the device.\n\n pub columns: u8,\n\n /// The number of key rows on the device.\n\n pub rows: u8,\n", "file_path": "src/lib.rs", "rank": 52, "score": 6.237904395912587 }, { "content": " /// The location of the key, or None if this action instance is part of a 
multi action.\n\n pub coordinates: Option<Coordinates>,\n\n /// The state of the action instance.\n\n pub state: Option<u8>,\n\n //TODO: is_in_multi_action ignored. replace coordinates with enum Location { Coordinates, MultiAction }.\n\n}\n\n\n\n/// The new title of a key.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TitleParametersPayload<S> {\n\n /// The stored settings for the action instance.\n\n pub settings: S,\n\n /// The location of the key, or None if this action instance is part of a multi action.\n\n pub coordinates: Coordinates,\n\n /// The state of the action instance.\n\n pub state: Option<u8>,\n\n /// The new title.\n\n pub title: String,\n\n /// Additional parameters for the display of the title.\n", "file_path": "src/lib.rs", "rank": 53, "score": 5.2266440703056425 }, { "content": "}\n\n\n\n/// The type of connected hardware device.\n\n///\n\n/// [Official Documentation](https://developer.elgato.com/documentation/stream-deck/sdk/overview/#stream-deck-hardware)\n\n#[derive(Debug)]\n\npub enum DeviceType {\n\n /// The [Stream Deck](https://www.elgato.com/en/gaming/stream-deck).\n\n StreamDeck,\n\n /// The [Stream Deck Mini](https://www.elgato.com/en/gaming/stream-deck-mini).\n\n StreamDeckMini,\n\n /// The [Stream Deck XL](https://www.elgato.com/en/gaming/stream-deck-xl).\n\n ///\n\n /// Added in Stream Deck software version 4.3.\n\n StreamDeckXl,\n\n /// The [Stream Deck Mobile](https://www.elgato.com/en/gaming/stream-deck-mobile) app.\n\n ///\n\n /// Added in Stream Deck software version 4.3.\n\n StreamDeckMobile,\n\n /// A device not documented in the 4.3.0 SDK.\n", "file_path": "src/lib.rs", "rank": 54, "score": 5.10172168760791 }, { "content": " \"fr\" => Language::French,\n\n \"de\" => Language::German,\n\n \"es\" => Language::Spanish,\n\n \"ja\" => Language::Japanese,\n\n \"zh_cn\" => Language::ChineseChina,\n\n value => Language::Unknown(value.to_string()),\n\n })\n\n }\n\n }\n\n\n\n 
deserializer.deserialize_str(Visitor)\n\n }\n\n}\n\n\n\n/// The platform on which the Stream Deck software is running.\n\npub enum Platform {\n\n /// Mac OS X\n\n Mac,\n\n /// Windows\n\n Windows,\n", "file_path": "src/registration.rs", "rank": 55, "score": 4.84188939919466 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct KeyPayload<S> {\n\n /// The stored settings for the action instance.\n\n pub settings: S,\n\n /// The location of the key that was pressed, or None if this action instance is part of a multi action.\n\n pub coordinates: Option<Coordinates>,\n\n /// The current state of the action instance.\n\n pub state: Option<u8>,\n\n /// The desired state of the action instance (if this instance is part of a multi action).\n\n pub user_desired_state: Option<u8>,\n\n //TODO: is_in_multi_action ignored. replace coordinates with enum Location { Coordinates, MultiAction }.\n\n}\n\n\n\n/// Additional information about a key's appearance.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct VisibilityPayload<S> {\n\n /// The stored settings for the action instance.\n\n pub settings: S,\n", "file_path": "src/lib.rs", "rank": 56, "score": 4.45651198011859 }, { "content": " }\n\n fn emit_i16(&mut self, key: Key, val: i16) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_u32(&mut self, key: Key, val: u32) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_i32(&mut self, key: Key, val: i32) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_f32(&mut self, key: Key, val: f32) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_u64(&mut self, key: Key, val: u64) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n", "file_path": "src/logging.rs", "rank": 57, "score": 
2.896963364408009 }, { "content": " }\n\n fn emit_usize(&mut self, key: Key, val: usize) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_isize(&mut self, key: Key, val: isize) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_u8(&mut self, key: Key, val: u8) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_i8(&mut self, key: Key, val: i8) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_u16(&mut self, key: Key, val: u16) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n", "file_path": "src/logging.rs", "rank": 58, "score": 2.896963364408009 }, { "content": " }\n\n fn emit_i64(&mut self, key: Key, val: i64) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_f64(&mut self, key: Key, val: f64) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_str(&mut self, key: Key, val: &str) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n fn emit_arguments(&mut self, key: Key, val: &fmt::Arguments) -> slog::Result {\n\n self.stack.push(format!(\"{}: {}\", key, val));\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/logging.rs", "rank": 59, "score": 2.855105881865898 } ]
Rust
src/commands/install.rs
RyosukeNAKATA/mamimi
347583f657f0f8a4df2d91df86e05d6113251f44
use crate::alias::create_alias; use crate::archive::{self, extract::Error as ExtractError, extract::Extract}; use crate::config::MamimiConfig; use crate::current_python_version::current_python_version; use crate::input_version::InputVersion; use crate::outln; use crate::python_version::PythonVersion; use crate::version_files::get_user_version_for_directory; use anyhow::Result; use colored::Colorize; use dirs::config_dir; use log::debug; use num_cpus; use reqwest::Url; use std::env::current_dir; use std::error; use std::io::prelude::*; use std::path::{Path, PathBuf}; use tempfile; use thiserror::Error; #[derive(Error, Debug)] pub enum MamimiError { #[error(transparent)] HttpError(#[from] reqwest::Error), #[error(transparent)] IoError(#[from] std::io::Error), #[error("Can't find the number of cores")] FromUtf8Error(#[from] std::string::FromUtf8Error), #[error("Can't extract the file: {source:?}")] ExtractError { source: ExtractError }, #[error("The downloaded archive is empty")] TarIsEmpty, #[error("Can't find version")] VersionNotFound { version: InputVersion }, #[error("Can't list the remote versions: {source:?}")] CannotListRemoteVersions { source: reqwest::Error }, #[error("Version already installed at {path:?}")] VersionAlreadyInstalled { path: PathBuf }, #[error( "Cannnot find the version in dotfiles. Please provide a version manually to the command." 
)] CannotInferVersion, #[error("The requested version is not installable: {version}")] NotInstallableVerison { version: PythonVersion }, #[error("Cannot build Python: {stderr}")] CannotBuildPython { stderr: String }, } #[derive(clap::Parser, Debug, Default)] pub struct Install { pub version: Option<InputVersion>, pub configure_opts: Vec<String>, } impl super::command::Command for Install { type Error = MamimiError; fn apply(&self, config: &MamimiConfig) -> Result<(), Self::Error> { let current_version = self .version .clone() .or_else(|| get_user_version_for_directory(std::env::current_dir().unwrap())) .ok_or(MamimiError::CannotInferVersion)?; let version = match current_version.clone() { InputVersion::Full(PythonVersion::Semver(v)) => PythonVersion::Semver(v), InputVersion::Full(PythonVersion::System) => { return Err(MamimiError::NotInstallableVerison { version: PythonVersion::System, }) } current_version => { let avalable_versions = crate::remote_python_index::list() .map_err(|source| MamimiError::CannotListRemoteVersions { source })? .drain(..) .map(|x| x.python_version) .collect::<Vec<_>>(); current_version .to_version(&avalable_versions) .ok_or(MamimiError::VersionNotFound { version: current_version, })? 
.clone() } }; let installations_dir = config.python_versions_dir(); let installation_dir = PathBuf::from(&installations_dir).join(version.to_string()); if installation_dir.exists() { return Err(MamimiError::VersionAlreadyInstalled { path: installation_dir, }); } let url = package_url(&version); outln!( config, Error, "{} Downloading {}", "==>".green(), format!("{}", url).green() ); let response = reqwest::blocking::get(url)?; if response.status() == 404 { return Err(MamimiError::VersionNotFound { version: current_version, }); } outln!( config, Error, "{} Extracting {}", "==>".green(), format!("{}", url).green() ); let tmp_installations_dir = installations_dir.join(".downloads"); std::fs::create_dir_all(&tmp_installations_dir).map_err(MamimiError::IoError)?; let tmp_dir = tempfile::TempDir::new_in(&tmp_installations_dir) .expect("Cannot generate a temp directory"); extract_archive_into(&tmp_dir, response)?; outln!( config, Error, "{} Building {}", "==>".green(), format!("Python {}", current_version).green() ); let installed_directory = std::fs::read_dir(&tmp_dir) .map_err(MamimiError::IoError)? .next() .ok_or(MamimiError::TarIsEmpty)? 
.map_err(MamimiError::IoError)?; let installed_directory = installed_directory.path(); build_package( &installed_directory, &installation_dir, &self.configure_opts, )?; if !config.default_python_version_dir().exists() { debug!("Use {} as the default Python version", current_version); create_alias(&config, "default", &version).map_err(MamimiError::IoError)?; } Ok(()) } } fn extract_archive_into<P: AsRef<Path>>( path: P, response: reqwest::blocking::Response, ) -> Result<(), MamimiError> { #[cfg(unix)] let extractor = archive::tar_xz::TarXz::new(response); #[cfg(windows)] let extractor = archive::tar_xz::TarXz::new(response); extractor .extract_into(path) .map_err(|source| MamimiError::ExtractError { source })?; Ok(()) } fn package_url(version: &PythonVersion) -> Url { debug!("package url"); Url::parse(&format!( "https://www.python.org/ftp/python/{}/Python-{}.tar.xz", version, version )) .unwrap() } #[cfg(unix)] fn archive(version: &PythonVersion) -> String { format!("python-{}.tar.xz", version) } #[cfg(windows)] fn archive(version: &PythonVersion) -> String { format!("python-{}.zip", version) } #[allow(clippy::unnecessary_wraps)] fn openssl_dir() -> Result<String, MamimiError> { #[cfg(target_os = "macos")] return Ok(String::from_utf8_lossy( &super::command::Command::new("brew") .arg("--prefix") .arg("[email protected]") .output() .map_err(MamimiError::IoError)? 
.stdout, ) .trim() .to_string()); #[cfg(not(target_os = "macos"))] return Ok("/url/local".to_string()); } fn build_package( current_dir: &Path, installed_dir: &Path, configure_opts: &[String], ) -> Result<(), MamimiError> { debug!("./configure {}", configure_opts.join(" ")); let mut command = super::command::Command::new("sh"); command .arg("configure") .arg(format!("--prefix={}", installed_dir.to_str().unwrap())) .args(configure_opts); if !configure_opts .iter() .any(|opt| opt.starts_with("--with-openssl-dir")) { command.arg(format!("--with-openssl-dir={}", openssl_dir()?)); } let configure = command .current_dir(&current_dir) .output() .map_err(MamimiError::IoError)?; if !configure.status.success() { return Err(MamimiError::CannotBuildPython { stderr: format!( "configure failed: {}", String::from_utf8_lossy(&configure.stderr).to_string() ), }); }; debug!("make -j {}", num_cpus::get().to_string()); let make = super::command::Command::new("make") .arg("-j") .arg(num_cpus::get().to_string()) .current_dir(&current_dir) .output() .map_err(MamimiError::IoError)?; if !make.status.success() { return Err(MamimiError::CannotBuildPython { stderr: format!( "make failed: {}", String::from_utf8_lossy(&make.stderr).to_string() ), }); }; Ok(()) } #[cfg(test)] mod tests { use super::*; use crate::commands::command::Command; use crate::config::MamimiConfig; use crate::python_version::PythonVersion; use itertools::Itertools; use tempfile::tempdir; #[test] fn test_install_second_version() { let config = MamimiConfig { base_dir: Some(tempdir().unwrap().path().to_path_buf()), ..Default::default() }; Install { version: Some(InputVersion::Full(PythonVersion::Semver("3.8.7").unwrap())), configure_opts: vec![], } .apply(&config) .expect("Can't install Python3.8.7"); assert_eq!( std::fs::read_link(&config.default_python_version_dir()) .unwrap() .components() .last(), Some(std::path::Component::Normal(std::ffi::OsStr::new("3.9.6"))) ); } #[test] fn test_install_default_python_version() { 
let config = MamimiConfig { base_dir: Some(tempdir().unwrap().path().to_path_buf()), ..Default::default() }; Install { version: Some(InputVersion::Full(PythonVersion::Semver( semver::Version::parse("3.8.7").unwrap(), ))), configure_opts: vec![], } .apply(&config) .expect("Can't insatll"); assert!(config.installations_dir().join("3.8.7").exists()); assert!(config .installations_dir() .join("3.8.7") .join("bin") .join("python3") .exists()); assert!(config.default_python_version_dir().exists()); } }
use crate::alias::create_alias; use crate::archive::{self, extract::Error as ExtractError, extract::Extract}; use crate::config::MamimiConfig; use crate::current_python_version::current_python_version; use crate::input_version::InputVersion; use crate::outln; use crate::python_version::PythonVersion; use crate::version_files::get_user_version_for_directory; use anyhow::Result; use colored::Colorize; use dirs::config_dir; use log::debug; use num_cpus; use reqwest::Url; use std::env::current_dir; use std::error; use std::io::prelude::*; use std::path::{Path, PathBuf}; use tempfile; use thiserror::Error; #[derive(Error, Debug)] pub enum MamimiError { #[error(transparent)] HttpError(#[from] reqwest::Error), #[error(transparent)] IoError(#[from] std::io::Error), #[error("Can't find the number of cores")] FromUtf8Error(#[from] std::string::FromUtf8Error), #[error("Can't extract the file: {source:?}")] ExtractError { source: ExtractError }, #[error("The downloaded archive is empty")] TarIsEmpty, #[error("Can't find version")] VersionNotFound { version: InputVersion }, #[error("Can't list the remote versions: {source:?}")] CannotListRemoteVersions { source: reqwest::Error }, #[error("Version already installed at {path:?}")] VersionAlreadyInstalled { path: PathBuf }, #[error( "Cannnot find the version in dotfiles. Please provide a version manually to the command." 
)] CannotInferVersion, #[error("The requested version is not installable: {version}")] NotInstallableVerison { version: PythonVersion }, #[error("Cannot build Python: {stderr}")] CannotBuildPython { stderr: String }, } #[derive(clap::Parser, Debug, Default)] pub struct Install { pub version: Option<InputVersion>, pub configure_opts: Vec<String>, } impl super::command::Command for Install { type Error = MamimiError; fn apply(&self, config: &MamimiConfig) -> Result<(), Self::Error> { let current_version = self .version .clone() .or_else(|| get_user_version_for_directory(std::env::current_dir().unwrap())) .ok_or(MamimiError::CannotInferVersion)?; let version = match current_version.clone() { InputVersion::Full(PythonVersion::Semver(v)) => PythonVersion::Semver(v), InputVersion::Full(PythonVersion::System) => { return Err(MamimiError::NotInstallableVerison { version: PythonVersion::System, }) } current_version => { let avalable_versions = crate::remote_python_index::list() .map_err(|source| MamimiError::CannotListRemoteVersions { source })? .drain(..) .map(|x| x.python_version) .collect::<Vec<_>>(); current_version .to_version(&avalable_versions)
.expect("Cannot generate a temp directory"); extract_archive_into(&tmp_dir, response)?; outln!( config, Error, "{} Building {}", "==>".green(), format!("Python {}", current_version).green() ); let installed_directory = std::fs::read_dir(&tmp_dir) .map_err(MamimiError::IoError)? .next() .ok_or(MamimiError::TarIsEmpty)? .map_err(MamimiError::IoError)?; let installed_directory = installed_directory.path(); build_package( &installed_directory, &installation_dir, &self.configure_opts, )?; if !config.default_python_version_dir().exists() { debug!("Use {} as the default Python version", current_version); create_alias(&config, "default", &version).map_err(MamimiError::IoError)?; } Ok(()) } } fn extract_archive_into<P: AsRef<Path>>( path: P, response: reqwest::blocking::Response, ) -> Result<(), MamimiError> { #[cfg(unix)] let extractor = archive::tar_xz::TarXz::new(response); #[cfg(windows)] let extractor = archive::tar_xz::TarXz::new(response); extractor .extract_into(path) .map_err(|source| MamimiError::ExtractError { source })?; Ok(()) } fn package_url(version: &PythonVersion) -> Url { debug!("package url"); Url::parse(&format!( "https://www.python.org/ftp/python/{}/Python-{}.tar.xz", version, version )) .unwrap() } #[cfg(unix)] fn archive(version: &PythonVersion) -> String { format!("python-{}.tar.xz", version) } #[cfg(windows)] fn archive(version: &PythonVersion) -> String { format!("python-{}.zip", version) } #[allow(clippy::unnecessary_wraps)] fn openssl_dir() -> Result<String, MamimiError> { #[cfg(target_os = "macos")] return Ok(String::from_utf8_lossy( &super::command::Command::new("brew") .arg("--prefix") .arg("[email protected]") .output() .map_err(MamimiError::IoError)? 
.stdout, ) .trim() .to_string()); #[cfg(not(target_os = "macos"))] return Ok("/url/local".to_string()); } fn build_package( current_dir: &Path, installed_dir: &Path, configure_opts: &[String], ) -> Result<(), MamimiError> { debug!("./configure {}", configure_opts.join(" ")); let mut command = super::command::Command::new("sh"); command .arg("configure") .arg(format!("--prefix={}", installed_dir.to_str().unwrap())) .args(configure_opts); if !configure_opts .iter() .any(|opt| opt.starts_with("--with-openssl-dir")) { command.arg(format!("--with-openssl-dir={}", openssl_dir()?)); } let configure = command .current_dir(&current_dir) .output() .map_err(MamimiError::IoError)?; if !configure.status.success() { return Err(MamimiError::CannotBuildPython { stderr: format!( "configure failed: {}", String::from_utf8_lossy(&configure.stderr).to_string() ), }); }; debug!("make -j {}", num_cpus::get().to_string()); let make = super::command::Command::new("make") .arg("-j") .arg(num_cpus::get().to_string()) .current_dir(&current_dir) .output() .map_err(MamimiError::IoError)?; if !make.status.success() { return Err(MamimiError::CannotBuildPython { stderr: format!( "make failed: {}", String::from_utf8_lossy(&make.stderr).to_string() ), }); }; Ok(()) } #[cfg(test)] mod tests { use super::*; use crate::commands::command::Command; use crate::config::MamimiConfig; use crate::python_version::PythonVersion; use itertools::Itertools; use tempfile::tempdir; #[test] fn test_install_second_version() { let config = MamimiConfig { base_dir: Some(tempdir().unwrap().path().to_path_buf()), ..Default::default() }; Install { version: Some(InputVersion::Full(PythonVersion::Semver("3.8.7").unwrap())), configure_opts: vec![], } .apply(&config) .expect("Can't install Python3.8.7"); assert_eq!( std::fs::read_link(&config.default_python_version_dir()) .unwrap() .components() .last(), Some(std::path::Component::Normal(std::ffi::OsStr::new("3.9.6"))) ); } #[test] fn test_install_default_python_version() { 
let config = MamimiConfig { base_dir: Some(tempdir().unwrap().path().to_path_buf()), ..Default::default() }; Install { version: Some(InputVersion::Full(PythonVersion::Semver( semver::Version::parse("3.8.7").unwrap(), ))), configure_opts: vec![], } .apply(&config) .expect("Can't insatll"); assert!(config.installations_dir().join("3.8.7").exists()); assert!(config .installations_dir() .join("3.8.7") .join("bin") .join("python3") .exists()); assert!(config.default_python_version_dir().exists()); } }
.ok_or(MamimiError::VersionNotFound { version: current_version, })? .clone() } }; let installations_dir = config.python_versions_dir(); let installation_dir = PathBuf::from(&installations_dir).join(version.to_string()); if installation_dir.exists() { return Err(MamimiError::VersionAlreadyInstalled { path: installation_dir, }); } let url = package_url(&version); outln!( config, Error, "{} Downloading {}", "==>".green(), format!("{}", url).green() ); let response = reqwest::blocking::get(url)?; if response.status() == 404 { return Err(MamimiError::VersionNotFound { version: current_version, }); } outln!( config, Error, "{} Extracting {}", "==>".green(), format!("{}", url).green() ); let tmp_installations_dir = installations_dir.join(".downloads"); std::fs::create_dir_all(&tmp_installations_dir).map_err(MamimiError::IoError)?; let tmp_dir = tempfile::TempDir::new_in(&tmp_installations_dir)
random
[ { "content": "pub fn current_python_version(config: &MamimiConfig) -> Result<Option<PythonVersion>, Error> {\n\n let multishell_path = config.multishell_path().ok_or(Error::EnvNotApplied)?;\n\n\n\n if multishell_path.read_link().ok() == Some(system_version::path()) {\n\n return Ok(Some(PythonVersion::Bypassed));\n\n }\n\n if let Ok(resolved_path) = std::fs::canonicalize(multishell_path) {\n\n let installation_path = resolved_path\n\n .parent()\n\n .expect(\"multishell path can't be in the root\");\n\n let file_name = installation_path\n\n .file_name\n\n .expect(\"Can't get file name\")\n\n .to_str()\n\n .expect(\"Invalid OS string\");\n\n let version = PythonVersion::parse(file_name).map_err(|source| Error::VersionError {\n\n source,\n\n version: file_name.to_string(),\n\n })?;\n\n Ok(Some(version))\n", "file_path": "src/current_python_version.rs", "rank": 0, "score": 275621.2609281681 }, { "content": "pub fn list() -> Result<Vec<IndexedPythonVersion>, reqwest::Error> {\n\n let value = reqwest::blocking::get(format!(\"https://www.python.org/ftp/python/\").as_str())\n\n .unwrap()\n\n .text()\n\n .unwrap();\n\n let doc = scraper::Html::parse_document(&value);\n\n let sel = scraper::Selector::parse(\"a\").unwrap();\n\n\n\n let mut versions = vec![];\n\n for (index, node) in doc.select(&sel).enumerate() {\n\n if node.inner_html().is_empty() || index == 0 {\n\n continue;\n\n }\n\n let mut version = node.inner_html();\n\n version.retain(|c| c != '/');\n\n versions.push(IndexedPythonVersion {\n\n python_version: match PythonVersion::parse(&version.to_string()) {\n\n Ok(v) => v,\n\n Err(_) => continue,\n\n },\n\n })\n\n }\n\n versions.sort_by(|a, b| a.python_version.cmp(&b.python_version));\n\n Ok(versions)\n\n}\n", "file_path": "src/remote_python_index.rs", "rank": 1, "score": 271214.8236752645 }, { "content": "pub fn get_user_version_for_file(path: PathBuf) -> Option<InputVersion> {\n\n let file = std::fs::File::open(path).ok()?;\n\n let version = {\n\n let mut reader 
= DecodeReaderBytes::new(file);\n\n let mut version = String::new();\n\n reader.read_to_string(&mut version).map(|_| version)\n\n };\n\n\n\n match version {\n\n Err(err) => {\n\n info!(\"Cant't read file: {}\", err);\n\n None\n\n }\n\n Ok(version) => {\n\n info!(\"Found string {:?} in version file\", version);\n\n InputVersion::from_str(version.trim()).ok()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/version_files.rs", "rank": 4, "score": 201800.6427410756 }, { "content": "fn create_symlink(config: &crate::config::MamimiConfig) -> std::path::PathBuf {\n\n let system_temp_dir = std::env::temp_dir();\n\n let mut temp_dir = generate_symlink_path(&system_temp_dir);\n\n\n\n while temp_dir.exists() {\n\n temp_dir = generate_symlink_path(&system_temp_dir);\n\n }\n\n\n\n create_symlink_dir(config.default_python_version_dir(), &temp_dir)\n\n .expect(\"Can't create symlink\");\n\n temp_dir\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_smoke() {\n\n use crate::shell;\n\n let config: MamimiConfig = MamimiConfig::default();\n", "file_path": "src/commands/init.rs", "rank": 5, "score": 193549.01642459416 }, { "content": "pub fn get_user_version_for_directory(path: PathBuf) -> Option<InputVersion> {\n\n let version_file_path = find_up(path, VERSION_FILE)?;\n\n info!(\n\n \"Looking for version file in {}. exists? 
{}\",\n\n version_file_path.display(),\n\n version_file_path.exists()\n\n );\n\n if let Some(version) = get_user_version_for_file(version_file_path) {\n\n return Some(version);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/version_files.rs", "rank": 6, "score": 193423.19232784084 }, { "content": "pub fn find_up(search_dir: PathBuf, file_name: &str) -> Option<PathBuf> {\n\n for dir in each_dir(search_dir) {\n\n let path = dir.join(&file_name);\n\n if path.exists() {\n\n return Some(path);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/version_files.rs", "rank": 7, "score": 192921.82357832868 }, { "content": "pub fn list_aliases(config: &MamimiConfig) -> std::io::Result<Vec<StroredAlias>> {\n\n let vec: Vec<_> = std::fs::read_dir(&config.aliases_dir())?\n\n .filter_map(Result::ok)\n\n .filter_map(|x| TryInto::<StroredAlias>::try_into(x.path().as_path()).ok())\n\n .collect();\n\n Ok(vec)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StroredAlias {\n\n alias_path: PathBuf,\n\n destination_path: PathBuf,\n\n}\n\n\n\nimpl std::convert::TryInto<StroredAlias> for &std::path::Path {\n\n type Error = std::io::Error;\n\n\n\n fn try_into(self) -> Result<StroredAlias, Self::Error> {\n\n let shallow_self = shallow_read_symlink(self)?;\n\n let destination_path = if shallow_self == system_version::path() {\n", "file_path": "src/alias.rs", "rank": 8, "score": 192636.69549006116 }, { "content": "pub fn path() -> PathBuf {\n\n let path_as_string = if cfg!(windows) {\n\n \"Z:/_mamimi/Nothing/Should/Be/Here/installation\"\n\n } else {\n\n \"/dev/null/installation\"\n\n };\n\n\n\n PathBuf::from(path_as_string)\n\n}\n\n\n", "file_path": "src/system_version.rs", "rank": 9, "score": 187897.60226163908 }, { "content": "fn create_cd_file_at(path: &std::path::Path) -> std::io::Result<()> {\n\n use std::io::Write;\n\n let cmd_contents = include_bytes!(\"./cd.cmd\");\n\n let mut file = std::fs::File::create(path)?;\n\n file.write_all(cmd_contents)?;\n\n Ok(())\n\n}\n", "file_path": 
"src/shell/windows_command/mod.rs", "rank": 11, "score": 172218.70933517753 }, { "content": "pub fn shallow_read_symlink<P: AsRef<Path>>(path: P) -> std::io::Result<std::path::PathBuf> {\n\n std::fs::read_link(path)\n\n}\n", "file_path": "src/symlink.rs", "rank": 12, "score": 165922.238240868 }, { "content": "fn each_dir(path: PathBuf) -> Vec<PathBuf> {\n\n let mut path = std::fs::canonicalize(path).unwrap();\n\n let mut paths = vec![path.clone()];\n\n\n\n while let Some(parent) = path.clone().parent() {\n\n path = parent.to_path_buf();\n\n debug!(\"get parent of {:?}...\", parent);\n\n paths.push(parent.to_path_buf())\n\n }\n\n paths.push(PathBuf::from(\"/\"));\n\n\n\n paths\n\n}\n", "file_path": "src/version_files.rs", "rank": 13, "score": 165538.59666130313 }, { "content": "#[cfg(windows)]\n\npub fn remove_symlink_dir<P: AsRef<Path>>(path: P) -> std::io::Result<()> {\n\n std::fs::remove_file(path)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/symlink.rs", "rank": 15, "score": 155183.93839737374 }, { "content": "fn replace_symlink(from: &std::path::Path, to: &std::path::Path) -> std::io::Result<()> {\n\n let symlink_deletion_result = remove_symlink_dir(&to);\n\n match create_symlink_dir(&from, &to) {\n\n ok @ Ok(_) => ok,\n\n err @ Err(_) => symlink_deletion_result.and(err),\n\n }\n\n}\n", "file_path": "src/commands/local.rs", "rank": 16, "score": 150781.29962570535 }, { "content": "pub fn is_dotfile(dir: &std::fs::DirEntry) -> bool {\n\n dir.file_name()\n\n .to_str()\n\n .map(|s| s.starts_with('.'))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/python_version.rs", "rank": 17, "score": 144234.97789096698 }, { "content": "#[cfg(windows)]\n\npub fn create_symlink_dir<P: AsRef<Path>, U: AsRef<Path>>(from: P, to: U) -> std::io::Result<()> {\n\n junction::create(from, to)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/symlink.rs", "rank": 18, "score": 139794.17850648903 }, { "content": "pub trait Extract {\n\n fn extract_into<P: AsRef<Path>>(self, path: P) -> 
Result<(), Error>;\n\n}\n", "file_path": "src/archive/extract.rs", "rank": 19, "score": 138881.54652083124 }, { "content": "fn generate_symlink_path(root: &std::path::Path) -> std::path::PathBuf {\n\n let temp_dir_name = format!(\n\n \"mamimi_{}_{}\",\n\n std::process::id(),\n\n chrono::Utc::now().timestamp_millis()\n\n );\n\n root.join(temp_dir_name)\n\n}\n\n\n", "file_path": "src/commands/init.rs", "rank": 20, "score": 134175.00844454928 }, { "content": "pub fn multishell_storage() -> PathBuf {\n\n runtime_dir()\n\n .or_else(state_dir)\n\n .or_else(cache_dir)\n\n .unwrap_or_else(std::env::temp_dir)\n\n .join(\"mamimi_multishells\")\n\n}\n", "file_path": "src/directories.rs", "rank": 21, "score": 132359.89164463003 }, { "content": "fn first_letter_is_number(s: &str) -> bool {\n\n s.chars().next().map_or(false, |x| x.is_digit(10))\n\n}\n\n\n\nimpl PythonVersion {\n\n pub fn parse<S: AsRef<str>>(version_str: S) -> Result<Self, semver::Error> {\n\n let lowercased = version_str.as_ref().to_lowercase();\n\n if lowercased == system_version::display_name() {\n\n Ok(Self::Bypassed)\n\n } else {\n\n Ok(Self::Alias(lowercased))\n\n }\n\n }\n\n\n\n pub fn alias_name(&self) -> Option<String> {\n\n match self {\n\n l @ (&Self::Lts(_) | &Self::Alias(_)) => Some(l.v_str()),\n\n _ => None,\n\n }\n\n }\n", "file_path": "src/python_version.rs", "rank": 23, "score": 115725.94527593727 }, { "content": "pub trait Command: Sized {\n\n type Error: std::error::Error;\n\n\n\n fn apply(&self, config: &MamimiConfig) -> Result<(), Self::Error>;\n\n\n\n fn handle_error(err: Self::Error, config: &MamimiConfig) {\n\n let err_s = format!(\"{}\", err);\n\n outln!(config, Error, \"{} {}\", \"error:\".red().bold(), err_s.red());\n\n std::process::exit(1);\n\n }\n\n\n\n fn call(&self, config: &MamimiConfig) {\n\n if let Err(err) = self.apply(&config) {\n\n Self::handle_error(err, &config)\n\n }\n\n }\n\n}\n", "file_path": "src/commands/command.rs", "rank": 24, "score": 106198.2750665947 }, { 
"content": "pub fn display_name() -> &'static str {\n\n \"system\"\n\n}\n", "file_path": "src/system_version.rs", "rank": 25, "score": 106050.98431875226 }, { "content": "pub trait Shell: Debug {\n\n fn path(&self, path: &Path) -> anyhow::Result<String>;\n\n fn set_env_var(&self, name: &str, value: &str) -> String;\n\n fn use_on_cd(&self, config: &crate::config::MamimiConfig) -> anyhow::Result<String>;\n\n fn rehash(&self) -> clap_complete::Shell {\n\n None\n\n }\n\n fn to_clap_shell(&self) -> clap_complete::Shell;\n\n}\n\n\n\n#[cfg(windows)]\n\npub const AVAILABLE_SHELLS: &[&str; 5] = &[\"cmd\", \"powershell\", \"bash\", \"fish\", \"zsh\"];\n\n\n\n#[cfg(unix)]\n\npub const AVAILABLE_SHELLS: &[&str; 5] = &[\"bash\", \"fish\", \"zsh\"];\n\n\n\nimpl std::str::FromStr for Box<dyn Shell> {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Box<dyn Shell>, Self::Err> {\n", "file_path": "src/shell/shell.rs", "rank": 26, "score": 97902.08029742495 }, { "content": "pub fn create_alias(\n\n config: &MamimiConfig,\n\n common_name: &str,\n\n version: &PythonVersion,\n\n) -> std::io::Result<()> {\n\n let aliases_dir = config.aliases_dir();\n\n std::fs::create_dir_all(&aliases_dir)?;\n\n\n\n let version_dir = version\n\n .installation_path(config)\n\n .ok_or_else(|| std::io::ErrorKind::from(std::io::ErrorKind::NotFound))?;\n\n let alias_dir = aliases_dir.join(common_name);\n\n\n\n if alias_dir.exists() {\n\n remove_symlink_dir(&alias_dir)?;\n\n }\n\n\n\n create_symlink_dir(&version_dir, &alias_dir)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/alias.rs", "rank": 27, "score": 89212.73216606988 }, { "content": "use itertools::Itertools;\n\nuse thiserror::Error;\n\n\n\nuse crate::config;\n\n\n\n#[derive(Error, Debug)]\n\npub enum MamimiError {\n\n #[error(transparent)]\n\n HttpError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n}\n\n\n\npub struct InstallList {}\n\n\n\nimpl crate::command::Command for InstallList {\n\n type 
Error = MamimiError;\n\n\n\n fn apply(&self, config: &crate::config::MamimiConfig) -> Result<(), MamimiError> {\n\n let versions = crate::remote_python_index::list()?;\n", "file_path": "src/commands/install_list.rs", "rank": 28, "score": 88190.07086327826 }, { "content": " let versions = versions\n\n .into_iter()\n\n .map(|v| v.python_version)\n\n .sorted()\n\n .dedup();\n\n for version in versions {\n\n crate::outln!(config #Info, \"{}\", version);\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/install_list.rs", "rank": 29, "score": 88166.2780261352 }, { "content": "pub fn parse() -> Cli {\n\n Cli::parse()\n\n}\n", "file_path": "src/cli.rs", "rank": 30, "score": 87245.5199231327 }, { "content": "fn cache_dir() -> Option<PathBuf> {\n\n xdg_dir(\"XDG_CACHE_HOME\").or_else(dirs::cache_dir)\n\n}\n\n\n", "file_path": "src/directories.rs", "rank": 31, "score": 87169.83909015119 }, { "content": "fn state_dir() -> Option<PathBuf> {\n\n xdg_dir(\"XDG_STATE_HOME\").or_else(dirs::state_dir)\n\n}\n\n\n", "file_path": "src/directories.rs", "rank": 32, "score": 87169.83909015119 }, { "content": "fn runtime_dir() -> Option<PathBuf> {\n\n xdg_dir(\"XDG_RUNTIME_HOME\").or_else(dirs::runtime_dir)\n\n}\n\n\n", "file_path": "src/directories.rs", "rank": 33, "score": 87169.83909015119 }, { "content": "pub trait PathExt {\n\n fn ensure_exists_silently(self) -> Self;\n\n}\n\n\n\nimpl<T: AsRef<std::path::Path>> PathExt for T {\n\n fn ensure_exists_silently(self) -> Self {\n\n if let Err(err) = std::fs::create_dir_all(self.as_ref()) {\n\n warn!(\"Failed to create directory {:?}: {}\", self.as_ref(), err);\n\n }\n\n self\n\n }\n\n}\n", "file_path": "src/path_ext.rs", "rank": 34, "score": 82295.91803516055 }, { "content": "pub fn platform_arch() -> &'static str {\n\n \"x64\"\n\n}\n", "file_path": "src/system_info.rs", "rank": 35, "score": 80527.07475635919 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn platform_name() -> &'static str {\n\n 
\"linux\"\n\n}\n\n\n\n#[cfg(all(\n\n target_pointer_width = \"32\",\n\n any(target_arch = \"arm\", target_arch = \"aarch64\")\n\n))]\n", "file_path": "src/system_info.rs", "rank": 36, "score": 80527.07475635919 }, { "content": "fn xdg_dir(env: &str) -> Option<PathBuf> {\n\n let env_var = std::env::var(env).ok()?;\n\n Some(PathBuf::from(env_var))\n\n}\n\n\n", "file_path": "src/directories.rs", "rank": 37, "score": 80500.16440547565 }, { "content": " function _mamimi_autoload_hook --on-valiable PWD --description 'Change Python version on directory change'\n\n status --is-command-substitution; and return\n\n mamimi --log-level quiet local\n\n end\n\n \"#\n\n )\n\n .into()\n\n }\n\n}\n", "file_path": "src/shell/fish.rs", "rank": 38, "score": 75408.42954412257 }, { "content": "pub fn infer_shell() -> Option<Box<dyn Shell>> {\n\n let mut pid = Some(std::process::id());\n\n let mut visited = 0;\n\n\n\n while pid != None && visited < MAX_INTERACTIONS {\n\n let process_info = get_process_info(pid.unwrap()).ok()?;\n\n let binary = process_info\n\n .command\n\n .trim_start_matches('-')\n\n .split('/')\n\n .last()\n\n .expect(\"Can't read file name of process tree\");\n\n\n\n if let Some(shell) = super::shell_from_string(binary) {\n\n return Some(shell);\n\n }\n\n\n\n pid = process_info.parent_pid;\n\n visited += 1;\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/shell/infer/unix.rs", "rank": 39, "score": 73396.53368754163 }, { "content": "pub fn infer_shell() -> Option<Box<dyn Shell>> {\n\n let mut system = System::new();\n\n let mut current_pid = sysinfo::get_current_pid().ok();\n\n\n\n while let Some(pid) = current_pid {\n\n system.refresh_process(pid);\n\n if let Some(process) = system.process(pid) {\n\n current_pid = process.parent();\n\n let process_name = process\n\n .exe()\n\n .file_stem()\n\n .and_then(OsStr::to_str)\n\n .map(str::to_lowercase);\n\n if let Some(shell) = process_name\n\n .as_ref()\n\n .map(|x| &x[..])\n\n .and_then(super::shell_from_string)\n\n {\n\n 
return Some(shell);\n\n }\n\n } else {\n\n current_pid = None;\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "src/shell/infer/windows.rs", "rank": 40, "score": 73396.53368754163 }, { "content": "fn get_process_info(pid: u32) -> std::io::Result<ProcessInfo> {\n\n use std::io::{BufRead, BufReader};\n\n use std::process::Command;\n\n\n\n let buffer = Command::new(\"ps\")\n\n .arg(\"-o\")\n\n .arg(\"ppid,comm\")\n\n .arg(pid.to_string())\n\n .stdout(std::process::Stdio::piped())\n\n .spawn()?\n\n .stdout\n\n .ok_or_else(|| Error::from(ErrorKind::UnexpectedEof))?;\n\n\n\n let mut lines = BufReader::new(buffer).lines();\n\n\n\n lines\n\n .next()\n\n .ok_or_else(|| Error::from(ErrorKind::UnexpectedEof))??;\n\n\n\n let line = lines\n", "file_path": "src/shell/infer/unix.rs", "rank": 41, "score": 71783.28200762326 }, { "content": "use std::error::Error as StdError;\n\nuse std::path::Path;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n IoError(std::io::Error),\n\n ZipError(zip::result::ZipError),\n\n HttpError(reqwest::Error),\n\n}\n\n\n\nimpl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::IoError(x) => x.fmt(f),\n\n Self::ZipError(x) => x.fmt(f),\n\n Self::HttpError(x) => x.fmt(f),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/archive/extract.rs", "rank": 42, "score": 62645.27009567182 }, { "content": "impl StdError for Error {}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Self::IoError(err)\n\n }\n\n}\n\n\n\nimpl From<zip::result::ZipError> for Error {\n\n fn from(err: zip::result::ZipError) -> Self {\n\n Self::ZipError(err)\n\n }\n\n}\n\n\n\nimpl From<reqwest::Error> for Error {\n\n fn from(err: reqwest::Error) -> Self {\n\n Self::HttpError(err)\n\n }\n\n}\n\n\n", "file_path": "src/archive/extract.rs", "rank": 43, "score": 62636.66431176677 }, { "content": "use crate::input_version::InputVersion;\n\nuse encoding_rs_io::DecodeReaderBytes;\n\nuse 
log::{debug, info};\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nconst VERSION_FILE: &str = \".python-version\";\n\n\n", "file_path": "src/version_files.rs", "rank": 44, "score": 61230.54996466202 }, { "content": " .join(v.v_str())\n\n .join(\"installation\"),\n\n }\n\n }\n\n\n\n pub fn root_path(&self, config: &config::MamimiConfig) -> Option<std::path::PathBuf> {\n\n let path = self.installation_path(config);\n\n let mut canon_path = path.canonicalize().ok()?;\n\n canon_path.pop();\n\n Some(canon_path)\n\n }\n\n}\n\n\n\nimpl<'de> serde::Deserialize<'de> for PythonVersion {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n let version_str = String::deserialize(deserializer)?;\n\n PythonVersion::parse(version_str).map_err(serde::de::Error::custom)\n", "file_path": "src/python_version.rs", "rank": 45, "score": 60807.74287316362 }, { "content": "\n\n pub fn find_aliases(\n\n &self,\n\n config: &config::MamimiConfig,\n\n ) -> std::io::Result<Vec<alias::StroredAlias>> {\n\n let aliases = alias::list_aliases(config)?\n\n .drain(..)\n\n .filter(|alias| alias.s_ver() == self.v_str())\n\n .collect();\n\n Ok(aliases)\n\n }\n\n\n\n pub fn v_str(&self) -> String {\n\n format!(\"{}\", self)\n\n }\n\n\n\n pub fn installation_path(&self, config: &crate::config::MamimiConfig) -> std::path::PathBuf {\n\n match self {\n\n v @ Self::Semver(_) => config\n\n .installations_dir()\n", "file_path": "src/python_version.rs", "rank": 46, "score": 60806.66454917264 }, { "content": " }\n\n}\n\n\n\nimpl std::fmt::Display for PythonVersion {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Bypassed => write!(f, \"{}\", system_version::display_name()),\n\n Self::Lts(lts) => write!(f, \"lts-{}\", lts),\n\n Self::Semver(semver) => write!(f, \"v{}\", semver),\n\n Self::Alias(alias) => write!(f, \"{}\", alias),\n\n }\n\n }\n\n}\n\n\n\nimpl 
FromStr for PythonVersion {\n\n type Err = semver::Error;\n\n fn from_str(s: &str) -> Result<PythonVersion, Self::Err> {\n\n Self::parse(s)\n\n }\n\n}\n", "file_path": "src/python_version.rs", "rank": 47, "score": 60800.64468828741 }, { "content": "use crate::alias;\n\nuse crate::config;\n\nuse crate::system_version;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)]\n\npub enum PythonVersion {\n\n Semver(semver::Version),\n\n}\n\n\n", "file_path": "src/python_version.rs", "rank": 48, "score": 60799.950534932716 }, { "content": "\n\nimpl PartialEq<semver::Version> for PythonVersion {\n\n fn eq(&self, other: &semver::Version) -> bool {\n\n match self {\n\n Self::Bypassed | Self::Lts(_) | Self::Alias(_) => false,\n\n Self::Semver(v) => v == other,\n\n }\n\n }\n\n}\n", "file_path": "src/python_version.rs", "rank": 49, "score": 60796.43941771717 }, { "content": "use crate::python_version::PythonVersion;\n\nuse scraper;\n\nuse serde::Deserialize;\n\nuse url::Url;\n\n\n\nmod lts_status {\n\n use serde::{Deserialize, Deserializer};\n\n\n\n #[derive(Deserialize, Debug, PartialEq, Eq)]\n\n #[serde(untagged)]\n\n enum LtsStatus {\n\n Nope(bool),\n\n Yes(String),\n\n }\n\n\n\n impl From<LtsStatus> for Option<String> {\n\n fn from(status: LtsStatus) -> Self {\n\n match status {\n\n LtsStatus::Nope(_) => None,\n\n LtsStatus::Yes(x) => Some(x),\n", "file_path": "src/remote_python_index.rs", "rank": 50, "score": 60148.87139681734 }, { "content": " }\n\n }\n\n }\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n Ok(LtsStatus::deserialize(deserializer)?.into())\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct IndexedPythonVersion {\n\n /// https://npm.taobao.org/mirrors/python/ mirror\n\n pub python_version: PythonVersion,\n\n #[serde(with = \"lts_status\")]\n\n pub lts: Option<String>,\n\n pub files: Vec<String>,\n\n}\n\n\n", "file_path": 
"src/remote_python_index.rs", "rank": 51, "score": 60148.60144931481 }, { "content": "use crate::config::MamimiConfig;\n\nuse crate::outln;\n\nuse crate::python_version::{current_python_version, PythonVersion};\n\nuse colored::Colorize;\n\nuse log::debug;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum MamimiError {\n\n #[error(transparent)]\n\n HttpError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n #[error(transparent)]\n\n SemverError(#[from] semver::Error),\n\n}\n\n\n\npub struct Versions {}\n\n\n\nimpl crate::command::Command for Versions {\n", "file_path": "src/commands/versions.rs", "rank": 63, "score": 59176.240107596204 }, { "content": " type Error = MamimiError;\n\n\n\n fn apply(&self, config: &MamimiConfig) -> Result<(), Self::Error> {\n\n for entry in config.versions().read_dir().map_err(MamimiError::IoError)? {\n\n let entry = entry.map_err(MamimiError::IoError)?;\n\n if crate::python_version::is_dotfile(&entry) {\n\n continue;\n\n }\n\n\n\n let path = entry.path();\n\n let filename = path\n\n .file_name()\n\n .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))\n\n .map_err(MamimiError::IoError)?\n\n .to_str()\n\n .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))\n\n .map_err(MamimiError::IoError)?;\n\n let version = PythonVersion::parse(filename).map_err(MamimiError::SemverError)?;\n\n let current_python_version = current_python_version(&config).ok().flatten();\n\n debug!(\n", "file_path": "src/commands/versions.rs", "rank": 64, "score": 59171.89758385236 }, { "content": " \"Current Python Version: {}\",\n\n current_python_version.clone().unwrap()\n\n );\n\n if let Some(current_python_version) = current_python_version {\n\n if current_python_version == version {\n\n outln!(config #info, \"{} {}\", \"*\".green(), version.to_string().green());\n\n } else {\n\n outln!(config #info, \"{} {}\", \" \", version);\n\n }\n\n } else {\n\n outln!(config #info, \"{} 
{}\", \" \", version);\n\n };\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/versions.rs", "rank": 65, "score": 59165.981051230374 }, { "content": "\n\nimpl Default for VersionFileStrategy {\n\n fn default() -> Self {\n\n VersionFileStrategy::Local\n\n }\n\n}\n\n\n\nimpl FromStr for VersionFileStrategy {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"local\" => Ok(VersionFileStrategy::Local),\n\n \"recursive\" => Ok(VersionFileStrategy::Recursive),\n\n _ => Err(format!(\n\n \"Invalid strategy: {}. Expected one of: local, recursive\",\n\n s\n\n )),\n\n }\n\n }\n\n}\n", "file_path": "src/version_file_strategy.rs", "rank": 66, "score": 59163.72962718897 }, { "content": "use std::str::FromStr;\n\n\n\n#[derive(Debug)]\n\npub enum VersionFileStrategy {\n\n Local,\n\n Recursive,\n\n}\n\n\n\nimpl VersionFileStrategy {\n\n pub fn possible_values() -> &'static [&'static str] {\n\n &[\"local\", \"recursive\"]\n\n }\n\n\n\n pub fn as_str(&self) -> &'static str {\n\n match self {\n\n VersionFileStrategy::Local => \"local\",\n\n VersionFileStrategy::Recursive => \"recursive\",\n\n }\n\n }\n\n}\n", "file_path": "src/version_file_strategy.rs", "rank": 67, "score": 59160.645257350145 }, { "content": "use thiserror::Error;\n\n\n\nuse crate::config::MamimiConfig;\n\nuse crate::python_version::PythonVersion;\n\nuse crate::system_version;\n\n\n", "file_path": "src/current_python_version.rs", "rank": 68, "score": 58740.57716790295 }, { "content": " } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[derive(Error)]\n\npub enum Error {\n\n #[error(\n\n \"`mamimi env` was not applied in this context\\nCan't find mamimi's environment variables\"\n\n )]\n\n EnvNotApplied,\n\n #[error(\"Can't read the version as a valid semver\")]\n\n VersionError {\n\n source: semver::Error,\n\n version: String,\n\n },\n\n}\n", "file_path": "src/current_python_version.rs", "rank": 69, "score": 58738.214838995955 }, { "content": 
"#[derive(Debug)]\n\nstruct ProcessInfo {\n\n parent_pid: Option<u32>,\n\n command: String,\n\n}\n\n\n\nconst MAX_INTERACTIONS: u8 = 10;\n\n\n", "file_path": "src/shell/infer/unix.rs", "rank": 70, "score": 54963.86811501936 }, { "content": "fn next_of<'a, T: FromStr, It: Iterator<Item = &'a str>>(i: &mut It) -> Option<T> {\n\n let x = i.next()?;\n\n T::from_str(x).ok()\n\n}\n", "file_path": "src/input_version.rs", "rank": 71, "score": 52954.69849751287 }, { "content": "fn main() {\n\n env_logger::init();\n\n let value = crate::cli::parse();\n\n value.subcmd.call(value.config);\n\n}\n", "file_path": "src/main.rs", "rank": 72, "score": 43955.72179041413 }, { "content": "use crate::config::MamimiConfig;\n\nuse crate::outln;\n\nuse colored::Colorize;\n\n\n", "file_path": "src/commands/command.rs", "rank": 73, "score": 35682.668260025945 }, { "content": "}\n\n\n\nimpl Default for MamimiConfig {\n\n fn default() -> Self {\n\n Self {\n\n python_ftp_mirror: Url::parse(\"https://www.python.org/ftp/python/\").unwrap(),\n\n base_dir: None,\n\n multishell_path: None,\n\n log_level: LogLevel::default(),\n\n version_file_strategy: VersionFileStrategy::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl MamimiConfig {\n\n pub fn version_file_strategy(&self) -> &VersionFileStrategy {\n\n &self.version_file_strategy\n\n }\n\n\n\n pub fn multishell_path(&self) -> Option<&std::path::Path> {\n", "file_path": "src/config.rs", "rank": 74, "score": 32280.523644556924 }, { "content": "use crate::log_level::LogLevel;\n\nuse crate::path_ext::PathExt;\n\nuse crate::version_file_strategy::VersionFileStrategy;\n\nuse dirs::{data_dir, home_dir};\n\nuse std::path::PathBuf;\n\nuse url::Url;\n\n\n\n#[derive(clap::Parser, Debug)]\n\npub struct MamimiConfig {\n\n /// https://www.python.org/ftp/python/ mirror\n\n #[clap(\n\n long,\n\n env = \"MAMIMI_PYTHON_FTP_MIRROR\",\n\n default_value = \"https://www.python.org/ftp/python/\",\n\n global = true,\n\n hide_env_values = true\n\n )]\n\n pub python_ftp_mirror: 
Url,\n\n\n\n /// The root directory of mamimi installations.\n", "file_path": "src/config.rs", "rank": 75, "score": 32276.546379508152 }, { "content": " let modern = data_dir().map(|dir| dir.join(\".mamimi\"));\n\n\n\n if let Some(dir) = legacy {\n\n return dir;\n\n }\n\n\n\n modern\n\n .expect(\"Can't get data directory\")\n\n .ensure_exists_silently()\n\n }\n\n\n\n pub fn installations_dir(&self) -> PathBuf {\n\n self.base_dir_with_default()\n\n .join(\"python-versinos\")\n\n .ensure_exists_silently()\n\n }\n\n\n\n pub fn default_python_version_dir(&self) -> PathBuf {\n\n self.aliases_dir().join(\"default\")\n\n }\n", "file_path": "src/config.rs", "rank": 76, "score": 32269.685815452915 }, { "content": " match &self.multishell_path {\n\n None => None,\n\n Some(v) => Some(v.as_path()),\n\n }\n\n }\n\n\n\n pub fn log_level(&self) -> &LogLevel {\n\n &self.log_level\n\n }\n\n\n\n pub fn base_dir_with_default(&self) -> PathBuf {\n\n let user_pref = self.base_dir.clone();\n\n if let Some(dir) = user_pref {\n\n return dir;\n\n }\n\n\n\n let legacy = home_dir()\n\n .map(|dir| dir.join(\".mamimi\"))\n\n .filter(|dir| dir.exists());\n\n\n", "file_path": "src/config.rs", "rank": 77, "score": 32268.019995452218 }, { "content": "\n\n pub fn aliases_dir(&self) -> PathBuf {\n\n self.base_dir_with_default()\n\n .join(\"aliases\")\n\n .ensure_exists_silently()\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn with_base_dir(mut self, base_dir: Option<PathBuf>) -> Self {\n\n self.base_dir = base_dir;\n\n self\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 78, "score": 32266.18250340588 }, { "content": " long,\n\n env = \"MAMIMI_LOGLEVEL\",\n\n default_value = \"info\",\n\n global = true,\n\n hide_env_values = true,\n\n possible_values = LogLevel::possible_values()\n\n )]\n\n log_level: LogLevel,\n\n\n\n /// A strategy for how to resolve the Python version.\n\n /// - `local`: use the local version of Python defined within the current directory\n\n #[clap(\n\n long,\n\n env = 
\"MAMIMI_VERSION_FILE_STRATEGY\",\n\n possible_values = VersionFileStrategy::possible_values(),\n\n default_value = \"local\",\n\n global = true,\n\n hide_env_values = true,\n\n )]\n\n version_file_strategy: VersionFileStrategy,\n", "file_path": "src/config.rs", "rank": 79, "score": 32265.93242465055 }, { "content": " #[clap(\n\n long = \"mamimi-dir\",\n\n env = \"MAMIMI_DIR\",\n\n global = true,\n\n hide_env_values = true\n\n )]\n\n pub base_dir: Option<PathBuf>,\n\n\n\n /// This value will be automatically populated.\n\n /// 'mamimi env' in your shell profile. Read more about it using 'mamimi help env'\n\n #[clap(\n\n long,\n\n env = \"MAMIMI_MULTISHELL_PATH\",\n\n hide_env_values = true,\n\n hide = true\n\n )]\n\n multishell_path: Option<PathBuf>,\n\n\n\n /// The log level of mamimi commands\n\n #[clap(\n", "file_path": "src/config.rs", "rank": 80, "score": 32264.1160012629 }, { "content": "pub mod extract;\n\npub mod tar_xz;\n", "file_path": "src/archive/mod.rs", "rank": 81, "score": 31225.576753503177 }, { "content": "use log::warn;\n\n\n", "file_path": "src/path_ext.rs", "rank": 82, "score": 30536.397042339086 }, { "content": "use crate::archive::extract::{Error, Extract};\n\nuse reqwest::blocking::Response;\n\nuse std::path::Path;\n\n\n\npub struct TarXz {\n\n response: Response,\n\n}\n\n\n\nimpl TarXz {\n\n #[allow(dead_code)]\n\n pub fn new(response: Response) -> Self {\n\n Self { response }\n\n }\n\n}\n\n\n\nimpl Extract for TarXz {\n\n fn extract_into<P: AsRef<Path>>(self, path: P) -> Result<(), Error> {\n\n let xz_stream = xz2::read::XzDecoder::new(self.response);\n\n let mut tar_archive = tar::Archive::new(xz_stream);\n\n tar_archive.unpack(&path)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/archive/tar_xz.rs", "rank": 83, "score": 30186.913342159332 }, { "content": "use crate::python_version::PythonVersion;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Clone, Debug)]\n\npub enum InputVersion {\n\n Major(u64),\n\n MajorMinor(u64, u64),\n\n 
Full(PythonVersion),\n\n}\n\n\n\nimpl InputVersion {\n\n pub fn to_version<'a, T>(&self, available_versions: T) -> Option<&'a PythonVersion>\n\n where\n\n T: IntoIterator<Item = &'a PythonVersion>,\n\n {\n\n available_versions\n\n .into_iter()\n\n .filter(|x| self.matches(x))\n\n .max()\n\n }\n", "file_path": "src/input_version.rs", "rank": 84, "score": 30015.61277608342 }, { "content": "\n\n pub fn matches(&self, version: &PythonVersion) -> bool {\n\n match (self, version) {\n\n (Self::Full(a), b) => a == b,\n\n (_, PythonVersion::System) => false,\n\n (Self::Major(major), PythonVersion::Semver(other)) => *major == other.major,\n\n (Self::MajorMinor(major, minor), PythonVersion::Semver(other)) => {\n\n *major == other.major && *minor == other.minor\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for InputVersion {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Full(x) => x.fmt(f),\n\n Self::Major(major) => write!(f, \"{}.x.x\", major),\n\n Self::MajorMinor(major, minor) => write!(f, \"{}.{}.x\", major, minor),\n\n }\n", "file_path": "src/input_version.rs", "rank": 85, "score": 30013.454267366214 }, { "content": " }\n\n}\n\n\n\nimpl FromStr for InputVersion {\n\n type Err = semver::Error;\n\n fn from_str(s: &str) -> Result<InputVersion, Self::Err> {\n\n match PythonVersion::parse(s) {\n\n Ok(v) => Ok(Self::Full(v)),\n\n Err(e) => {\n\n let mut parts = s.trim().split('.');\n\n match (next_of::<u64, _>(&mut parts), next_of::<u64, _>(&mut parts)) {\n\n (Some(major), None) => Ok(Self::Major(major)),\n\n (Some(major), Some(minor)) => Ok(Self::MajorMinor(major, minor)),\n\n _ => Err(e),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/input_version.rs", "rank": 86, "score": 30012.117478980734 }, { "content": "use std::path::PathBuf;\n\n\n", "file_path": "src/system_version.rs", "rank": 87, "score": 30001.54250087095 }, { "content": "pub struct Local {\n\n pub version: Option<InputVersion>,\n\n}\n\n\n\nimpl 
crate::commands::Command for Local {\n\n type Error = MamimiError;\n\n\n\n fn apply(&self, config: &crate::config::MamimiConfig) -> Result<(), Self::Error> {\n\n debug!(\"log level {:?}\", config.log_level);\n\n let current_python_version = match self.version.clone().ok_or_else(|| {\n\n match get_user_version_for_directory(std::env::current_dir().unwrap()) {\n\n Some(version) => Ok(version),\n\n None => {\n\n replace_symlink(\n\n &config.default_python_version_dir(),\n\n &config\n\n .mamimi_path\n\n .clone()\n\n .ok_or(MamimiError::MamimiPathNotFound)?,\n\n )?;\n", "file_path": "src/commands/local.rs", "rank": 88, "score": 29187.4534862857 }, { "content": " pub version: InputVersion,\n\n}\n\n\n\nimpl Command for Global {\n\n type Error = MamimiError;\n\n fn apply(&self, config: &crate::config::MamimiConfig) -> Result<(), Self::Error> {\n\n debug!(\"Use {} as the default version\", &self.version);\n\n let version = match self.version.clone() {\n\n InputVersion::Full(PythonVersion::Semver(v)) => PythonVersion::Semver(v),\n\n version => return Err(MamimiError::VersionNotFound { version }),\n\n };\n\n if !&config\n\n .python_versions_dir()\n\n .join(self.version.to_string())\n\n .exists()\n\n {\n\n return Err(MamimiError::VersionNotFound {\n\n version: self.version.clone(),\n\n });\n\n }\n\n create_alias(&config, \"default\", &version).map_err(MamimiError::IoError)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/global.rs", "rank": 89, "score": 29186.78023339363 }, { "content": "use crate::config::MamimiConfig;\n\nuse crate::input_version::InputVersion;\n\nuse crate::outln;\n\nuse crate::python_version::PythonVersion;\n\nuse crate::symlink::remove_symlink_dir;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum MamimiError {\n\n #[error(transparent)]\n\n HttpError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n #[error(\"Can't find the number of cores.\")]\n\n FromUtf8Error(#[from] 
std::string::FromUtf8Error),\n\n #[error(\"Can't find version: {version}\")]\n\n VersionNotFound { version: InputVersion },\n\n #[error(\"The reqwested version is not installable: {version}\")]\n\n NotInstallableVersion { version: PythonVersion },\n\n #[error(\"We can't find the necessary envitonment to replace the Python version.\")]\n", "file_path": "src/commands/uninstall.rs", "rank": 90, "score": 29181.589295909023 }, { "content": "use crate::input_version::InputVersion;\n\nuse crate::symlink::{create_symlink_dir, remove_symlink_dir};\n\nuse crate::version_file::get_user_version_for_directory;\n\nuse log::debug;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum MamimiError {\n\n #[error(transparent)]\n\n HttpError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n #[error(\"We can't find the necessary environment variables to replace the Ruby version.\")]\n\n MamimiPathNotFound,\n\n #[error(\"Requested version {version} is not currently installed\")]\n\n VersionNotFound { version: InputVersion },\n\n #[error(\"Can't find version in dotfiles. 
Please provide a version manually to the command.\")]\n\n CannotInferVersion,\n\n}\n\n\n", "file_path": "src/commands/local.rs", "rank": 91, "score": 29181.44273379553 }, { "content": "use super::command::Command;\n\nuse crate::alias::create_alias;\n\nuse crate::commands::versions;\n\nuse crate::input_version::InputVersion;\n\nuse crate::python_version::PythonVersion;\n\nuse log::debug;\n\nuse reqwest::Version;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum MamimiError {\n\n #[error(transparent)]\n\n HttpError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n #[error(\"Requested version {version} is not currently installed\")]\n\n VersionNotFound { version: InputVersion },\n\n}\n\n\n\npub struct Global {\n", "file_path": "src/commands/global.rs", "rank": 92, "score": 29180.439959492647 }, { "content": " MamimipathNotFound,\n\n}\n\n\n\npub struct Uninstall {\n\n version: Option<InputVersion>,\n\n}\n\n\n\nimpl super::command::Command for Uninstall {\n\n type Error = MamimiError;\n\n\n\n fn apply(&self, &config: MamimiConfig) -> Result<(), Self::Error> {}\n\n}\n", "file_path": "src/commands/uninstall.rs", "rank": 93, "score": 22.890159653661634 }, { "content": " Err(MamimiError::CannotInferVersion)\n\n }\n\n }\n\n }) {\n\n Ok(version) => version,\n\n Err(result) => result?,\n\n };\n\n debug!(\"Use {} as the current version\", current_python_version);\n\n if !&config\n\n .python_versions_dir()\n\n .join(current_python_version.to_string())\n\n .exists()\n\n {\n\n return Err(MamimiError::VersionNotFound {\n\n version: current_python_version,\n\n });\n\n }\n\n replace_symlink(\n\n &config\n\n .python_versions_dir()\n", "file_path": "src/commands/local.rs", "rank": 94, "score": 21.765241950756018 }, { "content": "#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)]\n\npub enum LogLevel {\n\n Quiet,\n\n Error,\n\n Info,\n\n}\n\n\n\nimpl LogLevel {\n\n pub fn is_writable(&self, logging: &Self) -> bool {\n\n 
use std::cmp::Ordering;\n\n matches!(self.cmp(logging), Ordering::Greater | Ordering::Equal)\n\n }\n\n\n\n pub fn write_for(&self, logging: &Self) -> Box<dyn std::io::Write> {\n\n if self.is_writable(logging) {\n\n match logging {\n\n Self::Error => Box::from(std::io::stderr()),\n\n _ => Box::from(std::io::stdout()),\n\n }\n\n } else {\n", "file_path": "src/log_level.rs", "rank": 95, "score": 20.882746531121676 }, { "content": "#![warn(rust_2021_idioms, clippy::all, clippy::pedantic)]\n\n#![allow(\n\n clippy::enum_variant_names,\n\n clippy::large_enum_variant,\n\n clippy::module_name_repetitions,\n\n clippy::similar_names\n\n)]\n\n\n\nmod alias;\n\nmod archive;\n\nmod cli;\n\nmod commands;\n\nmod config;\n\nmod current_python_version;\n\nmod input_version;\n\nmod log_level;\n\nmod path_ext;\n\nmod python_version;\n\nmod remote_python_index;\n\nmod shell;\n\nmod symlink;\n\nmod system_info;\n\nmod system_version;\n\nmod version_file_strategy;\n\nmod version_files;\n\n\n\n#[macro_use]\n\nmod directories;\n", "file_path": "src/lib.rs", "rank": 96, "score": 20.797586085045115 }, { "content": "pub struct Init {\n\n /// The shell syntax to use. Infers when missing.\n\n #[clap(long)]\n\n #[clap(possible_values=AVAILABLE_SHELLS)]\n\n shell: Option<Box<dyn Shell>>,\n\n /// Deprecated. 
This is the default now.\n\n #[clap(long, hide = true)]\n\n multi: bool,\n\n /// Print the script to change Node versions every directory change\n\n #[clap(long)]\n\n use_on_cd: bool,\n\n}\n\n\n\nimpl Command for Init {\n\n type Error = MamimiError;\n\n\n\n fn apply(&self, config: &MamimiConfig) -> Result<(), Self::Error> {\n\n if self.multi {\n\n outln!(\n\n config,\n", "file_path": "src/commands/init.rs", "rank": 97, "score": 19.656452603272818 }, { "content": "use crate::commands;\n\nuse crate::commands::command::Command;\n\nuse crate::config::MamimiConfig;\n\nuse clap::Parser;\n\n\n\n/// Blazingly falt python manager\n\n#[derive(clap::Parser, Debug, Parser, Debug)]\n\n#[clap(name=\"mamimi\",version=env!(\"CARGO_PKG_VERSION\"),bin_name=\"mamimi\")]\n\npub struct Cli {\n\n #[clap(flatten)]\n\n pub config: MamimiConfig,\n\n #[clap(subcommand)]\n\n pub subcmd: SubCommand,\n\n}\n\n\n\n#[derive(clap::Parser, Debug)]\n\npub enum SubCommand {\n\n /// Sets environment variables for initializing mamimi\n\n #[clap(name = \"init\")]\n\n Init(commands::init::Init),\n", "file_path": "src/cli.rs", "rank": 98, "score": 19.328781975312033 }, { "content": "impl SubCommand {\n\n pub fn call(self, config: MamimiConfig) {\n\n match self {\n\n Self::Init(cmd) => cmd.call(&config),\n\n Self::Install(cmd) => cmd.call(&config),\n\n Self::Uninstall(cmd) => cmd.call(config),\n\n Self::Versions(cmd) => cmd.call(config),\n\n Self::Local(cmd) => cmd.call(config),\n\n Self::Global(cmd) => cmd.call(&config),\n\n Self::Completions(cmd) => cmd.call(&config),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 99, "score": 19.28722773678568 } ]
Rust
src/tests/live_mocker.rs
w3f/polkadot-registrar-bot
3c5aa36cf5de8edae0ac434947eb585b7ca92c75
use crate::adapters::tests::MessageInjector; use crate::adapters::AdapterListener; use crate::database::Database; use crate::primitives::{ ExpectedMessage, ExternalMessage, ExternalMessageType, JudgementState, MessageId, Timestamp, }; use crate::tests::F; use crate::{config_session_notifier, DatabaseConfig, DisplayNameConfig, NotifierConfig, Result}; use rand::{thread_rng, Rng}; use tokio::time::{sleep, Duration}; #[actix::test] #[ignore] async fn run_mocker() -> Result<()> { tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .with_env_filter("system") .init(); let mut rng = thread_rng(); let db_config = DatabaseConfig { uri: "mongodb://localhost:27017".to_string(), name: format!("registrar_test_{}", rng.gen_range(u32::MIN..u32::MAX)), }; let notifier_config = NotifierConfig { api_address: "localhost:8888".to_string(), display_name: DisplayNameConfig { enabled: true, limit: 0.85, }, }; info!("Starting mock adapter and session notifier instances"); let db = Database::new(&db_config.uri, &db_config.name).await?; config_session_notifier(db.clone(), notifier_config).await?; let injector = MessageInjector::new(); let listener = AdapterListener::new(db.clone()).await; listener.start_message_adapter(injector.clone(), 1).await; info!("Mocker setup completed"); let mut alice = JudgementState::alice(); *alice .get_field_mut(&F::ALICE_DISPLAY_NAME()) .expected_display_name_check_mut() .0 = true; info!("INSERTING IDENTITY: Alice (1a2YiGNu1UUhJtihq8961c7FZtWGQuWDVMWTNBKJdmpGhZP)"); db.add_judgement_request(&alice).await.unwrap(); let mut rng = thread_rng(); loop { let ty_msg: u32 = rng.gen_range(0..2); let ty_validity = rng.gen_range(0..1); let reset = rng.gen_range(0..5); if reset == 0 { warn!("Resetting Identity"); db.delete_judgement(&alice.context).await.unwrap(); alice = JudgementState::alice(); *alice .get_field_mut(&F::ALICE_DISPLAY_NAME()) .expected_display_name_check_mut() .0 = true; db.add_judgement_request(&alice).await.unwrap(); } let (origin, values) 
= match ty_msg { 0 => { (ExternalMessageType::Email("[email protected]".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_EMAIL()) .expected_message() .to_message_parts(), 1 => ExpectedMessage::random().to_message_parts(), _ => panic!(), } }) } 1 => { (ExternalMessageType::Twitter("@alice".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_TWITTER()) .expected_message() .to_message_parts(), 1 => ExpectedMessage::random().to_message_parts(), _ => panic!(), } }) } 2 => { ( ExternalMessageType::Matrix("@alice:matrix.org".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_MATRIX()) .expected_message() .to_message_parts(), 1 => ExpectedMessage::random().to_message_parts(), _ => panic!(), } }, ) } _ => panic!(), }; injector .send(ExternalMessage { origin, id: MessageId::from(0u32), timestamp: Timestamp::now(), values, }) .await; sleep(Duration::from_secs(2)).await; } }
use crate::adapters::tests::MessageInjector; use crate::adapters::AdapterListener; use crate::database::Database; use crate::primitives::{ ExpectedMessage, ExternalMessage, ExternalMessageType, JudgementState, MessageId, Timestamp, }; use crate::tests::F; use crate::{config_session_notifier, DatabaseConfig, DisplayNameConfig, NotifierConfig, Result}; use rand::{thread_rng, Rng}; use tokio::time::{sleep, Duration}; #[actix::test] #[ignore] async fn run_mocker() -> Result<()> { tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .with_env_filter("system") .init(); let mut rng = thread_rng(); let db_config = DatabaseConfig { uri: "mongodb://localhost:27017".to_string(), name: format!("registra
r_test_{}", rng.gen_range(u32::MIN..u32::MAX)), }; let notifier_config = NotifierConfig { api_address: "localhost:8888".to_string(), display_name: DisplayNameConfig { enabled: true, limit: 0.85, }, }; info!("Starting mock adapter and session notifier instances"); let db = Database::new(&db_config.uri, &db_config.name).await?; config_session_notifier(db.clone(), notifier_config).await?; let injector = MessageInjector::new(); let listener = AdapterListener::new(db.clone()).await; listener.start_message_adapter(injector.clone(), 1).await; info!("Mocker setup completed"); let mut alice = JudgementState::alice(); *alice .get_field_mut(&F::ALICE_DISPLAY_NAME()) .expected_display_name_check_mut() .0 = true; info!("INSERTING IDENTITY: Alice (1a2YiGNu1UUhJtihq8961c7FZtWGQuWDVMWTNBKJdmpGhZP)"); db.add_judgement_request(&alice).await.unwrap(); let mut rng = thread_rng(); loop { let ty_msg: u32 = rng.gen_range(0..2); let ty_validity = rng.gen_range(0..1); let reset = rng.gen_range(0..5); if reset == 0 { warn!("Resetting Identity"); db.delete_judgement(&alice.context).await.unwrap(); alice = JudgementState::alice(); *alice .get_field_mut(&F::ALICE_DISPLAY_NAME()) .expected_display_name_check_mut() .0 = true; db.add_judgement_request(&alice).await.unwrap(); } let (origin, values) = match ty_msg { 0 => { (ExternalMessageType::Email("[email protected]".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_EMAIL()) .expected_message() .to_message_parts(), 1 => ExpectedMessage::random().to_message_parts(), _ => panic!(), } }) } 1 => { (ExternalMessageType::Twitter("@alice".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_TWITTER()) .expected_message() .to_message_parts(), 1 => ExpectedMessage::random().to_message_parts(), _ => panic!(), } }) } 2 => { ( ExternalMessageType::Matrix("@alice:matrix.org".to_string()), { match ty_validity { 0 => alice .get_field(&F::ALICE_MATRIX()) .expected_message() .to_message_parts(), 1 => 
ExpectedMessage::random().to_message_parts(), _ => panic!(), } }, ) } _ => panic!(), }; injector .send(ExternalMessage { origin, id: MessageId::from(0u32), timestamp: Timestamp::now(), values, }) .await; sleep(Duration::from_secs(2)).await; } }
function_block-function_prefixed
[ { "content": "fn try_decode_hex(display_name: &mut String) {\n\n if display_name.starts_with(\"0x\") {\n\n // Might be a false positive. Leave it as is if it cannot be decoded.\n\n if let Ok(name) = hex::decode(&display_name[2..]) {\n\n if let Ok(name) = String::from_utf8(name) {\n\n *display_name = name;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Message)]\n\n#[rtype(result = \"crate::Result<()>\")]\n\npub enum WatcherMessage {\n\n Ack(AckResponse),\n\n NewJudgementRequest(JudgementRequest),\n\n PendingJudgementsRequests(Vec<JudgementRequest>),\n\n ActiveDisplayNames(Vec<DisplayNameEntryRaw>),\n\n}\n\n\n\n#[derive(Debug, Clone, Message)]\n\n#[rtype(result = \"crate::Result<()>\")]\n\npub enum ClientCommand {\n\n ProvideJudgement(IdentityContext),\n\n RequestPendingJudgements,\n\n RequestDisplayNames,\n\n Ping,\n\n}\n\n\n", "file_path": "src/connector.rs", "rank": 0, "score": 107083.8355557614 }, { "content": "fn config() -> DisplayNameConfig {\n\n DisplayNameConfig {\n\n enabled: true,\n\n limit: 0.85,\n\n }\n\n}\n\n\n\n#[actix::test]\n\nasync fn valid_display_name() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let verifier = DisplayNameVerifier::new(db.clone(), config());\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n\n let mut alice = states[0].clone();\n\n verifier.verify_display_name(&alice).await.unwrap();\n\n\n\n // Subscribe to endpoint.\n", "file_path": "src/tests/display_name_verification.rs", "rank": 1, "score": 97071.94281184922 }, { "content": "fn open_config() -> Result<Config> {\n\n // Open config file.\n\n let content = fs::read_to_string(\"config.yaml\")\n\n .or_else(|_| fs::read_to_string(\"/etc/registrar/config.yaml\"))\n\n .map_err(|_| {\n\n anyhow!(\"Failed to open config at 'config.yaml' or '/etc/registrar/config.yaml'.\")\n\n })?;\n\n\n\n // Parse 
config file as JSON.\n\n let config = serde_yaml::from_str::<Config>(&content)\n\n .map_err(|err| anyhow!(\"Failed to parse config: {:?}\", err))?;\n\n\n\n Ok(config)\n\n}\n\n\n\nasync fn config_adapter_listener(db: Database, config: AdapterConfig) -> Result<()> {\n\n let watchers = config.watcher.clone();\n\n let dn_config = config.display_name.clone();\n\n run_adapters(config.clone(), db.clone()).await?;\n\n run_connector(db, watchers, dn_config).await\n", "file_path": "src/lib.rs", "rank": 2, "score": 89242.58649749053 }, { "content": "fn gen_timestamp() -> u64 {\n\n let start = SystemTime::now();\n\n start\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\")\n\n .as_secs()\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TwitterClient {\n\n client: Client,\n\n consumer_key: String,\n\n consumer_secret: String,\n\n token: String,\n\n token_secret: String,\n\n twitter_ids: HashMap<TwitterId, String>,\n\n // Keep track of messages.\n\n cache: HashSet<MessageId>,\n\n}\n\n\n", "file_path": "src/adapters/twitter.rs", "rank": 3, "score": 78167.21973482675 }, { "content": "fn is_too_similar(existing: &str, new: &str, limit: f64) -> bool {\n\n let name_str = existing.to_lowercase();\n\n let account_str = new.to_lowercase();\n\n\n\n let similarities = [\n\n jaro(&name_str, &account_str),\n\n jaro_words(&name_str, &account_str, &[\" \", \"-\", \"_\"]),\n\n ];\n\n\n\n similarities.iter().any(|&s| s > limit)\n\n}\n\n\n", "file_path": "src/display_name.rs", "rank": 4, "score": 59703.13923584939 }, { "content": "fn jaro_words(left: &str, right: &str, delimiter: &[&str]) -> f64 {\n\n fn splitter<'a>(string: &'a str, delimiter: &[&str]) -> Vec<&'a str> {\n\n let mut all = vec![];\n\n\n\n for del in delimiter {\n\n let mut words: Vec<&str> = string\n\n .split(del)\n\n .map(|s| s.trim())\n\n .filter(|s| !s.is_empty())\n\n .collect();\n\n\n\n all.append(&mut words);\n\n }\n\n\n\n all\n\n }\n\n\n\n let left_words = splitter(left, delimiter);\n\n let right_words = 
splitter(right, delimiter);\n\n\n", "file_path": "src/display_name.rs", "rank": 5, "score": 58040.77381133664 }, { "content": "export interface CheckDisplayNameResult {\n\n type: string;\n\n value: any;\n", "file_path": "www/src/json.ts", "rank": 6, "score": 50681.69699394974 }, { "content": "fn gen_nonce() -> String {\n\n let random: [u8; 16] = thread_rng().gen();\n\n hex::encode(random)\n\n}\n\n\n", "file_path": "src/adapters/twitter.rs", "rank": 7, "score": 46687.41923325757 }, { "content": "pub fn bob_judgement_request() -> WatcherMessage {\n\n WatcherMessage::new_judgement_request(JudgementRequest::bob())\n\n}\n\n\n\n// async fn new_env() -> (TestServer, ConnectorMocker, MessageInjector) {\n\nasync fn new_env() -> (Database, ConnectorMocker, TestServer, MessageInjector) {\n\n // Setup MongoDb database.\n\n let random: u32 = thread_rng().gen_range(u32::MIN..u32::MAX);\n\n let db = Database::new(\n\n \"mongodb://localhost:27017/\",\n\n &format!(\"registrar_test_{}\", random),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n // Setup API\n\n let (server, actor) = run_test_server(db.clone()).await;\n\n\n\n // Setup message verifier and injector.\n\n let injector = MessageInjector::new();\n", "file_path": "src/tests/mod.rs", "rank": 8, "score": 41074.689027450804 }, { "content": "pub fn alice_judgement_request() -> WatcherMessage {\n\n WatcherMessage::new_judgement_request(JudgementRequest::alice())\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 9, "score": 41074.689027450804 }, { "content": "/// Convenience function for creating a full identity context when only the\n\n/// address itself is present. 
Only supports Kusama and Polkadot for now.\n\npub fn create_context(address: ChainAddress) -> IdentityContext {\n\n let chain = if address.as_str().starts_with('1') {\n\n ChainName::Polkadot\n\n } else {\n\n ChainName::Kusama\n\n };\n\n\n\n IdentityContext { address, chain }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::primitives::JudgementState;\n\n\n\n #[test]\n\n fn command_status() {\n\n let resp = Command::from_str(\"status Alice\").unwrap();\n\n assert_eq!(\n\n resp,\n", "file_path": "src/adapters/admin.rs", "rank": 10, "score": 36219.999919922935 }, { "content": "use crate::connector::DisplayNameEntry;\n\nuse crate::database::Database;\n\nuse crate::primitives::{ChainName, IdentityContext, JudgementState};\n\nuse crate::{DisplayNameConfig, Result};\n\nuse strsim::jaro;\n\n\n\nconst VIOLATIONS_CAP: usize = 5;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct DisplayNameVerifier {\n\n db: Database,\n\n config: DisplayNameConfig,\n\n}\n\n\n\nimpl DisplayNameVerifier {\n\n pub fn new(db: Database, config: DisplayNameConfig) -> Self {\n\n DisplayNameVerifier { db, config }\n\n }\n\n pub async fn check_similarities(\n\n &self,\n", "file_path": "src/display_name.rs", "rank": 11, "score": 35366.951455406495 }, { "content": " break;\n\n }\n\n\n\n violations.push(existing);\n\n }\n\n }\n\n\n\n Ok(violations)\n\n }\n\n pub async fn verify_display_name(&self, state: &JudgementState) -> Result<()> {\n\n if !self.config.enabled {\n\n return Ok(());\n\n }\n\n\n\n let name = if let Some(name) = state.display_name() {\n\n name\n\n } else {\n\n return Ok(());\n\n };\n\n\n", "file_path": "src/display_name.rs", "rank": 12, "score": 35364.38318430528 }, { "content": " name: &str,\n\n chain: ChainName,\n\n // Skip comparison for this account, usually for the issuer itself\n\n // (required when re-requesting judgement).\n\n skip: Option<&IdentityContext>,\n\n ) -> Result<Vec<DisplayNameEntry>> {\n\n let current = 
self.db.fetch_display_names(chain).await?;\n\n\n\n let mut violations = vec![];\n\n for existing in current {\n\n if let Some(to_skip) = skip {\n\n // Skip account if specified.\n\n if &existing.context == to_skip {\n\n continue;\n\n }\n\n }\n\n\n\n if is_too_similar(name, &existing.display_name, self.config.limit) {\n\n // Only show up to `VIOLATIONS_CAP` violations.\n\n if violations.len() == VIOLATIONS_CAP {\n", "file_path": "src/display_name.rs", "rank": 13, "score": 35362.51735082216 }, { "content": " let violations = self\n\n .check_similarities(name, state.context.chain, Some(&state.context))\n\n .await?;\n\n\n\n if !violations.is_empty() {\n\n self.db\n\n .insert_display_name_violations(&state.context, &violations)\n\n .await?;\n\n } else {\n\n self.db.set_display_name_valid(state).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/display_name.rs", "rank": 14, "score": 35357.68551859916 }, { "content": " let mut total = 0.0;\n\n\n\n for left_word in &left_words {\n\n let mut temp = 0.0;\n\n\n\n for right_word in &right_words {\n\n let sim = jaro(left_word, right_word);\n\n\n\n if sim > temp {\n\n temp = sim;\n\n }\n\n }\n\n\n\n total += temp;\n\n }\n\n\n\n total as f64 / left_words.len().max(right_words.len()) as f64\n\n}\n", "file_path": "src/display_name.rs", "rank": 15, "score": 35357.19341340155 }, { "content": "use super::JsonResult;\n\nuse crate::connector::DisplayNameEntry;\n\nuse crate::database::Database;\n\nuse crate::primitives::ChainName;\n\nuse crate::{display_name::DisplayNameVerifier, DisplayNameConfig};\n\nuse actix::prelude::*;\n\nuse actix_web::{web, HttpResponse};\n\n\n\npub struct DisplayNameChecker {\n\n verifier: DisplayNameVerifier,\n\n}\n\n\n\nimpl Default for DisplayNameChecker {\n\n fn default() -> Self {\n\n panic!(\"DisplayNameChecker is not initialized\");\n\n }\n\n}\n\n\n\nimpl DisplayNameChecker {\n\n pub fn new(db: Database, config: DisplayNameConfig) -> Self {\n", "file_path": "src/api/display_name_check.rs", 
"rank": 16, "score": 32471.94885576872 }, { "content": "use super::*;\n\nuse crate::api::{JsonResult, ResponseAccountState};\n\nuse crate::connector::DisplayNameEntry;\n\nuse crate::display_name::DisplayNameVerifier;\n\nuse crate::primitives::{IdentityContext, IdentityFieldValue};\n\nuse crate::DisplayNameConfig;\n\nuse futures::{SinkExt, StreamExt};\n\n\n\nimpl From<&str> for DisplayNameEntry {\n\n fn from(val: &str) -> Self {\n\n DisplayNameEntry {\n\n display_name: val.to_string(),\n\n // Filler value.\n\n context: IdentityContext::bob(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests/display_name_verification.rs", "rank": 17, "score": 32471.917571951402 }, { "content": " DisplayNameChecker {\n\n verifier: DisplayNameVerifier::new(db, config),\n\n }\n\n }\n\n}\n\n\n\nimpl SystemService for DisplayNameChecker {}\n\nimpl Supervised for DisplayNameChecker {}\n\n\n\nimpl Actor for DisplayNameChecker {\n\n type Context = Context<Self>;\n\n}\n\n\n\nimpl Handler<CheckDisplayName> for DisplayNameChecker {\n\n type Result = ResponseActFuture<Self, JsonResult<Outcome>>;\n\n\n\n fn handle(&mut self, msg: CheckDisplayName, _ctx: &mut Self::Context) -> Self::Result {\n\n let verifier = self.verifier.clone();\n\n\n\n Box::pin(\n", "file_path": "src/api/display_name_check.rs", "rank": 18, "score": 32471.849759232467 }, { "content": " let mut alice = states[0].clone();\n\n verifier.verify_display_name(&alice).await.unwrap();\n\n\n\n // Subscribe to endpoint.\n\n stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Set expected result.\n\n let field = alice.get_field_mut(&IdentityFieldValue::DisplayName(\"Alice\".to_string()));\n\n let (passed, violations) = field.expected_display_name_check_mut();\n\n *passed = false;\n\n *violations = names;\n\n\n\n let expected = ResponseAccountState {\n\n state: alice.into(),\n\n // The UI already shows invalid display names in a specific way,\n\n // notification is not required.\n\n notifications: vec![],\n\n 
};\n\n\n\n // Check expected state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n}\n", "file_path": "src/tests/display_name_verification.rs", "rank": 19, "score": 32471.163108754627 }, { "content": " stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Set expected result.\n\n let field = alice.get_field_mut(&IdentityFieldValue::DisplayName(\"Alice\".to_string()));\n\n let (passed, violations) = field.expected_display_name_check_mut();\n\n *passed = true;\n\n *violations = vec![];\n\n\n\n let expected = ResponseAccountState {\n\n state: alice.into(),\n\n // The UI already shows invalid display names in a specific way,\n\n // notification is not required.\n\n notifications: vec![],\n\n };\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n}\n\n\n", "file_path": "src/tests/display_name_verification.rs", "rank": 20, "score": 32470.570906324563 }, { "content": "#[actix::test]\n\nasync fn invalid_display_name() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let verifier = DisplayNameVerifier::new(db.clone(), config());\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Pre-fill database with active display names\n\n let names = vec![\n\n DisplayNameEntry::from(\"Alice\"),\n\n DisplayNameEntry::from(\"alice\"),\n\n DisplayNameEntry::from(\"Alicee\"),\n\n ];\n\n\n\n for name in &names {\n\n db.insert_display_name(name).await.unwrap();\n\n }\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n", "file_path": "src/tests/display_name_verification.rs", "rank": 21, "score": 32470.353564503625 }, { "content": " )\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", 
tag = \"type\", content = \"value\")]\n\npub enum Outcome {\n\n Ok,\n\n Violations(Vec<DisplayNameEntry>),\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"JsonResult<Outcome>\")]\n\npub struct CheckDisplayName {\n\n pub check: String,\n\n pub chain: ChainName,\n\n}\n\n\n\npub async fn check_display_name(req: web::Json<CheckDisplayName>) -> HttpResponse {\n\n HttpResponse::Ok().json(\n\n DisplayNameChecker::from_registry()\n\n .send(req.into_inner())\n\n .await\n\n .unwrap(),\n\n )\n\n}\n", "file_path": "src/api/display_name_check.rs", "rank": 22, "score": 32469.854323663945 }, { "content": " async move {\n\n trace!(\"Received a similarities check: {:?}\", msg);\n\n verifier\n\n .check_similarities(msg.check.as_str(), msg.chain, None)\n\n .await\n\n .map(|violations| {\n\n let outcome = if violations.is_empty() {\n\n Outcome::Ok\n\n } else {\n\n Outcome::Violations(violations)\n\n };\n\n\n\n JsonResult::Ok(outcome)\n\n })\n\n .map_err(|err| {\n\n error!(\"Failed to check for display name similarities: {:?}\", err)\n\n })\n\n .unwrap_or_else(|_| JsonResult::Err(\"Backend error, contact admin\".to_string()))\n\n }\n\n .into_actor(self),\n", "file_path": "src/api/display_name_check.rs", "rank": 23, "score": 32468.79246156198 }, { "content": "type Subscriber = Recipient<JsonResult<ResponseAccountState>>;\n\n\n\n#[derive(Clone, Debug, Message)]\n\n#[rtype(result = \"()\")]\n\npub struct SubscribeAccountState {\n\n pub subscriber: Subscriber,\n\n pub id_context: IdentityContext,\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"()\")]\n\npub struct NotifyAccountState {\n\n pub state: JudgementStateBlanked,\n\n pub notifications: Vec<NotificationMessage>,\n\n}\n\n\n\n// Identical to `NotifyAccountState`, but gets sent from the server to the\n\n// session for type-safety purposes.\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, 
Message)]\n\n#[rtype(result = \"()\")]\n", "file_path": "src/api/judgement_state.rs", "rank": 24, "score": 26284.346072151526 }, { "content": "export interface DisplayNameChallenge {\n\n passed: boolean;\n\n violations: Violation[];\n", "file_path": "www/src/json.ts", "rank": 25, "score": 26062.29576610095 }, { "content": " handleDisplayNameCheck(data: GenericMessage, display_name: string) {\n\n this.manager.wipeIntroduction();\n\n\n\n if (data.type == \"ok\") {\n\n let check: CheckDisplayNameResult = data.message;\n\n if (check.type == \"ok\") {\n\n this.manager.setDisplayNameVerification(display_name, BadgeValid);\n\n } else if (check.type = \"violations\") {\n\n let violations: Violation[] = check.value;\n\n this.manager.setDisplayNameViolation(display_name, violations, false);\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#1\")\n\n }\n\n } else if (data.type == \"err\") {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#2\")\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#3\")\n\n }\n\n\n\n this.manager.resetButton();\n\n this.manager.wipeLiveUpdateInfo();\n\n this.manager.wipeVerificationOverviewContent();\n\n this.manager.wipeEmailSecondChallengeContent();\n\n this.manager.wipeUnsupportedContent();\n", "file_path": "www/src/index.ts", "rank": 26, "score": 23731.735520355636 }, { "content": " setDisplayNameVerification(name: string, validity: string) {\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${validity}</p>\n\n </div>\n\n `;\n", "file_path": "www/src/content.ts", "rank": 27, "score": 23724.203731100664 }, { "content": " setDisplayNameViolation(name: string, violations: Violation[], show_hint: boolean) {\n\n let listed = \"\";\n\n for (let v of violations) {\n\n listed += `<li>\"${v.display_name}\" (by account 
<em>${v.context.address}</em>)</li>`\n\n }\n\n\n\n let hint = \"\";\n\n if (show_hint) {\n\n hint = `<p><strong>Hint:</strong> You can check for valid display names by selecting <em>\"Validate Display Name\"</em> in the search bar.</p>`\n\n }\n\n\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${BadgeInvalid}. It's too similar to (an) existing display name(s):</p>\n\n <ul>\n\n ${listed}\n\n </ul>\n\n ${hint}\n\n </div>\n\n `;\n", "file_path": "www/src/content.ts", "rank": 28, "score": 23724.203731100664 }, { "content": "use crate::api::{LookupServer, NotifyAccountState};\n\nuse crate::database::Database;\n\nuse crate::primitives::{IdentityContext, JudgementState, Timestamp};\n\nuse crate::Result;\n\nuse actix::prelude::*;\n\nuse std::collections::HashMap;\n\nuse tokio::time::{sleep, Duration};\n\n\n\npub async fn run_session_notifier(mut db: Database, server: Addr<LookupServer>) {\n\n async fn local(\n\n db: &mut Database,\n\n server: &Addr<LookupServer>,\n\n event_counter: &mut u64,\n\n ) -> Result<()> {\n\n let (events, new_counter) = db.fetch_events(*event_counter).await?;\n\n let mut cache: HashMap<IdentityContext, JudgementState> = HashMap::new();\n\n\n\n for event in events {\n\n let state = match cache.get(event.context()) {\n\n Some(state) => state.clone(),\n", "file_path": "src/notifier.rs", "rank": 30, "score": 17.807090542878406 }, { "content": " sleep(Duration::from_secs(crate::tests::TEST_TIMEOUT)).await;\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl Adapter for MessageInjector {\n\n type MessageType = ();\n\n\n\n fn name(&self) -> &'static str {\n\n \"test_state_injector\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n let mut lock = self.messages.lock().await;\n\n Ok(std::mem::take(&mut *lock))\n\n }\n\n async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> {\n\n 
unimplemented!()\n\n }\n\n }\n\n}\n", "file_path": "src/adapters/mod.rs", "rank": 31, "score": 16.549242433912898 }, { "content": "use crate::database::Database;\n\nuse crate::primitives::{\n\n ExpectedMessage, ExternalMessage, IdentityFieldValue, NotificationMessage, Timestamp,\n\n};\n\nuse crate::{AdapterConfig, Result};\n\nuse tokio::time::{interval, Duration};\n\nuse tracing::Instrument;\n\n\n\npub mod admin;\n\npub mod email;\n\npub mod matrix;\n\npub mod twitter;\n\n\n\npub async fn run_adapters(config: AdapterConfig, db: Database) -> Result<()> {\n\n let listener = AdapterListener::new(db.clone()).await;\n\n // Convenience flat for logging\n\n let mut started = false;\n\n\n\n // Deconstruct struct to get around borrowing violations.\n\n let AdapterConfig {\n", "file_path": "src/adapters/mod.rs", "rank": 32, "score": 16.353006907564595 }, { "content": "#[macro_use]\n\nextern crate tracing;\n\n#[macro_use]\n\nextern crate anyhow;\n\n#[macro_use]\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate async_trait;\n\n\n\nuse actix::clock::sleep;\n\nuse adapters::matrix::MatrixHandle;\n\nuse primitives::ChainName;\n\nuse std::fs;\n\nuse std::time::Duration;\n\n\n\npub type Result<T> = std::result::Result<T, anyhow::Error>;\n\n\n\nuse adapters::run_adapters;\n\nuse api::run_rest_api_server;\n\nuse connector::run_connector;\n", "file_path": "src/lib.rs", "rank": 33, "score": 15.76507791188882 }, { "content": "use crate::adapters::admin::RawFieldName;\n\nuse crate::api::VerifyChallenge;\n\nuse crate::connector::DisplayNameEntry;\n\nuse crate::primitives::{\n\n ChainName, ChallengeType, Event, ExpectedMessage, ExternalMessage, IdentityContext,\n\n IdentityFieldValue, JudgementState, NotificationMessage, Timestamp,\n\n};\n\nuse crate::Result;\n\nuse bson::{doc, from_document, to_bson, to_document, Bson, Document};\n\nuse futures::StreamExt;\n\nuse mongodb::options::UpdateOptions;\n\nuse mongodb::{Client, Database as MongoDb};\n\nuse rand::{thread_rng, Rng};\n\nuse 
serde::Serialize;\n\n\n\nconst IDENTITY_COLLECTION: &str = \"identities\";\n\nconst EVENT_COLLECTION: &str = \"event_log\";\n\nconst DISPLAY_NAMES: &str = \"display_names\";\n\n\n\nconst DANGLING_THRESHOLD: u64 = 3600; // one hour\n\n\n\n/// Convenience trait. Converts a value to BSON.\n", "file_path": "src/database.rs", "rank": 34, "score": 15.441421882540876 }, { "content": "use crate::display_name::DisplayNameVerifier;\n\nuse crate::primitives::{\n\n ChainAddress, ChainName, IdentityContext, IdentityFieldValue, JudgementState, Timestamp,\n\n};\n\nuse crate::{Database, DisplayNameConfig, Result, WatcherConfig};\n\nuse actix::io::SinkWrite;\n\nuse actix::io::WriteHandler;\n\nuse actix::prelude::*;\n\nuse actix_codec::Framed;\n\nuse awc::{\n\n error::WsProtocolError,\n\n ws::{Codec, Frame, Message},\n\n BoxedSocket, Client,\n\n};\n\nuse futures::stream::{SplitSink, StreamExt};\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\nuse tokio::sync::mpsc::{self, UnboundedSender};\n\nuse tokio::sync::RwLock;\n", "file_path": "src/connector.rs", "rank": 35, "score": 14.611451966584735 }, { "content": " }\n\n\n\n Ok(messages)\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Adapter for TwitterClient {\n\n type MessageType = ();\n\n\n\n fn name(&self) -> &'static str {\n\n \"Twitter\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n self.request_messages().await\n\n }\n\n async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/adapters/twitter.rs", "rank": 36, "score": 14.33521590071381 }, { "content": "use super::*;\n\nuse tokio::time::{sleep, Duration};\n\n\n\n#[actix::test]\n\nasync fn background_outgoing_watcher_messages() {\n\n let (_db, mut connector, _api, _inj) = new_env().await;\n\n\n\n // Wait until enough messages have been sent to the Watcher (mocked).\n\n sleep(Duration::from_secs(10)).await;\n\n\n\n let (_out, 
counter) = connector.outgoing();\n\n assert!(counter.provide_judgement == 0);\n\n assert!(counter.request_pending_judgements > 5);\n\n assert!(counter.request_display_names > 5);\n\n assert!(counter.ping == 0);\n\n}\n\n\n\n// TODO: Test others, including judgement candidates\n", "file_path": "src/tests/background_tasks.rs", "rank": 37, "score": 14.197836430572272 }, { "content": "impl Adapter for MatrixClient {\n\n type MessageType = ();\n\n\n\n fn name(&self) -> &'static str {\n\n \"Matrix\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n let mut lock = self.messages.lock().await;\n\n // Return messages and wipe inner field.\n\n Ok(std::mem::take(&mut *lock))\n\n }\n\n async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/adapters/matrix.rs", "rank": 39, "score": 13.884158580788576 }, { "content": " }\n\n pub async fn fetch_display_names(&self, chain: ChainName) -> Result<Vec<DisplayNameEntry>> {\n\n let coll = self.db.collection::<DisplayNameEntry>(DISPLAY_NAMES);\n\n\n\n let mut cursor = coll\n\n .find(\n\n doc! 
{\n\n \"context.chain\": chain.to_bson()?,\n\n },\n\n None,\n\n )\n\n .await?;\n\n\n\n let mut names = vec![];\n\n while let Some(doc) = cursor.next().await {\n\n names.push(doc?);\n\n }\n\n\n\n Ok(names)\n\n }\n", "file_path": "src/database.rs", "rank": 40, "score": 13.742940841385852 }, { "content": " message\n\n ))\n\n .build()?;\n\n\n\n let _ = smtp.send(email.into())?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Adapter for EmailClient {\n\n type MessageType = ExpectedMessage;\n\n\n\n fn name(&self) -> &'static str {\n\n \"email\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n self.request_messages()\n\n }\n\n async fn send_message(&mut self, to: &str, content: Self::MessageType) -> Result<()> {\n\n Self::send_message(self, to, content.value.as_str()).await\n\n }\n\n}\n", "file_path": "src/adapters/email.rs", "rank": 41, "score": 13.48105082211293 }, { "content": "use crate::adapters::tests::MessageInjector;\n\nuse crate::adapters::AdapterListener;\n\nuse crate::api::JsonResult;\n\nuse crate::connector::{AccountType, JudgementRequest, WatcherMessage};\n\nuse crate::database::Database;\n\nuse crate::notifier::run_session_notifier;\n\nuse crate::primitives::IdentityFieldValue;\n\nuse crate::{api::tests::run_test_server, connector::tests::ConnectorMocker};\n\nuse actix_http::ws::{Frame, ProtocolError};\n\nuse actix_test::TestServer;\n\nuse actix_web_actors::ws::Message;\n\nuse rand::{thread_rng, Rng};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\nuse tokio::time::{sleep, Duration};\n\n\n\nmod api_judgement_state;\n\nmod background_tasks;\n\nmod display_name_verification;\n\nmod explicit;\n", "file_path": "src/tests/mod.rs", "rank": 42, "score": 13.476128084132965 }, { "content": " actix::spawn(async move {\n\n let _ = server.run().await;\n\n });\n\n\n\n Ok(actor)\n\n}\n\n\n\nasync fn account_status_server_route(\n\n req: HttpRequest,\n\n stream: web::Payload,\n\n) -> 
std::result::Result<HttpResponse, ActixError> {\n\n ws::start(WsAccountStatusSession::default(), &req, stream)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::database::Database;\n\n use crate::DisplayNameConfig;\n\n use actix_test::{start, TestServer};\n", "file_path": "src/api/mod.rs", "rank": 43, "score": 13.286841749810069 }, { "content": " // Not active request exists.\n\n Ok(None)\n\n }\n\n }\n\n pub async fn fetch_judgement_candidates(\n\n &self,\n\n network: ChainName,\n\n ) -> Result<Vec<JudgementState>> {\n\n let coll = self.db.collection::<JudgementState>(IDENTITY_COLLECTION);\n\n\n\n let mut cursor = coll\n\n .find(\n\n doc! {\n\n \"context.chain\": network.as_str().to_bson()?,\n\n \"is_fully_verified\": true,\n\n \"judgement_submitted\": false,\n\n \"issue_judgement_at\": {\n\n \"$lt\": Timestamp::now().to_bson()?,\n\n }\n\n },\n", "file_path": "src/database.rs", "rank": 44, "score": 13.25158585675759 }, { "content": "use super::*;\n\nuse crate::api::VerifyChallenge;\n\nuse crate::api::{JsonResult, ResponseAccountState};\n\nuse crate::connector::WatcherMessage;\n\nuse crate::primitives::{\n\n ExpectedMessage, ExternalMessage, ExternalMessageType, IdentityContext, MessageId,\n\n NotificationMessage, Timestamp,\n\n};\n\nuse actix_http::StatusCode;\n\nuse futures::{FutureExt, SinkExt, StreamExt};\n\n\n\n#[actix::test]\n\nasync fn current_judgement_state_single_identity() {\n\n let (_db, connector, mut api, _inj) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n\n let alice = states[0].clone();\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 45, "score": 13.146649162854091 }, { "content": " pub async fn new(uri: &str, db: &str) -> Result<Self> {\n\n Ok(Database {\n\n db: Client::with_uri_str(uri).await?.database(db),\n\n })\n\n 
}\n\n /// Simply checks if a connection could be established to the database.\n\n pub async fn connectivity_check(&self) -> Result<()> {\n\n self.db\n\n .list_collection_names(None)\n\n .await\n\n .map_err(|err| anyhow!(\"Failed to connect to database: {:?}\", err))\n\n .map(|_| ())\n\n }\n\n pub async fn add_judgement_request(&self, request: &JudgementState) -> Result<bool> {\n\n let coll = self.db.collection(IDENTITY_COLLECTION);\n\n\n\n // Check if a request of the same address exists yet (occurs when a\n\n // field gets updated during pending judgement process).\n\n let doc = coll\n\n .find_one(\n", "file_path": "src/database.rs", "rank": 46, "score": 13.097642972760354 }, { "content": " pub async fn insert_display_name(&self, name: &DisplayNameEntry) -> Result<()> {\n\n let coll = self.db.collection::<DisplayNameEntry>(DISPLAY_NAMES);\n\n\n\n coll.update_one(\n\n doc! {\n\n \"display_name\": name.display_name.to_bson()?,\n\n \"context\": name.context.to_bson()?,\n\n },\n\n doc! {\n\n \"$setOnInsert\": name.to_bson()?,\n\n },\n\n {\n\n let mut opt = UpdateOptions::default();\n\n opt.upsert = Some(true);\n\n Some(opt)\n\n },\n\n )\n\n .await?;\n\n\n\n Ok(())\n", "file_path": "src/database.rs", "rank": 47, "score": 12.610300316067903 }, { "content": " }\n\n }\n\n }\n\n\n\n // Check if the identity is fully verified.\n\n self.process_fully_verified(&id_state).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n /// Check if all fields have been verified.\n\n async fn process_fully_verified(&self, state: &JudgementState) -> Result<()> {\n\n let coll = self.db.collection::<JudgementState>(IDENTITY_COLLECTION);\n\n\n\n if state.check_full_verification() {\n\n // Create a timed delay for issuing judgments. Between 30 seconds to\n\n // 5 minutes. 
This is used to prevent timing attacks where a user\n\n // updates the identity right before the judgement is issued.\n\n let now = Timestamp::now();\n\n let offset = thread_rng().gen_range(30..300);\n", "file_path": "src/database.rs", "rank": 48, "score": 12.37714751281079 }, { "content": " // Request pending judgements every couple of seconds.\n\n fn start_pending_judgements_task(&self, ctx: &mut Context<Self>) {\n\n info!(\"Starting pending judgement requester background task\");\n\n\n\n ctx.run_interval(\n\n Duration::new(PENDING_JUDGEMENTS_INTERVAL, 0),\n\n |_act, ctx| {\n\n ctx.address()\n\n .do_send(ClientCommand::RequestPendingJudgements)\n\n },\n\n );\n\n }\n\n // Request actively used display names every couple of seconds.\n\n fn start_active_display_names_task(&self, ctx: &mut Context<Self>) {\n\n info!(\"Starting display name requester background task\");\n\n\n\n ctx.run_interval(Duration::new(DISPLAY_NAMES_INTERVAL, 0), |_act, ctx| {\n\n ctx.address().do_send(ClientCommand::RequestDisplayNames)\n\n });\n\n }\n", "file_path": "src/connector.rs", "rank": 49, "score": 12.273345537964344 }, { "content": " None,\n\n )\n\n .await?;\n\n\n\n let mut completed = vec![];\n\n while let Some(state) = cursor.next().await {\n\n completed.push(state?);\n\n }\n\n\n\n Ok(completed)\n\n }\n\n // (Warning) This fully verifies the identity without having to verify\n\n // individual fields.\n\n pub async fn full_manual_verification(&self, context: &IdentityContext) -> Result<bool> {\n\n let coll = self.db.collection::<JudgementState>(IDENTITY_COLLECTION);\n\n\n\n // Create a timed delay for issuing judgments. Between 30 seconds to\n\n // 5 minutes. 
This is used to prevent timing attacks where a user\n\n // updates the identity right before the judgement is issued.\n\n let now = Timestamp::now();\n", "file_path": "src/database.rs", "rank": 50, "score": 12.083492034696228 }, { "content": "use crate::adapters::Adapter;\n\nuse crate::primitives::{ExternalMessage, ExternalMessageType, MessageId, Timestamp};\n\nuse crate::Result;\n\nuse hmac::{Hmac, Mac};\n\nuse rand::{thread_rng, Rng};\n\nuse reqwest::header::{self, HeaderValue};\n\nuse reqwest::{Client, Request};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\nuse sha1::Sha1;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse std::{cmp::Ordering, hash::Hash};\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct ReceivedMessageContext {\n\n sender: TwitterId,\n\n id: u64,\n\n message: String,\n", "file_path": "src/adapters/twitter.rs", "rank": 51, "score": 12.069647577937449 }, { "content": " }\n\n pub async fn start_message_adapter<T>(&self, mut adapter: T, timeout: u64)\n\n where\n\n T: 'static + Adapter + Send,\n\n <T as Adapter>::MessageType: From<ExpectedMessage>,\n\n {\n\n let mut interval = interval(Duration::from_secs(timeout));\n\n\n\n let mut db = self.db.clone();\n\n let mut event_counter = Timestamp::now().raw();\n\n actix::spawn(async move {\n\n loop {\n\n // Timeout (skipped the first time);\n\n interval.tick().await;\n\n\n\n // Fetch message and send it to the listener, if any.\n\n match adapter.fetch_messages().await {\n\n Ok(messages) => {\n\n for message in messages {\n\n debug!(\"Processing message from: {:?}\", message.origin);\n", "file_path": "src/adapters/mod.rs", "rank": 53, "score": 11.913709928012931 }, { "content": " });\n\n\n\n self.cache.insert(id);\n\n }\n\n\n\n Ok(parsed_messages)\n\n }\n\n /// Creates a signature as documented here:\n\n /// 
https://developer.twitter.com/en/docs/authentication/oauth-1-0a/creating-a-signature\n\n fn authenticate_request(\n\n &self,\n\n url: &str,\n\n request: &mut Request,\n\n params: Option<&[(&str, &str)]>,\n\n ) -> Result<()> {\n\n use urlencoding::encode;\n\n\n\n // Prepare required data.\n\n let nonce = gen_nonce();\n\n let timestamp = gen_timestamp().to_string();\n", "file_path": "src/adapters/twitter.rs", "rank": 54, "score": 11.767452741767723 }, { "content": "}\n\n\n\nasync fn config_session_notifier(db: Database, not_config: NotifierConfig) -> Result<()> {\n\n let lookup = run_rest_api_server(not_config, db.clone()).await?;\n\n\n\n actix::spawn(async move { run_session_notifier(db, lookup).await });\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn run() -> Result<()> {\n\n let root = open_config()?;\n\n let (db_config, instance) = (root.db, root.instance);\n\n\n\n info!(\"Initializing connection to database\");\n\n let db = Database::new(&db_config.uri, &db_config.name).await?;\n\n db.connectivity_check().await?;\n\n\n\n match instance {\n\n InstanceType::AdapterListener(config) => {\n", "file_path": "src/lib.rs", "rank": 55, "score": 11.344779405318814 }, { "content": "use crate::primitives::{ChainAddress, ChainName, IdentityContext, JudgementStateBlanked};\n\nuse crate::Database;\n\nuse std::str::FromStr;\n\n\n\npub type Result<T> = std::result::Result<T, Response>;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub enum Command {\n\n Status(ChainAddress),\n\n Verify(ChainAddress, Vec<RawFieldName>),\n\n Help,\n\n}\n\n\n\nimpl FromStr for Command {\n\n type Err = Response;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n // Convenience handler.\n\n let s = s.trim().replace(\" \", \" \");\n\n\n", "file_path": "src/adapters/admin.rs", "rank": 56, "score": 11.223617417286496 }, { "content": " let expected = ResponseAccountState {\n\n state: alice.clone().into(),\n\n notifications: vec![NotificationMessage::ManuallyVerified {\n\n context: alice.context.clone(),\n\n 
field: RawFieldName::Web,\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn command_verify_all() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n", "file_path": "src/tests/process_admin_cmds.rs", "rank": 57, "score": 10.727328846818864 }, { "content": "// Makes it easier to read.\n\n#![allow(clippy::bool_assert_comparison)]\n\n\n\nuse super::*;\n\nuse crate::primitives::JudgementState;\n\n\n\n#[actix::test]\n\nasync fn default_state() {\n\n let alice = JudgementState::alice();\n\n\n\n assert_eq!(alice.is_fully_verified, false);\n\n assert_eq!(alice.completion_timestamp, None);\n\n assert_eq!(alice.judgement_submitted, false);\n\n assert_eq!(alice.issue_judgement_at, None);\n\n assert_eq!(\n\n alice\n\n .get_field(&F::ALICE_DISPLAY_NAME())\n\n .challenge\n\n .is_verified(),\n\n false\n", "file_path": "src/tests/explicit.rs", "rank": 58, "score": 10.661445940156925 }, { "content": "\n\n // Create OAuth 1.0 fields.\n\n let mut fields = vec![\n\n (\"oauth_consumer_key\", self.consumer_key.as_str()),\n\n (\"oauth_nonce\", nonce.as_str()),\n\n (\"oauth_signature_method\", \"HMAC-SHA1\"),\n\n (\"oauth_timestamp\", &timestamp),\n\n (\"oauth_token\", self.token.as_str()),\n\n (\"oauth_version\", \"1.0\"),\n\n ];\n\n\n\n if let Some(params) = params {\n\n fields.append(&mut params.to_vec());\n\n }\n\n\n\n fields.sort_by(|(a, _), (b, _)| a.cmp(b));\n\n\n\n let mut params = String::new();\n\n for (name, val) in &fields {\n\n params.push_str(&format!(\"{}={}&\", encode(name), encode(val)));\n", "file_path": "src/adapters/twitter.rs", "rank": 59, "score": 10.576703308747595 }, { "content": " state: alice.clone().into(),\n\n notifications: vec![NotificationMessage::ManuallyVerified {\n\n 
context: alice.context.clone(),\n\n field: RawFieldName::Twitter,\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn command_verify_unsupported_field() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement state with unsupported entry.\n", "file_path": "src/tests/process_admin_cmds.rs", "rank": 60, "score": 10.549153667367454 }, { "content": " true\n\n );\n\n assert_eq!(\n\n alice\n\n .get_field(&F::ALICE_EMAIL())\n\n .expected_second()\n\n .is_verified,\n\n true\n\n );\n\n}\n\n\n\n#[actix::test]\n\nasync fn set_verified_all() {\n\n let mut alice = JudgementState::alice();\n\n\n\n *alice\n\n .get_field_mut(&F::ALICE_DISPLAY_NAME())\n\n .expected_display_name_check_mut()\n\n .0 = true;\n\n alice\n", "file_path": "src/tests/explicit.rs", "rank": 61, "score": 10.543273273025445 }, { "content": "\n\n // Other judgement states must be unaffected (Bob).\n\n stream.send(IdentityContext::bob().to_ws()).await.unwrap();\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(bob.clone()))\n\n );\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn verify_valid_message_duplicate_account_name() {\n\n let (_db, connector, mut api, injector) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement requests.\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 62, "score": 10.491108619461773 }, { "content": "use std::collections::HashSet;\n\n\n\nuse crate::adapters::Adapter;\n\nuse crate::primitives::{\n\n ExpectedMessage, 
ExternalMessage, ExternalMessageType, MessageId, Timestamp,\n\n};\n\nuse crate::Result;\n\nuse lettre::smtp::authentication::Credentials;\n\nuse lettre::smtp::SmtpClient;\n\nuse lettre::Transport;\n\nuse lettre_email::EmailBuilder;\n\n\n", "file_path": "src/adapters/email.rs", "rank": 63, "score": 10.465081775158573 }, { "content": "use crate::adapters::admin::{process_admin, Command, Response};\n\nuse crate::adapters::Adapter;\n\nuse crate::primitives::{ExternalMessage, ExternalMessageType, Timestamp};\n\nuse crate::{Database, Result};\n\nuse matrix_sdk::events::room::member::MemberEventContent;\n\nuse matrix_sdk::events::room::message::MessageEventContent;\n\nuse matrix_sdk::events::{AnyMessageEventContent, StrippedStateEvent, SyncMessageEvent};\n\nuse matrix_sdk::room::Room;\n\nuse matrix_sdk::{Client, ClientConfig, EventHandler, SyncSettings};\n\nuse ruma::events::room::message::{MessageType, TextMessageEventContent};\n\nuse std::str::FromStr;\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\nuse tokio::time::{self, Duration};\n\nuse url::Url;\n\n\n\nconst REJOIN_DELAY: u64 = 10;\n\nconst REJOIN_MAX_ATTEMPTS: usize = 5;\n\n\n\n#[derive(Clone)]\n", "file_path": "src/adapters/matrix.rs", "rank": 64, "score": 10.346283530951164 }, { "content": " .expected_message_mut()\n\n .verify_message(&msg);\n\n assert!(changed);\n\n\n\n db.verify_message(&msg).await.unwrap();\n\n\n\n // Check updated state with notification.\n\n // Identity is fully verified now.\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n // The completion timestamp is not that important, as long as it's `Some`.\n\n let completion_timestamp = match &resp {\n\n JsonResult::Ok(r) => r.state.completion_timestamp.clone(),\n\n _ => panic!(),\n\n };\n\n\n\n assert!(completion_timestamp.is_some());\n\n alice.is_fully_verified = true;\n\n alice.completion_timestamp = completion_timestamp;\n\n\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 65, "score": 
10.156394193612824 }, { "content": "use self::judgement_state::WsAccountStatusSession;\n\nuse crate::database::Database;\n\nuse crate::{NotifierConfig, Result};\n\nuse actix::prelude::*;\n\nuse actix::registry::SystemRegistry;\n\nuse actix_cors::Cors;\n\nuse actix_web::{web, App, Error as ActixError, HttpRequest, HttpResponse, HttpServer};\n\nuse actix_web_actors::ws;\n\nuse display_name_check::{check_display_name, DisplayNameChecker};\n\nuse second_challenge::{verify_second_challenge, SecondChallengeVerifier};\n\n\n\nmod display_name_check;\n\nmod judgement_state;\n\nmod second_challenge;\n\n\n\n// Reexport\n\npub use self::judgement_state::{LookupServer, NotifyAccountState, ResponseAccountState};\n\npub use self::second_challenge::VerifyChallenge;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Message)]\n", "file_path": "src/api/mod.rs", "rank": 66, "score": 10.149052308238062 }, { "content": "}\n\n\n\nimpl ExpectedMessage {\n\n pub fn random() -> Self {\n\n use rand::{thread_rng, Rng};\n\n\n\n let random: [u8; 16] = thread_rng().gen();\n\n ExpectedMessage {\n\n value: hex::encode(random),\n\n is_verified: false,\n\n }\n\n }\n\n pub fn verify_message(&mut self, message: &ExternalMessage) -> bool {\n\n for value in &message.values {\n\n if value.0.contains(&self.value) {\n\n self.set_verified();\n\n return true;\n\n }\n\n }\n\n\n", "file_path": "src/primitives.rs", "rank": 67, "score": 9.944228312380325 }, { "content": "use super::*;\n\nuse crate::adapters::admin::{process_admin, Command, RawFieldName, Response};\n\nuse crate::api::{JsonResult, ResponseAccountState};\n\nuse crate::primitives::{\n\n IdentityContext, IdentityFieldValue, JudgementStateBlanked, NotificationMessage,\n\n};\n\nuse futures::{FutureExt, SinkExt, StreamExt};\n\n\n\n#[actix::test]\n\nasync fn command_status() {\n\n let (db, connector, _api, _) = new_env().await;\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = 
connector.inserted_states().await;\n\n let alice = states[0].clone();\n\n\n\n // Request status.\n\n let res = process_admin(&db, Command::Status(alice.context.address.clone())).await;\n\n assert_eq!(res, Response::Status(JudgementStateBlanked::from(alice)));\n", "file_path": "src/tests/process_admin_cmds.rs", "rank": 68, "score": 9.896545248796128 }, { "content": " };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(exp_resp));\n\n\n\n // Verify Twitter.\n\n let msg = ExternalMessage {\n\n origin: ExternalMessageType::Twitter(\"@alice\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_TWITTER())\n\n .expected_message()\n\n .to_message_parts(),\n\n };\n\n\n\n let changed = alice\n\n .get_field_mut(&F::ALICE_TWITTER())\n\n .expected_message_mut()\n\n .verify_message(&msg);\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 69, "score": 9.766987620966706 }, { "content": " Ok(())\n\n }.into_actor(self)\n\n )\n\n }\n\n}\n\n\n\n/// Handle websocket messages received from the Watcher. 
Those messages will be\n\n/// forwarded to the `Handler<WatcherMessage>` implementation.\n\nimpl StreamHandler<std::result::Result<Frame, WsProtocolError>> for Connector {\n\n fn handle(\n\n &mut self,\n\n msg: std::result::Result<Frame, WsProtocolError>,\n\n ctx: &mut Context<Self>,\n\n ) {\n\n async fn local(\n\n conn: Addr<Connector>,\n\n msg: std::result::Result<Frame, WsProtocolError>,\n\n ) -> Result<()> {\n\n let parsed: ResponseMessage<serde_json::Value> = match msg {\n\n Ok(Frame::Text(txt)) => serde_json::from_slice(&txt)?,\n", "file_path": "src/connector.rs", "rank": 70, "score": 9.709526487863156 }, { "content": "#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Timestamp(u64);\n\n\n\nimpl Timestamp {\n\n pub fn now() -> Self {\n\n use std::time::{SystemTime, UNIX_EPOCH};\n\n\n\n let start = SystemTime::now();\n\n let time = start\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Failed to calculate UNIX time\")\n\n .as_secs();\n\n\n\n Timestamp(time)\n\n }\n\n pub fn with_offset(offset: u64) -> Self {\n\n let now = Self::now();\n\n Timestamp(now.0 + offset)\n\n }\n", "file_path": "src/primitives.rs", "rank": 71, "score": 9.466897271343141 }, { "content": " Twitter,\n\n Matrix,\n\n // Represents the full identity\n\n All,\n\n}\n\n\n\nimpl std::fmt::Display for RawFieldName {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", {\n\n match self {\n\n RawFieldName::LegalName => \"legal_name\",\n\n RawFieldName::DisplayName => \"display_name\",\n\n RawFieldName::Email => \"email\",\n\n RawFieldName::Web => \"web\",\n\n RawFieldName::Twitter => \"twitter\",\n\n RawFieldName::Matrix => \"matrix\",\n\n RawFieldName::All => \"all\",\n\n }\n\n })\n\n }\n", "file_path": "src/adapters/admin.rs", "rank": 72, "score": 9.458510360182249 }, { "content": "#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"message\")]\n\n#[rtype(result = 
\"()\")]\n\npub enum JsonResult<T> {\n\n Ok(T),\n\n Err(String),\n\n}\n\n\n\nasync fn healthcheck() -> HttpResponse {\n\n HttpResponse::Ok().body(\"OK\")\n\n}\n\n\n\npub async fn run_rest_api_server(\n\n config: NotifierConfig,\n\n db: Database,\n\n) -> Result<Addr<LookupServer>> {\n\n // Add configured actor to the registry.\n\n let actor = LookupServer::new(db.clone()).start();\n\n SystemRegistry::set(actor.clone());\n\n SystemRegistry::set(SecondChallengeVerifier::new(db.clone()).start());\n\n SystemRegistry::set(DisplayNameChecker::new(db, config.display_name).start());\n", "file_path": "src/api/mod.rs", "rank": 73, "score": 9.432192657185634 }, { "content": " } => {\n\n if let Some(second) = second {\n\n Ok(second.clone())\n\n } else {\n\n Err(anyhow!(\"No second challenge found for {:?}\", field))\n\n }\n\n }\n\n _ => Err(anyhow!(\"No second challenge found for {:?}\", field)),\n\n }\n\n } else {\n\n Err(anyhow!(\"No entry found for {:?}\", field))\n\n }\n\n }\n\n pub async fn fetch_events(\n\n &mut self,\n\n mut after: u64,\n\n ) -> Result<(Vec<NotificationMessage>, u64)> {\n\n let coll = self.db.collection(EVENT_COLLECTION);\n\n\n\n let mut cursor = coll\n", "file_path": "src/database.rs", "rank": 74, "score": 9.393903890928012 }, { "content": " assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice.clone()))\n\n );\n\n\n\n // Verify Display name (does not create notification).\n\n db.set_display_name_valid(&alice).await.unwrap();\n\n let passed = alice\n\n .get_field_mut(&F::ALICE_DISPLAY_NAME())\n\n .expected_display_name_check_mut()\n\n .0;\n\n *passed = true;\n\n\n\n // Check updated state with notification.\n\n let exp_resp = ResponseAccountState {\n\n state: alice.clone().into(),\n\n notifications: vec![NotificationMessage::FieldVerified {\n\n context: alice.context.clone(),\n\n field: F::ALICE_DISPLAY_NAME(),\n\n }],\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 75, "score": 9.387019658379916 }, { 
"content": " }\n\n .into_actor(self),\n\n )\n\n }\n\n}\n\n\n\nimpl Handler<NotifyAccountState> for LookupServer {\n\n type Result = ResponseActFuture<Self, ()>;\n\n\n\n fn handle(&mut self, msg: NotifyAccountState, _ctx: &mut Self::Context) -> Self::Result {\n\n let sessions = Arc::clone(&self.sessions);\n\n\n\n Box::pin(\n\n async move {\n\n // Move all subscribers into a temporary storage. Subscribers who\n\n // still have an active session open will be added back later.\n\n let mut to_reinsert = vec![];\n\n\n\n if let Some(subscribers) = sessions.read().await.get(&msg.state.context) {\n\n // Notify each subscriber.\n", "file_path": "src/api/judgement_state.rs", "rank": 76, "score": 9.370005255807303 }, { "content": " debug!(\"Sending ping to Watcher over websocket stream\");\n\n\n\n sink.write(Message::Text(\"ping\".to_string().into()))\n\n .map_err(|err| anyhow!(\"failed to send ping over websocket: {:?}\", err))?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n// Handle messages that were received from the Watcher.\n\nimpl Handler<WatcherMessage> for Connector {\n\n type Result = ResponseActFuture<Self, crate::Result<()>>;\n\n\n\n fn handle(&mut self, msg: WatcherMessage, _ctx: &mut Context<Self>) -> Self::Result {\n\n /// Handle a judgement request.\n\n async fn process_request(\n\n db: &Database,\n\n id: IdentityContext,\n", "file_path": "src/connector.rs", "rank": 77, "score": 9.316052214365522 }, { "content": " IdentityFieldValue::DisplayName(name) => name.as_str(),\n\n _ => panic!(\"Failed to get display name. 
This is a bug.\"),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[rtype(result = \"()\")]\n\npub struct ExternalMessage {\n\n pub origin: ExternalMessageType,\n\n pub id: MessageId,\n\n pub timestamp: Timestamp,\n\n pub values: Vec<MessagePart>,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"value\")]\n\npub enum ExternalMessageType {\n\n Email(String),\n", "file_path": "src/primitives.rs", "rank": 78, "score": 9.308584061672557 }, { "content": " )\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n pub async fn verify_second_challenge(&self, mut request: VerifyChallenge) -> Result<bool> {\n\n let coll = self.db.collection::<JudgementState>(IDENTITY_COLLECTION);\n\n\n\n let mut verified = false;\n\n\n\n // Trim received challenge, just in case.\n\n request.challenge = request.challenge.trim().to_string();\n\n\n\n // Query database.\n\n let mut cursor = coll\n\n .find(\n\n doc! 
{\n\n \"fields.value\": request.entry.to_bson()?,\n\n },\n", "file_path": "src/database.rs", "rank": 79, "score": 9.216236006305706 }, { "content": " *event_counter = new_counter;\n\n\n\n Ok(())\n\n }\n\n\n\n let mut event_counter = Timestamp::now().raw();\n\n loop {\n\n if let Err(err) = local(&mut db, &server, &mut event_counter).await {\n\n error!(\"Error in session notifier event loop: {:?}\", err);\n\n }\n\n\n\n // Fetch events based on intervals until [\"Change\n\n // Streams\"](https://docs.mongodb.com/manual/changeStreams/) are\n\n // implemented in the Rust MongoDb driver.\n\n sleep(Duration::from_secs(1)).await;\n\n }\n\n}\n", "file_path": "src/notifier.rs", "rank": 80, "score": 9.18359867178811 }, { "content": "}\n\n\n\nimpl SystemService for SecondChallengeVerifier {}\n\nimpl Supervised for SecondChallengeVerifier {}\n\n\n\nimpl Actor for SecondChallengeVerifier {\n\n type Context = Context<Self>;\n\n}\n\n\n\nimpl Handler<VerifyChallenge> for SecondChallengeVerifier {\n\n type Result = ResponseActFuture<Self, JsonResult<bool>>;\n\n\n\n fn handle(&mut self, msg: VerifyChallenge, _ctx: &mut Self::Context) -> Self::Result {\n\n let db = self.db.clone();\n\n\n\n Box::pin(\n\n async move {\n\n debug!(\"Received second challenge: {:?}\", msg);\n\n db.verify_second_challenge(msg)\n\n .await\n", "file_path": "src/api/second_challenge.rs", "rank": 81, "score": 9.176854200809043 }, { "content": " pub async fn inserted_states(&self) -> Vec<JudgementState> {\n\n let mut states = self.inserted_states.write().await;\n\n std::mem::take(&mut states)\n\n }\n\n /// A list of messages that were sent to the Watcher (mocked).\n\n pub fn outgoing(&mut self) -> (Vec<ClientCommand>, OutgoingCounter) {\n\n let mut outgoing = vec![];\n\n let mut counter = OutgoingCounter::default();\n\n\n\n while let Ok(msg) = self.queue.try_recv() {\n\n match msg {\n\n ClientCommand::ProvideJudgement(_) => counter.provide_judgement += 1,\n\n ClientCommand::RequestPendingJudgements => {\n\n 
counter.request_pending_judgements += 1\n\n }\n\n ClientCommand::RequestDisplayNames => counter.request_display_names += 1,\n\n ClientCommand::Ping => counter.ping += 1,\n\n }\n\n\n\n outgoing.push(msg);\n", "file_path": "src/connector.rs", "rank": 82, "score": 9.13797049362144 }, { "content": "impl Handler<ClientCommand> for Connector {\n\n type Result = crate::Result<()>;\n\n\n\n fn handle(&mut self, msg: ClientCommand, ctx: &mut Context<Self>) -> Self::Result {\n\n let span = debug_span!(\"handling_client_message\");\n\n\n\n // NOTE: make sure no async code comes after this.\n\n let _guard = span.enter();\n\n debug!(\n\n network = self.network.as_str(),\n\n endpoint = self.endpoint.as_str()\n\n );\n\n\n\n // If the sink (outgoing WS stream) is not configured (i.e. when\n\n // testing), send the client command to the channel.\n\n if self.sink.is_none() {\n\n warn!(\"Skipping message to Watcher, not configured (only occurs when testing)\");\n\n self.outgoing.send(msg).unwrap();\n\n return Ok(());\n\n }\n", "file_path": "src/connector.rs", "rank": 83, "score": 9.13143463508721 }, { "content": " field: state\n\n .fields\n\n .iter()\n\n .find(|field| matches!(field.value, IdentityFieldValue::DisplayName(_)))\n\n .map(|field| field.value.clone())\n\n .expect(\"Failed to retrieve display name. 
This is a bug\"),\n\n })\n\n .await?;\n\n\n\n self.process_fully_verified(state).await?;\n\n\n\n Ok(())\n\n }\n\n pub async fn insert_display_name_violations(\n\n &self,\n\n context: &IdentityContext,\n\n violations: &Vec<DisplayNameEntry>,\n\n ) -> Result<()> {\n\n let coll = self.db.collection::<()>(IDENTITY_COLLECTION);\n\n\n", "file_path": "src/database.rs", "rank": 84, "score": 9.105722899260162 }, { "content": " mut accounts: HashMap<AccountType, String>,\n\n dn_verifier: &DisplayNameVerifier,\n\n inserted_states: &Arc<RwLock<Vec<JudgementState>>>,\n\n ) -> Result<()> {\n\n // Decode display name if appropriate.\n\n if let Some((_, val)) = accounts\n\n .iter_mut()\n\n .find(|(ty, _)| *ty == &AccountType::DisplayName)\n\n {\n\n try_decode_hex(val);\n\n }\n\n\n\n let state = JudgementState::new(id, accounts.into_iter().map(|a| a.into()).collect());\n\n\n\n // Add the judgement state that's about to get inserted into the\n\n // local queue which is then fetched from the unit tests.\n\n #[cfg(not(test))]\n\n let _ = inserted_states;\n\n #[cfg(test)]\n\n {\n", "file_path": "src/connector.rs", "rank": 85, "score": 9.074915148287527 }, { "content": "\n\n *bob.get_field_mut(&F::ALICE_MATRIX()).failed_attempts_mut() = 1;\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(bob.clone()))\n\n );\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn verify_valid_message_awaiting_second_challenge() {\n\n let (_db, connector, mut api, injector) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement requests.\n\n connector.inject(alice_judgement_request()).await;\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 86, "score": 9.03051566542111 }, { "content": " }\n\n pub async fn verify_message(&self, message: 
&ExternalMessage) -> Result<()> {\n\n let coll = self.db.collection(IDENTITY_COLLECTION);\n\n\n\n // Fetch the current field state based on the message origin.\n\n let mut cursor = coll\n\n .find(\n\n doc! {\n\n \"fields.value\": message.origin.to_bson()?,\n\n },\n\n None,\n\n )\n\n .await?;\n\n\n\n // If a field was found, update it.\n\n while let Some(doc) = cursor.next().await {\n\n let mut id_state: JudgementState = from_document(doc?)?;\n\n let field_state = id_state\n\n .fields\n\n .iter_mut()\n", "file_path": "src/database.rs", "rank": 87, "score": 8.93592106437844 }, { "content": " let lookup_results = self.lookup_twitter_id(Some(&to_lookup), None).await?;\n\n self.twitter_ids.extend(lookup_results);\n\n }\n\n\n\n // Parse all messages into `TwitterMessage`.\n\n let mut parsed_messages = vec![];\n\n for message in messages {\n\n let sender = self\n\n .twitter_ids\n\n .get(&message.sender)\n\n .ok_or_else(|| anyhow!(\"Failed to find Twitter handle based on Id\"))?\n\n .clone();\n\n\n\n let id = message.id.into();\n\n\n\n parsed_messages.push(ExternalMessage {\n\n origin: ExternalMessageType::Twitter(sender),\n\n id,\n\n timestamp: Timestamp::now(),\n\n values: vec![message.message.into()],\n", "file_path": "src/adapters/twitter.rs", "rank": 88, "score": 8.907123718261582 }, { "content": " if res.deleted_count != 1 {\n\n panic!()\n\n }\n\n\n\n Ok(())\n\n }\n\n pub async fn verify_manually(\n\n &self,\n\n context: &IdentityContext,\n\n field: &RawFieldName,\n\n // Whether it should check if the idenity has been fully verified.\n\n full_check: bool,\n\n ) -> Result<Option<()>> {\n\n let coll = self.db.collection::<JudgementState>(IDENTITY_COLLECTION);\n\n\n\n // Set the appropriate types for verification.\n\n let update = match field {\n\n // For \"ChallengeType::ExpectedMessage\".\n\n RawFieldName::Twitter | RawFieldName::Matrix => {\n\n doc! 
{\n", "file_path": "src/database.rs", "rank": 89, "score": 8.874809140130232 }, { "content": " assert_eq!(resp, JsonResult::Ok(expected));\n\n\n\n // Other judgement state must be unaffected (Bob).\n\n stream.send(IdentityContext::bob().to_ws()).await.unwrap();\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(bob.clone()))\n\n );\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn verify_full_identity() {\n\n let (db, connector, mut api, _injector) = new_env().await;\n\n let mut stream_alice = api.ws_at(\"/api/account_status\").await.unwrap();\n\n let mut stream_bob = api.ws_at(\"/api/account_status\").await.unwrap();\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 90, "score": 8.866093400077563 }, { "content": " field: F::ALICE_EMAIL(),\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(exp_resp));\n\n\n\n // Verify Matrix.\n\n let msg = ExternalMessage {\n\n origin: ExternalMessageType::Matrix(\"@alice:matrix.org\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_MATRIX())\n\n .expected_message()\n\n .to_message_parts(),\n\n };\n\n\n\n let changed = alice\n\n .get_field_mut(&F::ALICE_MATRIX())\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 91, "score": 8.858483461536313 }, { "content": " let mut l = inserted_states.write().await;\n\n (*l).push(state.clone());\n\n }\n\n\n\n // Insert identity into the database.\n\n let was_updated = db.add_judgement_request(&state).await?;\n\n // Only verify display name if there have been changes to the state.\n\n if was_updated {\n\n // Get the latest state.\n\n let state = db.fetch_judgement_state(&state.context).await?.expect(\n\n \"failed to fetch judgement 
state for display name verification. This is a bug.\",\n\n );\n\n\n\n dn_verifier.verify_display_name(&state).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n // Update timestamp\n", "file_path": "src/connector.rs", "rank": 92, "score": 8.8176411734865 }, { "content": "pub async fn run_connector(\n\n db: Database,\n\n watchers: Vec<WatcherConfig>,\n\n dn_config: DisplayNameConfig,\n\n) -> Result<()> {\n\n if watchers.is_empty() {\n\n warn!(\"No watcher is configured. Cannot process any requests or issue judgments\");\n\n return Ok(());\n\n }\n\n\n\n for config in watchers {\n\n let span = info_span!(\"connector_initialization\");\n\n span.in_scope(|| {\n\n debug!(\n\n network = config.network.as_str(),\n\n endpoint = config.endpoint.as_str()\n\n );\n\n });\n\n\n\n async {\n", "file_path": "src/connector.rs", "rank": 93, "score": 8.790846100602485 }, { "content": " db,\n\n sessions: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl SystemService for LookupServer {}\n\nimpl Supervised for LookupServer {}\n\n\n\nimpl Actor for LookupServer {\n\n type Context = Context<Self>;\n\n\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n self.subscribe_system_async::<NotifyAccountState>(ctx);\n\n }\n\n}\n\n\n\nimpl Handler<SubscribeAccountState> for LookupServer {\n\n type Result = ResponseActFuture<Self, ()>;\n\n\n", "file_path": "src/api/judgement_state.rs", "rank": 94, "score": 8.768890474906607 }, { "content": " pub async fn set_display_name_valid(&self, state: &JudgementState) -> Result<()> {\n\n let coll = self.db.collection::<()>(IDENTITY_COLLECTION);\n\n\n\n coll.update_one(\n\n doc! {\n\n \"context\": state.context.to_bson()?,\n\n \"fields.value.type\": \"display_name\",\n\n },\n\n doc! 
{\n\n \"$set\": {\n\n \"fields.$.challenge.content.passed\": true,\n\n }\n\n },\n\n None,\n\n )\n\n .await?;\n\n\n\n // Create event\n\n self.insert_event(NotificationMessage::FieldVerified {\n\n context: state.context.clone(),\n", "file_path": "src/database.rs", "rank": 95, "score": 8.70191455414253 }, { "content": " let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n\n\n // Other judgement states must be unaffected (Bob).\n\n stream.send(IdentityContext::bob().to_ws()).await.unwrap();\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(bob.clone()))\n\n );\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn verify_invalid_message_bad_origin() {\n\n let (_db, connector, mut api, injector) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 96, "score": 8.679022459194465 }, { "content": "}\n\n\n\n#[actix::test]\n\nasync fn command_verify_multiple_challenge_types() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n\n let mut alice = states[0].clone();\n\n\n\n // Subscribe to endpoint.\n\n stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice.clone()))\n", "file_path": "src/tests/process_admin_cmds.rs", "rank": 97, "score": 8.628317546140135 }, { "content": "\n\n // Subscribe to 
endpoint.\n\n stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice))\n\n );\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn current_judgement_state_multiple_inserts() {\n\n let (_db, connector, mut api, _) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 98, "score": 8.601418073446174 }, { "content": "\n\n debug!(\"Received message from {}\", event.sender);\n\n\n\n // Add external message to inner field. That field is then\n\n // fetched by the `Adapter` implementation.\n\n let mut lock = self.messages.lock().await;\n\n (*lock).push(ExternalMessage {\n\n origin: ExternalMessageType::Matrix(event.sender.to_string()),\n\n // A message UID is not relevant regarding a live\n\n // message listener. The Matrix SDK handles\n\n // synchronization.\n\n id: 0u32.into(),\n\n timestamp: Timestamp::now(),\n\n values: vec![msg_body.to_string().into()],\n\n });\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "src/adapters/matrix.rs", "rank": 99, "score": 8.480458248938586 } ]
Rust
signature_benchmark/src/wots_aes.rs
qkniep/PQ-FIDO_sd-MSS
27c49bcb82d86057ae4b4c7a429fa55671735de0
use core::convert::TryInto; use core::hash::Hasher; use aes::cipher::{generic_array::GenericArray, BlockEncrypt, NewBlockCipher}; use aes::Aes128; use getrandom; use siphasher::sip128::{Hasher128, SipHasher}; pub const W: usize = 256; pub const X: usize = 1; pub const N: usize = 128 / 8; /*/// Message digest length in bytes. const M: usize = 512 / 8; /// Length of the base `W` representation of a message of length `M`. const L1: usize = 128; /// Length of the base `W` checksum of a base `W` message of length `L1`. const L2: usize = 3; /// Number of function chains const L: usize = L1 + L2;*/ #[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)] pub struct Wots { pub pk: [u8; N], sk: Vec<[u8; N]>, } #[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)] pub struct WotsSignature { pub pk: [u8; N], pub msg_hash: [u8; N], pub signature: Vec<[u8; N]>, } impl Wots { pub fn new() -> Self { let seed: [u8; N] = rand_digest().unwrap(); return Self::from_seed(seed); } pub fn from_seed(mut seed: [u8; N]) -> Self { let mut sk = Vec::<[u8; N]>::with_capacity(N * X); let mut pk = Vec::<[u8; N]>::with_capacity(N * X); for _ in 0..N * X { let secret = prng(&mut seed); sk.push(secret); let public = chain(&secret, W - 1); pk.push(public); } let mut hasher = SipHasher::new(); for p in &pk { hasher.write(p); } let h = hasher.finish128(); let hash_bytes = h.as_bytes(); return Self { pk: hash_bytes, sk }; } pub fn sign(&self, input: &str) -> WotsSignature { let mut hasher = SipHasher::new(); hasher.write(input.as_bytes()); let h = hasher.finish128(); let hash_bytes = h.as_bytes(); let mut signature: Vec<[u8; N]> = Vec::with_capacity(N * X); let mut sig_cycles: Vec<usize> = Vec::with_capacity(N * X); for i in 0..N { let symbols = base_w(hash_bytes[i]); for s in 0..X { sig_cycles.push(symbols[s] as usize); let index = i * X + s; let sig: [u8; N] = chain(&self.sk[index], sig_cycles[index]); signature.push(sig); } } return WotsSignature { pk: self.pk.clone(), msg_hash: 
hash_bytes.clone(), signature, }; } } impl WotsSignature { pub fn verify(&self) -> bool { let mut i = 0; let mut pk = Vec::<[u8; N]>::with_capacity(N * X); for b in &self.msg_hash { for s in base_w(*b) { let cycles = W - 1 - (s as usize); pk.push(chain(&self.signature[i], cycles)); i += 1; } } let mut hasher = SipHasher::new(); for p in &pk { hasher.write(p); } let h = hasher.finish128(); let hash_bytes = h.as_bytes(); assert_eq!(self.pk, hash_bytes); return true; } } pub fn chain(input: &[u8; N], c: usize) -> [u8; N] { let mut output = *GenericArray::from_slice(input); let iv = GenericArray::from([0u8; N]); let cipher = Aes128::new(&iv); for _ in 0..c { let i = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); cipher.encrypt_block(&mut output); let o = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); let r = i ^ o; output = GenericArray::from(r.to_be_bytes()); } return output.as_slice().try_into().expect("wrong length"); } pub fn base_w(byte: u8) -> [u8; X] { let mut b = byte as usize; let mut symbols = [0u8; X]; for s in 0..X { symbols[X - 1 - s] = (b % W) as u8; b /= W; } return symbols; } fn rand_digest() -> Result<[u8; N], getrandom::Error> { let mut buf = [0u8; N]; getrandom::getrandom(&mut buf)?; Ok(buf) } pub fn prng(seed: &mut [u8; N]) -> [u8; N] { let mut output = *GenericArray::from_slice(seed); let iv = GenericArray::from([0u8; N]); let cipher = Aes128::new(&iv); cipher.encrypt_block(&mut output); let s = u128::from_be_bytes(*seed); let o = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); let r = s ^ o; let new_seed = r.wrapping_add(s).wrapping_add(1); *seed = new_seed.to_be_bytes(); return r.to_be_bytes(); } #[cfg(test)] mod tests { use super::*; #[test] fn chain_test() { let start = [0u8; N]; let mid = chain(&start, 3); let end1 = chain(&mid, 7); let end2 = chain(&start, 10); assert_eq!(end1, end2); } #[test] fn sign_and_verify() { let wots = Wots::new(); let sig = 
wots.sign("hello world"); assert_eq!(sig.verify(), true); } }
use core::convert::TryInto; use core::hash::Hasher; use aes::cipher::{generic_array::GenericArray, BlockEncrypt, NewBlockCipher}; use aes::Aes128; use getrandom; use siphasher::sip128::{Hasher128, SipHasher}; pub const W: usize = 256; pub const X: usize = 1; pub const N: usize = 128 / 8; /*/// Message digest length in bytes. const M: usize = 512 / 8; /// Length of the base `W` representation of a message of length `M`. const L1: usize = 128; /// Length of the base `W` checksum of a base `W` message of length `L1`. const L2: usize = 3; /// Number of function chains const L: usize = L1 + L2;*/ #[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)] pub struct Wots { pub pk: [u8; N], sk: Vec<[u8; N]>, } #[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)] pub struct WotsSignature { pub pk: [u8; N], pub msg_hash: [u8; N], pub signature: Vec<[u8; N]>, } impl Wots { pub fn new() -> Self { let seed: [u8; N] = rand_digest().unwrap(); return Self::from_seed(seed); } pub fn from_seed(mut seed: [u8; N]) -> Self { let mut sk = Vec::<[u8; N]>::with_capacity(N * X); let mut pk = Vec::<[u8; N]>::with_capacity(N * X); for _ in 0..N * X { let secret = prng(&mut seed); sk.push(secret); let public = chain(&secret, W - 1); pk.push(public); } let mut hasher = SipHasher::new(); for p in &pk { hasher.write(p); } let h = hasher.finish128(); let hash_bytes = h.as_bytes(); return Self { pk: hash_bytes, sk }; } pub fn sign(&self, input: &str) -> WotsSignature { let mut hasher = SipHasher::new(); hasher.write(input.as_bytes()); let h = hasher.finish128(); let hash_bytes = h.as_bytes(); let mut signature: Vec<[u8; N]> = Vec::with_capacity(N * X); let mut sig_cycles: Vec<usize> = Vec::with_capacity(N * X); for i in 0..N { let symbols = base_w(hash_bytes[i]); for s in 0..X { sig_cycles.push(symbols[s] as usize); let index = i * X + s; let sig: [u8; N] = chain(&self.sk[index], sig_cycles[index]); signature.push(sig); } } return WotsSignature { pk: self.pk.clone(), msg_hash: 
hash_bytes.clone(), signature, }; } } impl WotsSignature { pub fn verify(&se
)?; Ok(buf) } pub fn prng(seed: &mut [u8; N]) -> [u8; N] { let mut output = *GenericArray::from_slice(seed); let iv = GenericArray::from([0u8; N]); let cipher = Aes128::new(&iv); cipher.encrypt_block(&mut output); let s = u128::from_be_bytes(*seed); let o = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); let r = s ^ o; let new_seed = r.wrapping_add(s).wrapping_add(1); *seed = new_seed.to_be_bytes(); return r.to_be_bytes(); } #[cfg(test)] mod tests { use super::*; #[test] fn chain_test() { let start = [0u8; N]; let mid = chain(&start, 3); let end1 = chain(&mid, 7); let end2 = chain(&start, 10); assert_eq!(end1, end2); } #[test] fn sign_and_verify() { let wots = Wots::new(); let sig = wots.sign("hello world"); assert_eq!(sig.verify(), true); } }
lf) -> bool { let mut i = 0; let mut pk = Vec::<[u8; N]>::with_capacity(N * X); for b in &self.msg_hash { for s in base_w(*b) { let cycles = W - 1 - (s as usize); pk.push(chain(&self.signature[i], cycles)); i += 1; } } let mut hasher = SipHasher::new(); for p in &pk { hasher.write(p); } let h = hasher.finish128(); let hash_bytes = h.as_bytes(); assert_eq!(self.pk, hash_bytes); return true; } } pub fn chain(input: &[u8; N], c: usize) -> [u8; N] { let mut output = *GenericArray::from_slice(input); let iv = GenericArray::from([0u8; N]); let cipher = Aes128::new(&iv); for _ in 0..c { let i = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); cipher.encrypt_block(&mut output); let o = u128::from_be_bytes(output.as_slice().try_into().expect("wrong length")); let r = i ^ o; output = GenericArray::from(r.to_be_bytes()); } return output.as_slice().try_into().expect("wrong length"); } pub fn base_w(byte: u8) -> [u8; X] { let mut b = byte as usize; let mut symbols = [0u8; X]; for s in 0..X { symbols[X - 1 - s] = (b % W) as u8; b /= W; } return symbols; } fn rand_digest() -> Result<[u8; N], getrandom::Error> { let mut buf = [0u8; N]; getrandom::getrandom(&mut buf
random
[ { "content": "/// Applies c cycles of the SHA-256/8N hash function to the input.\n\npub fn chain(input: &[u8; N], c: usize, chain: usize, start: usize, pk_seed: &[u8; N]) -> [u8; N] {\n\n let mut output = input.clone();\n\n\n\n for i in 0..c {\n\n let (key, bitmask) = prf2(&pk_seed, ((chain << 8) + (start + i)) as u32);\n\n for (i, byte) in bitmask.iter().enumerate() {\n\n output[i] ^= byte;\n\n }\n\n output = Sha256::digest(&[key, output].concat())[..N]\n\n .try_into()\n\n .unwrap();\n\n }\n\n\n\n return output;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 0, "score": 329931.18066153704 }, { "content": "/// Signs a single message, without needing a Wots object.\n\n/// Generates secret values and bitmasks on-the-fly.\n\npub fn sign_once(msg: &str, sk_seed: &[u8; N], pk_hash: &[u8; N]) -> WotsSignature {\n\n let pk_seed = prf(sk_seed, L as u32);\n\n let cycles = cycles_for_msg(msg, pk_hash);\n\n\n\n // Calculate signature\n\n let mut signature: Vec<[u8; N]> = Vec::with_capacity(L);\n\n for (i, &c) in cycles.iter().enumerate() {\n\n let cyc = c as usize;\n\n let secret = prf(sk_seed, i as u32);\n\n signature.push(chain(&secret, cyc, i, 0, &pk_seed));\n\n }\n\n\n\n return WotsSignature {\n\n pk_hash: pk_hash.clone(),\n\n pk_seed: pk_seed,\n\n signature,\n\n };\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 1, "score": 305561.0024264166 }, { "content": "/// Applies c cycles of the Blake3-256/8N hash function to the input.\n\npub fn chain(input: &[u8; N], c: usize) -> [u8; N] {\n\n let mut output = input.clone();\n\n\n\n for _ in 0..c {\n\n output = blake3::hash(&output).as_bytes()[..N].try_into().unwrap();\n\n }\n\n\n\n return output;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 2, "score": 303194.45175139466 }, { "content": "/// Applies c cycles of the SHA-256/8N hash function to the input.\n\npub fn chain(input: &[u8; N], c: usize) -> [u8; N] {\n\n let mut output = input.clone();\n\n\n\n 
for _ in 0..c {\n\n let tmp = &Sha256::hash(&output)[..N];\n\n output.copy_from_slice(tmp);\n\n }\n\n\n\n return output;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 3, "score": 303194.45175139466 }, { "content": "/// SHA-256/8N-based PRNG\n\n/// Returns a new random value and updates the seed in-place.\n\n// TODO support N != 128 / 8 = 16\n\npub fn prng(seed: &mut [u8; N]) -> [u8; N] {\n\n let mut output = [0u8; N];\n\n output.copy_from_slice(&Sha256::hash(&seed[..])[..N]);\n\n\n\n let s = u128::from_be_bytes(*seed);\n\n let o = u128::from_be_bytes(output[..].try_into().expect(\"wrong length\"));\n\n let r = s ^ o;\n\n let new_seed = r.wrapping_add(s).wrapping_add(1);\n\n *seed = new_seed.to_be_bytes();\n\n\n\n return r.to_be_bytes();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn chain_test() {\n\n let start = [0u8; N];\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 5, "score": 282819.8629725428 }, { "content": "/// Convert a single byte into a sequence of character of base W,\n\n/// i.e. if W=16 returns 2 values in the range 0..=15.\n\npub fn base_w(byte: u8) -> [u8; X] {\n\n let mut b = byte as usize;\n\n let mut symbols = [0u8; X];\n\n\n\n for s in 0..X {\n\n symbols[X - 1 - s] = (b % W) as u8;\n\n b /= W;\n\n }\n\n\n\n return symbols;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 7, "score": 274282.9460994958 }, { "content": "/// Convert a single byte into a sequence of character of base W,\n\n/// i.e. 
if W=16 returns 2 values in the range 0..=15.\n\npub fn base_w(byte: u8) -> [u8; X] {\n\n let mut b = byte as usize;\n\n let mut symbols = [0u8; X];\n\n\n\n for s in 0..X {\n\n symbols[X - 1 - s] = (b % W) as u8;\n\n b /= W;\n\n }\n\n\n\n return symbols;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 9, "score": 274282.94609949575 }, { "content": "/// Calculate the number of hash cycles applied to each base-w symbol, based on the given message.\n\npub fn cycles_for_msg(msg: &str, pk_hash: &[u8]) -> [u8; L] {\n\n let mut cycles = [0u8; L];\n\n\n\n // Hash input string together with public key hash\n\n let msg_hash = &Sha256::digest(msg.as_bytes())[..];\n\n let hash_bytes = &Sha256::digest(&[&pk_hash, msg_hash].concat())[..M];\n\n\n\n // Calculate message cycles\n\n cycles[0..L1].copy_from_slice(&base_w(hash_bytes, L1));\n\n\n\n // Calculate checksum\n\n let mut csum: u32 = cycles[..L1].iter().map(|&x| W as u32 - 1 - x as u32).sum();\n\n csum <<= 8 - ((L2 * LOG2_W) % 8);\n\n let csum_bytes = &csum.to_be_bytes()[4 - L2_BYTES..];\n\n cycles[L1..L].copy_from_slice(&base_w(csum_bytes, L2));\n\n\n\n return cycles;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 10, "score": 270719.63448332576 }, { "content": "pub fn cycles_for_msg(msg: &str, pk_hash: &[u8]) -> [u8; L] {\n\n let mut cycles = [0u8; L];\n\n\n\n // Hash input string together with public key hash\n\n let msg_hash = blake3::hash(msg.as_bytes());\n\n let msg_hash_bytes = &msg_hash.as_bytes()[..];\n\n let hash = blake3::hash(&[&pk_hash, msg_hash_bytes].concat());\n\n let hash_bytes = &hash.as_bytes()[..M];\n\n\n\n // Calculate message signature\n\n for i in 0..M {\n\n cycles[i * X..(i + 1) * X].copy_from_slice(&base_w(hash_bytes[i]));\n\n }\n\n\n\n // Calculate checksum\n\n let mut csum: u32 = cycles[..L1].iter().map(|&x| W as u32 - 1 - x as u32).sum();\n\n csum <<= 8 - ((L2 * LOG2_W) % 8);\n\n let csum_bytes = &csum.to_be_bytes()[4 - L2..];\n\n for i in 0..L2 
{\n\n cycles[(L1 + i) * X..(L1 + i + 1) * X].copy_from_slice(&base_w(csum_bytes[i]));\n\n }\n\n\n\n return cycles;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 11, "score": 265681.2013121935 }, { "content": "/// AES-MMO Pseudo Random Number Generator\n\n/// Returns a new random value and updates the seed in-place.\n\npub fn prng(seed: &mut [u8; N]) -> [u8; N] {\n\n let mut output = *GenericArray::from_slice(seed);\n\n\n\n let iv = GenericArray::from([0u8; N]);\n\n let cipher = Aes128::new(&iv);\n\n cipher.encrypt_block(&mut output);\n\n\n\n let s = u128::from_be_bytes(*seed);\n\n let o = u128::from_be_bytes(output.as_slice().try_into().expect(\"wrong length\"));\n\n let r = s ^ o;\n\n let new_seed = r.wrapping_add(s).wrapping_add(1);\n\n *seed = new_seed.to_be_bytes();\n\n\n\n return r.to_be_bytes();\n\n}\n\n\n", "file_path": "signature_benchmark/src/merkle_aes.rs", "rank": 12, "score": 262947.21479577885 }, { "content": "/// SHA-256/8N-based PRF - with output length of 2N\n\n/// Used for deriving the hash function keys and bitmasks from the public seed.\n\n/// More efficient than two calls to prf(), at least for N <= 128 / 8.\n\npub fn prf2(seed: &[u8; N], counter: u32) -> ([u8; N], [u8; N]) {\n\n if N <= 128 / 8 {\n\n let mut data = seed.clone();\n\n for (i, byte) in counter.to_be_bytes().iter().enumerate() {\n\n data[i] ^= byte;\n\n }\n\n let output = Sha256::digest(&data);\n\n return (\n\n output[..N].try_into().unwrap(),\n\n output[N..].try_into().unwrap(),\n\n );\n\n } else {\n\n return (prf(seed, counter), prf(seed, !counter));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 13, "score": 245723.28016213054 }, { "content": "/// SHA-256/8N-based PRF\n\n/// Used for deriving the secret keys and the public seed from the secret seed.\n\npub fn prf(seed: &[u8; N], counter: u32) -> [u8; N] {\n\n let mut data = seed.clone();\n\n for (i, byte) in 
counter.to_be_bytes().iter().enumerate() {\n\n data[i] ^= byte;\n\n }\n\n return Sha256::digest(&data)[..N].try_into().unwrap();\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 14, "score": 242698.41949402489 }, { "content": "/// Blake3-256/8N-based PRF\n\npub fn prf(seed: &[u8; N], counter: u32) -> [u8; N] {\n\n // convert counter to bytes\n\n let mut counter_bytes = [0u8; N];\n\n counter_bytes[N - 4..].copy_from_slice(&counter.to_be_bytes());\n\n\n\n let mut hasher = blake3::Hasher::new();\n\n hasher.update(seed);\n\n hasher.update(&counter_bytes);\n\n return hasher.finalize().as_bytes()[..N].try_into().unwrap();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn sign_and_verify() {\n\n let wots = Wots::new();\n\n let sig = wots.sign(\"hello world\");\n\n assert_eq!(sig.verify(\"hello world\"), true);\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 15, "score": 238667.66937228944 }, { "content": "fn calculate_leaf(seed: &[u8; N], index: u32) -> [u8; N] {\n\n let mut index_bytes = [0u8; N];\n\n index_bytes[..4].copy_from_slice(&index.to_be_bytes());\n\n\n\n let secret = hash2(seed, &index_bytes);\n\n let wots = Wots::from_seed(secret);\n\n wots.pk_hash\n\n}\n\n\n", "file_path": "signature_benchmark/src/merkle.rs", "rank": 16, "score": 228252.40351108648 }, { "content": "fn hash_x_times(input: [u8; N], x: i32) -> [u8; N] {\n\n let mut output = input.clone();\n\n for _ in 0..x {\n\n let tmp = &Sha256::digest(&output);\n\n output.copy_from_slice(tmp);\n\n }\n\n output\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn basic() {\n\n let mut p = Proofer::new(10);\n\n let mut v = Verifier::new(p.n, p.pk());\n\n\n\n let ch = v.next_challenge();\n\n let pwd = p.reply(ch);\n", "file_path": "signature_benchmark/src/lamport_otp.rs", "rank": 17, "score": 228086.526943548 }, { "content": "/// Convert a byte slice into a sequence of characters of base W,\n\n/// i.e. 
if W=16 returns a Vec twice the input length containing values in the range 0..=15.\n\npub fn base_w(bytes: &[u8], len: usize) -> Vec<u8> {\n\n let mut b = bytes[bytes.len() - 1] as usize;\n\n let mut bi = bytes.len() - 1;\n\n let mut symbols = vec![0; len];\n\n let mut bits = 8;\n\n\n\n for i in (0..len).rev() {\n\n symbols[i] = (b % (1 << LOG2_W)) as u8;\n\n b /= 1 << LOG2_W;\n\n bits -= LOG2_W;\n\n if bits <= LOG2_W {\n\n bits += 8;\n\n if bi > 0 {\n\n bi -= 1;\n\n b += (bytes[bi as usize] as usize) << bits;\n\n }\n\n }\n\n }\n\n\n\n return symbols.to_vec();\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 18, "score": 223739.80165043336 }, { "content": "fn hash_n_times(input: [u8; N], n: i32) -> [u8; N] {\n\n let mut output = input.clone();\n\n for _ in 0..n {\n\n let tmp = &Sha256::digest(&output);\n\n output.copy_from_slice(tmp);\n\n }\n\n output\n\n}\n\n\n", "file_path": "signature_benchmark/src/park_otp.rs", "rank": 19, "score": 214403.84057381132 }, { "content": "/// Hashes two N-byte blocks into one.\n\npub fn hash2(in1: &[u8; N], in2: &[u8; N]) -> [u8; N] {\n\n Sha256::digest(&[&in1[..], &in2[..]].concat())[..N]\n\n .try_into()\n\n .unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn sign_and_verify() {\n\n let mut merkle = UpdatableMerkleKeypair::new(7, 7, false);\n\n let sig = merkle.sign(\"hello\");\n\n assert_eq!(sig.verify(\"hello\", merkle.pk), true);\n\n assert_eq!(sig.verify(\"world\", merkle.pk), false);\n\n\n\n let sig = merkle.sign(\"world\");\n\n assert_eq!(sig.verify(\"world\", merkle.pk), true);\n\n assert_eq!(sig.verify(\"hello\", merkle.pk), false);\n", "file_path": "signature_benchmark/src/merkle.rs", "rank": 21, "score": 196569.7076805319 }, { "content": "/// AES-MMO-based hash function\n\n/// Hashes two blocks into one.\n\npub fn hash2(in1: &[u8; N], in2: &[u8; N]) -> [u8; N] {\n\n let mut block1 = *GenericArray::from_slice(in1);\n\n let mut block2 = 
*GenericArray::from_slice(in2);\n\n\n\n let iv = GenericArray::from([0u8; N]);\n\n let cipher = Aes128::new(&iv);\n\n cipher.encrypt_block(&mut block1);\n\n\n\n let s = u128::from_be_bytes(*in1);\n\n let o = u128::from_be_bytes(block1.as_slice().try_into().expect(\"wrong length\"));\n\n let r = s ^ o;\n\n\n\n let cipher = Aes128::new(&GenericArray::from(r.to_be_bytes()));\n\n cipher.encrypt_block(&mut block2);\n\n let s = u128::from_be_bytes(block1.as_slice().try_into().expect(\"wrong length\"));\n\n let o = u128::from_be_bytes(block2.as_slice().try_into().expect(\"wrong length\"));\n\n let r = s ^ o;\n\n\n\n return r.to_be_bytes();\n\n}\n", "file_path": "signature_benchmark/src/merkle_aes.rs", "rank": 22, "score": 194026.3607090125 }, { "content": "/// Gets N bytes of high-entropy randomness from the OS.\n\nfn rand_digest() -> Result<[u8; N], getrandom::Error> {\n\n let mut buf = [0u8; N];\n\n getrandom::getrandom(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "signature_benchmark/src/merkle.rs", "rank": 23, "score": 193970.00468149863 }, { "content": "/// Get high-entropy randomness of the length required as seed from OS.\n\nfn rand_seed() -> [u8; N] {\n\n let mut bytes = [0; N];\n\n let mut rng = thread_rng();\n\n rng.fill_bytes(&mut bytes);\n\n return bytes;\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 24, "score": 191181.29806993392 }, { "content": "/// Get high-entropy randomness of the same length as the hash output from OS.\n\nfn rand_digest() -> Result<[u8; N], getrandom::Error> {\n\n let mut buf = [0u8; N];\n\n getrandom::getrandom(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "signature_benchmark/src/merkle_aes.rs", "rank": 25, "score": 190069.89579381578 }, { "content": "fn calculate_z(x: [u8; N], y: [u8; N]) -> [u8; N] {\n\n let mut sha = Sha256::new();\n\n sha.update(x);\n\n sha.update(y);\n\n return sha.finalize().try_into().unwrap();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
basic() {\n\n let mut p = Proofer::new();\n\n let mut v = Verifier::new(p.pk());\n\n\n\n let ch = v.next_challenge();\n\n let pwd = p.reply(ch);\n\n assert_eq!(v.verify(pwd), true);\n\n }\n", "file_path": "signature_benchmark/src/park_otp.rs", "rank": 26, "score": 183458.205642674 }, { "content": "/// Calculates the root hash of a given (sub-)tree.\n\n/// Panics if given a number of leaves that is not a power of two.\n\nfn calculate_root_and_cache(leaves: Vec<[u8; N]>, caching: usize) -> ([u8; N], Vec<[u8; N]>) {\n\n if leaves.len().count_ones() != 1 {\n\n panic!(\"invalid number of leaves: needs to be a power of two\");\n\n }\n\n\n\n let mut tmp = leaves.to_vec();\n\n let mut cache = Vec::new();\n\n\n\n while tmp.len() > 1 {\n\n if tmp.len() == 1 << caching {\n\n cache = tmp.clone();\n\n }\n\n tmp = tmp.chunks(2).fold(Vec::new(), |mut vec, chunk| {\n\n vec.push(hash2(&chunk[0], &chunk[1]));\n\n vec\n\n });\n\n }\n\n\n\n return (tmp[0], cache);\n\n}\n\n\n", "file_path": "signature_benchmark/src/merkle.rs", "rank": 27, "score": 168270.9004056757 }, { "content": "type PubKey = [u8; (1 << S) * N + N];\n\n\n\n/// SD-MSS-WOTS Keypair\n\n// TODO save some space here\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct Keypair {\n\n sk_seed: SecKey,\n\n pub shallow: UpdatableMerkleKeypair,\n\n pub deep: UpdatableMerkleKeypair,\n\n}\n\n\n\n/// SD-MSS-WOTS Signature\n\n// TODO collapse `deep` and `new_deep_ctr` into one value to save 4 bytes?\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct Signature {\n\n pub deep: bool,\n\n pub new_shallow_ctr: u32,\n\n pub new_deep_ctr: u32,\n\n pub merkle_sig: MerkleSignature,\n\n}\n", "file_path": "signature_benchmark/src/sdmss.rs", "rank": 28, "score": 155800.03369897814 }, { "content": "fn can_forge(msg1_hash: &Hash, msg2: &str) -> bool {\n\n let h2 = hash(msg2.as_bytes());\n\n let h1b = msg1_hash.as_bytes();\n\n let h2b = h2.as_bytes();\n\n\n\n for i in 0..N {\n\n let s1 = 
base_w(h1b[i]);\n\n let s2 = base_w(h2b[i]);\n\n\n\n for s in 0..X {\n\n if s2[s] < s1[s] || s2[s] < s1[s] {\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n return true;\n\n}*/\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 29, "score": 155303.8098572008 }, { "content": "fn wots_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let w = wots::Wots::new();\n\n c.bench_function(\"WOTS (SHA-256) Verify\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let m = format!(\"msg{}\", n);\n\n (m.clone(), w.sign(&m))\n\n },\n\n |(msg, sig)| sig.verify(&msg),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n wots_cycles_for_msg,\n\n wots_keygen,\n\n wots_sign,\n\n wots_verify\n\n);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/wots.rs", "rank": 30, "score": 151329.04801177682 }, { "content": "fn siphash(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut hasher = SipHasher::new();\n\n c.bench_function(\"SipHash\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| {\n\n hasher.write(m.as_bytes());\n\n hasher.finish128()\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches, hmac, aes_ctr, aes_mmo, blake3, sha2_224, sha2_256, sha3_224, sha3_256, siphash\n\n);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 31, "score": 146251.21501184633 }, { "content": "/// Generate a forged signature for message msg2.\n\nfn forge_signature(msg1: &str, sig1: WotsSignature, msg2: &str) -> WotsSignature {\n\n let h1 = hash(msg1.as_bytes());\n\n let h2 = hash(msg2.as_bytes());\n\n let h1b = h1.as_bytes();\n\n let h2b = h2.as_bytes();\n\n\n\n let mut sig2 = WotsSignature {\n\n w: sig1.w,\n\n pk: sig1.pk,\n\n input: h2b.clone(),\n\n signature: sig1.signature,\n\n };\n\n\n\n for i in 0..N {\n\n let 
s1 = base_w(h1b[i]);\n\n let s2 = base_w(h2b[i]);\n\n\n\n for s in 0..X {\n\n let index = X * i + s;\n\n sig2.signature[index] = blake_hash(&sig2.signature[index], (s2[s] - s1[s]) as usize);\n\n }\n\n }\n\n\n\n return sig2;\n\n}\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 32, "score": 133204.87853718834 }, { "content": "fn wots_sign(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let w = wots::Wots::new();\n\n println!(\n\n \"Signature Size: {} Bytes\",\n\n w.sign(\"test123\").signature.len() * wots::N\n\n );\n\n c.bench_function(\"WOTS (SHA-256) Sign\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| w.sign(&m),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/wots.rs", "rank": 33, "score": 125798.8731076422 }, { "content": "fn wots_keygen(c: &mut Criterion) {\n\n //println!(\"Public Key Size: {}\", wots::Wots::new().pk.len() * wots::N);\n\n c.bench_function(\"WOTS (SHA-256) KeyGen\", |b| b.iter(|| wots::Wots::new()));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/wots.rs", "rank": 34, "score": 125798.8731076422 }, { "content": "fn wots_cycles_for_msg(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let w = wots::Wots::new();\n\n c.bench_function(\"WOTS Convert Msg to Cycles\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| wots::cycles_for_msg(&m, &w.pk_hash),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/wots.rs", "rank": 35, "score": 122959.13557842321 }, { "content": "fn falcon_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let (pk, sk) = keypair();\n\n c.bench_function(\"Falcon512 Verify\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n let msg = sign(black_box(h.as_bytes()), &sk);\n\n 
(msg, pk)\n\n },\n\n |(msg, pk)| open(&msg, &pk),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(benches, falcon_keygen, falcon_sign, falcon_verify);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/falcon.rs", "rank": 36, "score": 119960.93542792121 }, { "content": "fn ecdsa_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut csprng = OsRng {};\n\n let keys = Keypair::generate(&mut csprng);\n\n let pk = keys.public;\n\n c.bench_function(\"ECDSA (Ed25519) Verify\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n let sig = keys.sign(h.as_bytes());\n\n (h, sig)\n\n },\n\n |(h, sig)| pk.verify(h.as_bytes(), &sig),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(benches, ecdsa_keygen, ecdsa_sign, ecdsa_verify);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/ecdsa.rs", "rank": 37, "score": 119960.93542792121 }, { "content": "fn park_verify(c: &mut Criterion) {\n\n let mut p = park_otp::Proofer::new();\n\n let mut v = park_otp::Verifier::new(p.pk());\n\n\n\n // start at random point in chain\n\n let mut rng = thread_rng();\n\n let n = rng.next_u32() % 1000;\n\n\n\n let mut ch = v.next_challenge();\n\n let mut pwd = p.reply(ch);\n\n for _ in 0..n {\n\n ch = v.next_challenge();\n\n pwd = p.reply(ch);\n\n }\n\n\n\n c.bench_function(\"Park OTP VerifyPwd\", |b| b.iter(|| v.verify(pwd)));\n\n}\n\n\n\ncriterion_group!(lamport, lamport_init, lamport_gen, lamport_verify);\n\ncriterion_group!(park, park_init, park_gen, park_verify);\n\ncriterion_main!(lamport, park);\n", "file_path": "signature_benchmark/benches/otp.rs", "rank": 38, "score": 119960.93542792121 }, { "content": "fn dilithium_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let (pk, sk) = keypair();\n\n c.bench_function(\"Dilithium2 Verify\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = 
rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n let msg = sign(black_box(h.as_bytes()), &sk);\n\n (msg, pk)\n\n },\n\n |(msg, pk)| open(&msg, &pk),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(benches, dilithium_keygen, dilithium_sign, dilithium_verify);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/dilithium.rs", "rank": 39, "score": 119960.93542792121 }, { "content": "fn merkle_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"MSS-WOTS (7, 7, false) - Verify\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mut mss = UpdatableMerkleKeypair::new(7, 7, false);\n\n let n = rng.next_u32();\n\n let msg = format!(\"msg{}\", n);\n\n let sig = mss.sign(&msg);\n\n (mss, msg, sig)\n\n },\n\n |(mss, msg, sig)| sig.verify(&msg, mss.pk),\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 40, "score": 119960.93542792121 }, { "content": "fn lamport_verify(c: &mut Criterion) {\n\n let mut p = lamport_otp::Proofer::new(128);\n\n let mut v = lamport_otp::Verifier::new(128, p.pk());\n\n let ch = v.next_challenge();\n\n let pwd = p.reply(ch);\n\n c.bench_function(\"Lamport OTP VerifyPwd (n=128, WC)\", |b| {\n\n b.iter(|| v.verify(pwd))\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/otp.rs", "rank": 41, "score": 119960.93542792121 }, { "content": "fn sd_merkle_verify(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SD-MSS-WOTS Verify\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mut mss = Keypair::new();\n\n let n = rng.next_u32();\n\n let msg = format!(\"msg{}\", n);\n\n let sig = mss.sign(&msg, 0, 0);\n\n (mss, msg, sig)\n\n },\n\n |(mss, msg, sig)| sig.verify(&msg, mss.shallow.pk, mss.deep.pk),\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n sd_merkle_keygen,\n\n sd_merkle_sign_min,\n\n sd_merkle_sign_avg,\n\n sd_merkle_sign_max,\n\n 
sd_merkle_verify\n\n);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/sdmss.rs", "rank": 42, "score": 116972.94879017188 }, { "content": "fn tries_needed(msg: &str) -> u64 {\n\n let h = hash(msg.as_bytes());\n\n\n\n let mut tries = 1.0;\n\n for b in h.as_bytes() {\n\n let symbols = base_w(*b);\n\n\n\n for s in 0..X {\n\n tries *= W as f64 / (W as f64 - symbols[s] as f64);\n\n }\n\n }\n\n\n\n return tries.round() as u64;\n\n}\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 43, "score": 116277.26344710407 }, { "content": "fn merkle_verify_small_ssc(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"MSS-WOTS (3, 3, true) - Verify\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mut mss = UpdatableMerkleKeypair::new(3, 3, true);\n\n let n = rng.next_u32();\n\n let msg = format!(\"msg{}\", n);\n\n let sig = mss.sign(&msg);\n\n (mss, msg, sig)\n\n },\n\n |(mss, msg, sig)| sig.verify(&msg, mss.pk),\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(normal, merkle_keygen, merkle_sign, merkle_verify);\n\ncriterion_group!(\n\n small_ssc,\n\n merkle_keygen_small_ssc,\n\n merkle_sign_small_ssc,\n\n merkle_verify_small_ssc\n\n);\n\ncriterion_main!(normal, small_ssc);\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 44, "score": 114184.67985225155 }, { "content": "fn find_matching_second_msg(msg1: &str) -> String {\n\n let msg1_hash = hash(msg1.as_bytes());\n\n\n\n let pos = (0..usize::MAX)\n\n .into_par_iter()\n\n .map(|i| format!(\"forged-{}\", i))\n\n .position_any(|m| can_forge(&msg1_hash, &m))\n\n .unwrap();\n\n\n\n let msg2 = format!(\"forged-{}\", pos);\n\n println!(\"found forgable message ({}) for {}\", msg2, msg1);\n\n\n\n return msg2;\n\n}\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 45, "score": 111278.30617307256 }, { "content": "type SecKey = [u8; N];\n", "file_path": "signature_benchmark/src/sdmss.rs", "rank": 46, "score": 108777.64377708625 }, { 
"content": "fn sha3_224(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SHA3-224\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| Sha3_224::digest(m.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 47, "score": 96692.42192776917 }, { "content": "fn hmac(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let n = rng.next_u32();\n\n let key = format!(\"key{}\", n);\n\n let mut mac = Hmac::<blake3::Hasher>::new_varkey(key.as_bytes()).unwrap();\n\n c.bench_function(\"HMAC (Blake3)\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n let msg = format!(\"msg{}\", n);\n\n hash(msg.as_bytes())\n\n },\n\n |msg| {\n\n mac.update(msg.as_bytes());\n\n mac.finalize_reset()\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 48, "score": 96692.42192776917 }, { "content": "fn blake3(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"Blake3\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| hash(m.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 49, "score": 96692.42192776917 }, { "content": "fn sha3_256(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SHA3-256\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| Sha3_256::digest(m.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 50, "score": 96692.42192776917 }, { "content": "fn sha2_256(c: &mut Criterion) {\n\n let mut rng = 
thread_rng();\n\n c.bench_function(\"SHA2-256\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| Sha256::digest(m.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 51, "score": 96692.42192776917 }, { "content": "fn sha2_224(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SHA2-224\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let n = rng.next_u32();\n\n format!(\"msg{}\", n)\n\n },\n\n |m| Sha224::digest(m.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 52, "score": 96692.42192776917 }, { "content": "fn ecdsa_sign(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut csprng = OsRng {};\n\n let keys = Keypair::generate(&mut csprng);\n\n c.bench_function(\"ECDSA (Ed25519) Sign\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n h\n\n },\n\n |h| keys.sign(h.as_bytes()),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/ecdsa.rs", "rank": 53, "score": 94430.76052378656 }, { "content": "fn ecdsa_keygen(c: &mut Criterion) {\n\n let mut csprng = OsRng {};\n\n c.bench_function(\"ECDSA (Ed25519) KeyGen\", |b| {\n\n b.iter(|| Keypair::generate(&mut csprng))\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/ecdsa.rs", "rank": 54, "score": 94430.76052378656 }, { "content": "fn park_gen(c: &mut Criterion) {\n\n let mut p = park_otp::Proofer::new();\n\n let mut v = park_otp::Verifier::new(p.pk());\n\n c.bench_function(\"Park OTP GenPwd\", |b| {\n\n b.iter_batched(\n\n || v.next_challenge(),\n\n |ch| p.reply(ch),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/otp.rs", 
"rank": 55, "score": 94430.76052378656 }, { "content": "fn falcon_sign(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let (_, sk) = keypair();\n\n c.bench_function(\"Falcon512 Sign\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n (h, sk)\n\n },\n\n |(h, sk)| sign(black_box(h.as_bytes()), &sk),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/falcon.rs", "rank": 56, "score": 94430.76052378656 }, { "content": "fn merkle_keygen(c: &mut Criterion) {\n\n c.bench_function(\"MSS-WOTS (7, 7, false) - KeyGen\", |b| {\n\n b.iter(|| UpdatableMerkleKeypair::new(7, 7, false))\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 57, "score": 94430.76052378656 }, { "content": "fn park_init(c: &mut Criterion) {\n\n c.bench_function(\"Park OTP Init\", |b| {\n\n b.iter(|| park_otp::Proofer::new().pk())\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/otp.rs", "rank": 58, "score": 94430.76052378656 }, { "content": "fn dilithium_sign(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let (_, sk) = keypair();\n\n c.bench_function(\"Dilithium2 Sign\", |b| {\n\n b.iter_batched(\n\n || {\n\n let n = rng.next_u32();\n\n let s = format!(\"msg{}\", n);\n\n let h = hash(s.as_bytes()).to_hex();\n\n (h, sk)\n\n },\n\n |(h, sk)| sign(black_box(h.as_bytes()), &sk),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/dilithium.rs", "rank": 59, "score": 94430.76052378656 }, { "content": "fn falcon_keygen(c: &mut Criterion) {\n\n c.bench_function(\"Falcon512 KeyGen\", |b| b.iter(|| keypair()));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/falcon.rs", "rank": 60, "score": 94430.76052378656 }, { "content": "fn dilithium_keygen(c: &mut Criterion) {\n\n c.bench_function(\"Dilithium2 KeyGen\", |b| b.iter(|| keypair()));\n\n}\n\n\n", "file_path": 
"signature_benchmark/benches/dilithium.rs", "rank": 61, "score": 94430.76052378656 }, { "content": "fn lamport_init(c: &mut Criterion) {\n\n c.bench_function(\"Lamport OTP Init (n=128)\", |b| {\n\n b.iter(|| lamport_otp::Proofer::new(128).pk())\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/otp.rs", "rank": 62, "score": 94430.76052378656 }, { "content": "fn aes_ctr(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut block = [0u8; 16];\n\n rng.fill_bytes(&mut block);\n\n let key = GenericArray::clone_from_slice(&block);\n\n rng.fill_bytes(&mut block);\n\n let nonce = GenericArray::clone_from_slice(&block);\n\n\n\n let mut cipher = Aes128Ctr::new(&key, &nonce);\n\n c.bench_function(\"AES-128-CTR\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let mut block = [0u8; 16];\n\n rng.fill_bytes(&mut block);\n\n block\n\n },\n\n |mut b| {\n\n cipher.apply_keystream(&mut b);\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 63, "score": 94430.76052378656 }, { "content": "fn merkle_sign(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"MSS-WOTS (7, 7, false) - Sign\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mss = UpdatableMerkleKeypair::new(7, 7, false);\n\n let n = rng.next_u32();\n\n (mss, format!(\"msg{}\", n))\n\n },\n\n |(mut mss, msg)| mss.sign(&msg),\n\n );\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 64, "score": 94430.76052378656 }, { "content": "fn kyber512_enc(c: &mut Criterion) {\n\n let (pk, _) = keypair();\n\n c.bench_function(\"Kyber512 Enc\", |b| b.iter(|| encapsulate(&pk)));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/kyber.rs", "rank": 65, "score": 94430.76052378656 }, { "content": "fn aes_mmo(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut block = [0u8; 16];\n\n rng.fill_bytes(&mut block);\n\n let iv = 
GenericArray::clone_from_slice(&block);\n\n\n\n let cipher = Aes128::new(&iv);\n\n c.bench_function(\"AES-128-MMO\", |b| {\n\n b.iter_batched(\n\n || {\n\n // Generate new message to sign\n\n let mut block = [0u8; 16];\n\n rng.fill_bytes(&mut block);\n\n //block\n\n (block, GenericArray::from(block))\n\n },\n\n |(b, mut ga)| {\n\n //let mut ga = GenericArray::from(b);\n\n let i = u128::from_be_bytes(b.clone());\n\n cipher.encrypt_block(&mut ga);\n\n let o = u128::from_be_bytes(b.clone());\n\n let r = i ^ o;\n\n return GenericArray::from(r.to_be_bytes());\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/hmac.rs", "rank": 66, "score": 94430.76052378656 }, { "content": "fn kyber512_keygen(c: &mut Criterion) {\n\n c.bench_function(\"Kyber512 KeyGen\", |b| b.iter(|| keypair()));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/kyber.rs", "rank": 67, "score": 94430.76052378656 }, { "content": "fn lamport_gen(c: &mut Criterion) {\n\n let mut p = lamport_otp::Proofer::new(128);\n\n let mut v = lamport_otp::Verifier::new(128, p.pk());\n\n let ch = v.next_challenge();\n\n c.bench_function(\"Lamport OTP GenPwd (n=128, WC)\", |b| b.iter(|| p.reply(ch)));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/otp.rs", "rank": 68, "score": 94430.76052378656 }, { "content": "fn kyber512_dec(c: &mut Criterion) {\n\n let (pk, sk) = keypair();\n\n c.bench_function(\"Kyber512 Dec\", |b| {\n\n b.iter_batched(\n\n || encapsulate(&pk),\n\n |(_, ct)| decapsulate(&ct, &sk),\n\n BatchSize::SmallInput,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(benches, kyber512_keygen, kyber512_enc, kyber512_dec);\n\ncriterion_main!(benches);\n", "file_path": "signature_benchmark/benches/kyber.rs", "rank": 69, "score": 94430.76052378656 }, { "content": "fn sd_merkle_keygen(c: &mut Criterion) {\n\n c.bench_function(\"SD-MSS-WOTS KeyGen\", |b| b.iter(|| Keypair::new()));\n\n}\n\n\n", "file_path": "signature_benchmark/benches/sdmss.rs", "rank": 70, 
"score": 92325.49587457982 }, { "content": "fn sd_merkle_sign_max(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SD-MSS-WOTS Sign (max)\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mut mss = Keypair::new();\n\n for _ in 0..(1 << sdmss::S) {\n\n mss.sign(\"hello\", 0, 0);\n\n }\n\n let n = rng.next_u32();\n\n (mss, format!(\"msg{}\", n))\n\n },\n\n |(mut mss, msg)| mss.sign(&msg, 0, 0),\n\n );\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/sdmss.rs", "rank": 71, "score": 90360.94758867902 }, { "content": "fn sd_merkle_sign_min(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SD-MSS-WOTS Sign (min)\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mss = Keypair::new();\n\n let n = rng.next_u32();\n\n (mss, format!(\"msg{}\", n))\n\n },\n\n |(mut mss, msg)| mss.sign(&msg, 0, 0),\n\n );\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/sdmss.rs", "rank": 72, "score": 90360.94758867902 }, { "content": "fn merkle_sign_small_ssc(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"MSS-WOTS (3, 3, true) - Sign\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mss = UpdatableMerkleKeypair::new(3, 3, true);\n\n let n = rng.next_u32();\n\n (mss, format!(\"msg{}\", n))\n\n },\n\n |(mut mss, msg)| mss.sign(&msg),\n\n );\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 73, "score": 90360.94758867902 }, { "content": "fn sd_merkle_sign_avg(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n c.bench_function(\"SD-MSS-WOTS Sign (avg)\", |b| {\n\n b.iter_with_setup(\n\n || {\n\n let mut mss = Keypair::new();\n\n while rng.next_u32() % 2 == 1 {\n\n mss.sign(\"hello\", 0, 0);\n\n }\n\n let n = rng.next_u32();\n\n (mss, format!(\"msg{}\", n))\n\n },\n\n |(mut mss, msg)| mss.sign(&msg, 0, 0),\n\n );\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/sdmss.rs", "rank": 74, "score": 90360.94758867902 }, { "content": "fn 
merkle_keygen_small_ssc(c: &mut Criterion) {\n\n c.bench_function(\"MSS-WOTS (3, 3, true) - KeyGen\", |b| {\n\n b.iter(|| UpdatableMerkleKeypair::new(3, 3, true))\n\n });\n\n}\n\n\n", "file_path": "signature_benchmark/benches/merkle.rs", "rank": 75, "score": 90360.94758867902 }, { "content": "fn main() {\n\n let msg1 = find_good_first_msg();\n\n\n\n // Calculate a signature for msg1 using the secret key.\n\n let wots = Wots::new();\n\n let sig1 = wots.sign(&msg1);\n\n\n\n // Find a message which is forgable given msg1 and its signature.\n\n let msg2 = find_matching_second_msg(&msg1);\n\n\n\n // Forge a signature for msg2 using only public key, msg1 (+ its signature), and msg2.\n\n let forged_sig = forge_signature(&msg1, sig1, &msg2);\n\n\n\n if forged_sig.verify() {\n\n println!(\"Signature verification successful!\");\n\n } else {\n\n println!(\"Signature verification failed!\");\n\n }\n\n}\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 76, "score": 82176.00157071868 }, { "content": "fn main() {}\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 77, "score": 82176.00157071868 }, { "content": "fn find_good_first_msg() -> String {\n\n let (min_tries, s) = (0..u64::MAX)\n\n .into_par_iter()\n\n .map(|i| {\n\n let msg = format!(\"original-{}\", i);\n\n (tries_needed(&msg), msg)\n\n })\n\n .find_any(|(t, _)| *t < 1u64 << 16 * X)\n\n .unwrap();\n\n\n\n println!(\"the best message ({}) needs only {} tries\", s, min_tries);\n\n\n\n return s;\n\n}\n\n\n", "file_path": "signature_benchmark/examples/forge_wots.rs", "rank": 78, "score": 72142.53345431374 }, { "content": "\n\n/// Security parameter, PRF output size in bytes.\n\n/// Can be at most 256 / 8 (=32), as long as we instantiate with SHA256.\n\npub const N: usize = 128 / 8;\n\n\n\n/// Message digest length in bytes.\n\nconst M: usize = N;\n\n\n\n/// Length of the base `W` representation of a message of length `M`.\n\nconst L1: usize = (8 * M + LOG2_W - 1) / LOG2_W; // 
+LOG2_W-1 for ceil\n\n\n\n/// Length of the base `W` checksum of a base `W` message of length `L1`.\n\n/// `L2 = floor(log_W(L1 * (W - 1))) + 1`\n\nconst L2: usize = 3;\n\nconst L2_BYTES: usize = (L2 * LOG2_W + 7) / 8; // +7 for ceil\n\n\n\n/// Total number of function chains, i.e. number of N-byte hashes in the actual signature.\n\nconst L: usize = L1 + L2;\n\n\n\n/// WOTS+ Keypair\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 79, "score": 42993.14822379618 }, { "content": "#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct Wots {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n sk_seed: [u8; N],\n\n}\n\n\n\n/// WOTS+ Signature\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct WotsSignature {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n pub signature: Vec<[u8; N]>,\n\n}\n\n\n\nimpl Wots {\n\n /// Generates a new WOTS+ Keypair.\n\n pub fn new() -> Self {\n\n let mut seed = [0u8; N];\n\n thread_rng().fill_bytes(&mut seed);\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 80, "score": 42991.421620097586 }, { "content": " return WotsSignature {\n\n pk_hash: self.pk_hash.clone(),\n\n pk_seed: self.pk_seed.clone(),\n\n signature,\n\n };\n\n }\n\n}\n\n\n\nimpl WotsSignature {\n\n /// Verifies the signature against the public key.\n\n pub fn verify(&self, msg: &str) -> bool {\n\n let cycles = cycles_for_msg(msg, &self.pk_hash);\n\n\n\n // Calculate public key hash\n\n let mut sha = Sha256::new();\n\n sha.update(self.pk_seed);\n\n for (i, &c) in cycles.iter().enumerate() {\n\n let cyc = c as usize;\n\n let sig = self.signature[i];\n\n sha.update(chain(&sig, W - 1 - cyc, i, cyc, &self.pk_seed));\n\n }\n\n let pk_hash: [u8; N] = sha.finalize()[..N].try_into().unwrap();\n\n\n\n return pk_hash.ct_eq(&self.pk_hash).unwrap_u8() == 1;\n\n }\n\n}\n\n\n\n/// Signs a single message, without needing a Wots object.\n\n/// Generates secret values and bitmasks on-the-fly.\n", "file_path": 
"signature_benchmark/src/wots.rs", "rank": 81, "score": 42990.81217593645 }, { "content": "\n\n return Self {\n\n pk_hash,\n\n pk_seed,\n\n sk_seed,\n\n };\n\n }\n\n\n\n /// Hashes and then signs an input string of arbitrary length.\n\n pub fn sign(&self, msg: &str) -> WotsSignature {\n\n let cycles = cycles_for_msg(msg, &self.pk_hash);\n\n\n\n // Calculate signature\n\n let mut signature: Vec<[u8; N]> = Vec::with_capacity(L);\n\n for (i, &c) in cycles.iter().enumerate() {\n\n let cyc = c as usize;\n\n let secret = prf(&self.sk_seed, i as u32);\n\n signature.push(chain(&secret, cyc, i, 0, &self.pk_seed));\n\n }\n\n\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 82, "score": 42990.64573850106 }, { "content": " return Self::from_seed(seed);\n\n }\n\n\n\n /// Creates a WOTS+ Keypair from the specified seed.\n\n /// The seed needs to come from a high-entropy cryptographically secure source of randomness.\n\n pub fn from_seed(sk_seed: [u8; N]) -> Self {\n\n let mut sha = Sha256::new();\n\n\n\n // Generate public seed\n\n let pk_seed = prf(&sk_seed, L as u32);\n\n sha.update(pk_seed);\n\n\n\n // Calculate public key hash\n\n for i in 0..L {\n\n let secret = prf(&sk_seed, i as u32);\n\n let public = chain(&secret, W - 1, i, 0, &pk_seed);\n\n sha.update(public);\n\n }\n\n\n\n let pk_hash: [u8; N] = sha.finalize()[..N].try_into().unwrap();\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 83, "score": 42985.27461298851 }, { "content": " #[test]\n\n fn sign_and_verify() {\n\n let wots = Wots::new();\n\n let sig = wots.sign(\"hello world\");\n\n assert_eq!(sig.verify(\"hello world\"), true);\n\n assert_eq!(sig.verify(\"hello\"), false);\n\n assert_eq!(sig.verify(\"hello world 123\"), false);\n\n assert_eq!(sig.verify(\"123 hello world\"), false);\n\n }\n\n\n\n #[test]\n\n fn hash_chain_test() {\n\n let start = [0u8; N];\n\n let mid = chain(&start, 3, 0, 0, &[0u8; N]);\n\n let end1 = chain(&mid, 7, 0, 3, &[0u8; N]);\n\n let end2 = chain(&start, 10, 0, 0, 
&[0u8; N]);\n\n let end3 = chain(&end2, 0, 0, 0, &[0u8; N]);\n\n assert_eq!(end1, end2);\n\n assert_eq!(end1, end3);\n\n assert_ne!(end1, start);\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 84, "score": 42973.99312519889 }, { "content": "// Copyright (C) 2021 Quentin Kniep <[email protected]>\n\n// Distributed under terms of the MIT license.\n\n\n\n//! Winternitz One-Time Signature (WOTS+) Scheme.\n\n//! This implementation does not conform to RFC 8391 (see the paragraph below).\n\n//!\n\n//! Instead of simply hashing the message and signing that hash,\n\n//! the public key hash is included in the message hash.\n\n//! This allows for 50% shorter signatures with the same parameters.\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse rand::{thread_rng, RngCore};\n\nuse sha2::{Digest, Sha256};\n\nuse subtle::ConstantTimeEq;\n\n\n\n/// The Winternitz Parameter, determining time-space tradeoff.\n\n/// Needs to be a power of two, with 2 <= W <= 256.\n\npub const W: usize = 16;\n\npub const LOG2_W: usize = 4;\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 85, "score": 42972.40666562503 }, { "content": " assert_ne!(end1, mid);\n\n assert_ne!(start, mid);\n\n }\n\n\n\n #[test]\n\n fn base_w_conversion() {\n\n for t in 0..=255 {\n\n let bw = base_w(&[t], (8 + LOG2_W - 1) / LOG2_W);\n\n let mut sum = 0usize;\n\n let mut value = 1usize;\n\n for digit in bw.iter().rev() {\n\n sum += (*digit as usize) * value;\n\n value *= W;\n\n }\n\n assert_eq!(sum, t as usize);\n\n }\n\n }\n\n}\n", "file_path": "signature_benchmark/src/wots.rs", "rank": 86, "score": 42965.010199421864 }, { "content": "// Copyright (C) 2021 Quentin Kniep <[email protected]>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse criterion::*;\n\nuse rand::{thread_rng, RngCore};\n\n\n\nuse signature_benchmark::wots;\n\n\n", "file_path": "signature_benchmark/benches/wots.rs", "rank": 87, "score": 42960.835422338656 }, { "content": "/// Needs to be such that: W ^ X == 256, i.e. 
X = log_W(256).\n\npub const X: usize = 1;\n\n\n\n/// Security parameter, PRF output size in bytes.\n\n/// Can be at most 256 / 8 (=16), as long as we instantiate with SHA256.\n\npub const N: usize = 128 / 8;\n\n\n\n/// Message digest length in bytes.\n\nconst M: usize = 128 / 8;\n\n\n\n/// Length of the base `W` representation of a message of length `M`.\n\nconst L1: usize = M * X;\n\n\n\n/// Length of the base `W` checksum of a base `W` message of length `L1`.\n\n/// L2 = floor(log_W(L1 * (W - 1))) + 1\n\nconst L2: usize = 2;\n\n\n\n/// Total number of function chains, i.e. number of N-byte hashes in signature.\n\nconst L: usize = L1 + L2;\n\n\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 89, "score": 41272.304556484174 }, { "content": "\n\n/// Total number of function chains, i.e. number of N-byte hashes in the actual signature.\n\nconst L: usize = L1 + L2;\n\n\n\n/// W-OTS Keypair\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct Wots {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n sk_seed: [u8; N],\n\n}\n\n\n\n/// W-OTS Signature\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct WotsSignature {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n pub signature: Vec<[u8; N]>,\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 91, "score": 41271.80695432725 }, { "content": "impl Wots {\n\n /// Generates a new W-OTS Keypair.\n\n pub fn new() -> Self {\n\n let mut seed = [0u8; N];\n\n thread_rng().fill_bytes(&mut seed);\n\n return Self::from_seed(seed);\n\n }\n\n\n\n /// Creates a W-OTS Keypair from the specified seed.\n\n /// The seed needs to come from a high-entropy cryptographically secure source of randomness.\n\n pub fn from_seed(sk_seed: [u8; N]) -> Self {\n\n let mut pk_hash = [0u8; N];\n\n let mut hasher = blake3::Hasher::new();\n\n\n\n // Calculate public key hash\n\n for i in 0..L {\n\n let secret = prf(&sk_seed, i as u32);\n\n let public = 
chain(&secret, W - 1);\n\n hasher.update(&public);\n\n }\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 93, "score": 41269.42815358564 }, { "content": " let secret = prf(&self.sk_seed, i as u32);\n\n signature.push(chain(&secret, c as usize));\n\n }\n\n\n\n return WotsSignature {\n\n pk_hash: self.pk_hash.clone(),\n\n pk_seed: self.pk_seed.clone(),\n\n signature,\n\n };\n\n }\n\n}\n\n\n\nimpl WotsSignature {\n\n /// Verifies the signature against the public key.\n\n pub fn verify(&self, msg: &str) -> bool {\n\n let cycles = cycles_for_msg(msg, &self.pk_hash);\n\n\n\n // Calculate public key hash\n\n let mut hasher = blake3::Hasher::new();\n\n for (i, &c) in cycles.iter().enumerate() {\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 94, "score": 41268.419513190405 }, { "content": "impl Wots {\n\n /// Generates a new W-OTS Keypair.\n\n pub fn new() -> Self {\n\n let seed: [u8; N] = rand_seed();\n\n return Self::from_seed(seed);\n\n }\n\n\n\n /// Creates a W-OTS Keypair from the specified seed.\n\n pub fn from_seed(sk_seed: [u8; N]) -> Self {\n\n let mut seed = sk_seed.clone();\n\n let mut sk = Vec::<[u8; N]>::with_capacity(L);\n\n let mut pk = Vec::<[u8; N]>::with_capacity(KL);\n\n let mut bitmask = Vec::<[u8; N]>::with_capacity(W - 1);\n\n\n\n // Generate sk_1, ... , sk_L\n\n for _ in 0..L {\n\n let secret = prng(&mut seed);\n\n sk.push(secret);\n\n let public = chain(&secret, W - 1);\n\n pk.push(public);\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 95, "score": 41268.32659285499 }, { "content": "\n\n /// Hashes and then signs an arbitrary input string.\n\n pub fn sign(&self, input: &str) -> WotsSignature {\n\n // Hash input string together with public key hash\n\n let msg_hash = &Sha256::hash(input.as_bytes())[..M];\n\n let mut sha = Sha256::new();\n\n sha.update(&self.pk_hash);\n\n sha.update(msg_hash);\n\n let hash_bytes = &sha.finalize()[..M];\n\n\n\n // Regenerate sk_1, ... 
sk_L.\n\n let mut sk = Vec::<[u8; N]>::with_capacity(L);\n\n let mut seed = self.sk_seed.clone();\n\n for _ in 0..L {\n\n let secret = prng(&mut seed);\n\n sk.push(secret);\n\n }\n\n\n\n // Calculate message signature\n\n let mut signature: Vec<[u8; N]> = Vec::with_capacity(L);\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 96, "score": 41267.884259261664 }, { "content": "pub const W: usize = 256;\n\npub const LOG2_W: usize = 8;\n\n\n\n/// Number of symbols (in base W) in a single byte.\n\n/// Needs to be such that: W ^ X == 256, i.e. X = log_W(256).\n\npub const X: usize = 1;\n\n\n\n/// Security parameter, PRF output size in bytes.\n\n/// Can be at most 256 / 8 (=32), as long as we instantiate with Blake3-256.\n\npub const N: usize = 256 / 8;\n\n\n\n/// Message digest length in bytes.\n\nconst M: usize = 256 / 8;\n\n\n\n/// Length of the base `W` representation of a message of length `M`.\n\nconst L1: usize = M * X;\n\n\n\n/// Length of the base `W` checksum of a base `W` message of length `L1`.\n\n/// `L2 = floor(log_W(L1 * (W - 1))) + 1`\n\nconst L2: usize = 2;\n", "file_path": "signature_benchmark/src/wots_blake3.rs", "rank": 98, "score": 41267.14482939382 }, { "content": "/// Number of N-byte strings in private/public key.\n\nconst KL: usize = L + W - 1;\n\n\n\n/// W-OTS Keypair\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct Wots {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n sk_seed: [u8; N],\n\n}\n\n\n\n/// W-OTS Signature\n\n#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Default)]\n\npub struct WotsSignature {\n\n pub pk_hash: [u8; N],\n\n pub pk_seed: [u8; N],\n\n pub msg_hash: [u8; M],\n\n pub signature: Vec<[u8; N]>,\n\n}\n\n\n", "file_path": "signature_benchmark/src/wots_sha.rs", "rank": 99, "score": 41266.85735637912 } ]
Rust
src/stations.rs
danielrs/pandora-rs
494ab0aef0ddb9959cdae6c6a6fb5a8ab161a7d0
use super::Pandora; use error::Result; use method::Method; use music::{ToMusicToken, MusicType}; use playlist::Playlist; use serde_json; pub struct Stations<'a> { pandora: &'a Pandora, } impl<'a> Stations<'a> { pub fn new(pandora: &'a Pandora) -> Stations<'a> { Stations { pandora: pandora } } pub fn list(&self) -> Result<Vec<Station>> { let stations = try!(self.pandora .post::<StationList>(Method::UserGetStationList, None)); Ok(stations.stations) } pub fn create<T>(&self, music_token: &T) -> Result<Station> where T: ToMusicToken { self.pandora .post(Method::StationCreateStation, Some(serde_json::to_value(CreateStationRequest { track_token: None, music_type: None, music_token: Some(music_token.to_music_token()), }))) } pub fn rename<T>(&self, station: &T, station_name: &str) -> Result<Station> where T: ToStationToken { self.pandora .post(Method::StationRenameStation, Some(serde_json::to_value(RenameStationRequest { station_token: station.to_station_token(), station_name: station_name.to_owned(), }))) } pub fn delete<T>(&self, station: &T) -> Result<()> where T: ToStationToken { self.pandora .post_noop(Method::StationDeleteStation, Some(serde_json::to_value(DeleteStationRequest { station_token: station.to_station_token(), }))) } pub fn add_seed<S, T>(&self, station: &S, music_token: &T) -> Result<Seed> where S: ToStationToken, T: ToMusicToken { self.pandora .post(Method::StationAddMusic, Some(serde_json::to_value(AddSeedRequest { station_token: station.to_station_token(), music_token: music_token.to_music_token(), }))) } pub fn remove_seed(&self, seed: &Seed) -> Result<()> { self.pandora .post(Method::StationDeleteMusic, Some(serde_json::to_value(RemoveSeedRequest { seed_id: seed.seed_id.clone() }))) } pub fn station<T>(&self, station: &T) -> Result<Station> where T: ToStationToken { self.pandora .post(Method::StationGetStation, Some(serde_json::to_value(GetStationRequest { station_token: station.to_station_token(), include_extended_attributes: true, }))) } pub fn 
checksum(&self) -> Result<StationListChecksum> { self.pandora.post(Method::UserGetStationListChecksum, None) } pub fn playlist<T>(&self, station: &T) -> Playlist where T: ToStationToken { Playlist::new(self.pandora, station) } } pub trait ToStationToken { fn to_station_token(&self) -> String; } #[derive(Debug, Clone, Deserialize)] pub struct Station { #[serde(rename="stationId")] pub station_id: String, #[serde(rename="stationName")] pub station_name: String, } impl ToStationToken for Station { fn to_station_token(&self) -> String { self.station_id.clone() } } #[derive(Debug, Deserialize)] struct StationList { pub stations: Vec<Station>, pub checksum: String, } #[derive(Deserialize)] pub struct StationListChecksum { pub checksum: String, } #[derive(Debug, Deserialize)] pub struct ExtendedStation { #[serde(rename="stationId")] pub station_id: String, #[serde(rename="stationName")] pub station_name: String, #[serde(rename="artUrl")] pub art_url: Option<String>, pub music: Option<StationMusic>, } #[derive(Debug, Deserialize)] pub struct StationMusic { pub songs: Vec<SongSeed>, pub artists: Vec<ArtistSeed>, pub genre: Option<Vec<GenreSeed>>, } #[derive(Debug, Deserialize)] pub struct Seed { #[serde(rename="seedId")] pub seed_id: String, } #[derive(Debug, Deserialize)] pub struct SongSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub artist_name: String, #[serde(rename="artUrl")] pub art_url: String, #[serde(rename="songName")] pub song_name: String, #[serde(rename="musicToken")] pub music_token: String, } #[derive(Debug, Deserialize)] pub struct ArtistSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub artist_name: String, #[serde(rename="artUrl")] pub art_url: String, #[serde(rename="musicToken")] pub music_token: String, } #[derive(Debug, Deserialize)] pub struct GenreSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub genre_name: String, 
#[serde(rename="musicToken")] pub music_token: String, } #[derive(Serialize)] struct CreateStationRequest { #[serde(rename="trackToken")] track_token: Option<String>, #[serde(rename="musicType")] music_type: Option<MusicType>, #[serde(rename="musicToken")] music_token: Option<String>, } #[derive(Serialize)] struct RenameStationRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="stationName")] station_name: String, } #[derive(Serialize)] struct DeleteStationRequest { #[serde(rename="stationToken")] station_token: String, } #[derive(Serialize)] struct GetStationRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="includeExtendedAttributes")] include_extended_attributes: bool, } #[derive(Serialize)] struct AddSeedRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="musicToken")] music_token: String, } #[derive(Serialize)] struct RemoveSeedRequest { #[serde(rename="seedId")] seed_id: String, }
use super::Pandora; use error::Result; use method::Method; use music::{ToMusicToken, MusicType}; use playlist::Playlist; use serde_json; pub struct Stations<'a> { pandora: &'a Pandora, } impl<'a> Stations<'a> { pub fn new(pandora: &'a Pandora) -> Stations<'a> { Stations { pandora: pandora } } pub fn list(&self) -> Result<Vec<Station>> { let stations = try!(self.pandora .post::<StationList>(Method::UserGetStationList, None)); Ok(stations.stations) } pub fn create<T>(&self, music_token: &T) -> Result<Station> where T: ToMusicToken { self.pandora .post(Method::StationCreateStation, Some(serde_json::to_value(CreateStationRequest { track_token: None, music_type: None, music_token: Some(music_token.to_music_token()), }))) } pub fn rename<T>(&self, station: &T, station_name: &str) -> Result<Station> where T: ToStationToken { self.pandora .post(Method::StationRenameStation, Some(serde_json::to_value(RenameStationRequest { station_token: station.to_station_token(), station_name: station_name.to_owned(), }))) } pub fn delete<T>(&self, station: &T) -> Result<()>
pub fn add_seed<S, T>(&self, station: &S, music_token: &T) -> Result<Seed> where S: ToStationToken, T: ToMusicToken { self.pandora .post(Method::StationAddMusic, Some(serde_json::to_value(AddSeedRequest { station_token: station.to_station_token(), music_token: music_token.to_music_token(), }))) } pub fn remove_seed(&self, seed: &Seed) -> Result<()> { self.pandora .post(Method::StationDeleteMusic, Some(serde_json::to_value(RemoveSeedRequest { seed_id: seed.seed_id.clone() }))) } pub fn station<T>(&self, station: &T) -> Result<Station> where T: ToStationToken { self.pandora .post(Method::StationGetStation, Some(serde_json::to_value(GetStationRequest { station_token: station.to_station_token(), include_extended_attributes: true, }))) } pub fn checksum(&self) -> Result<StationListChecksum> { self.pandora.post(Method::UserGetStationListChecksum, None) } pub fn playlist<T>(&self, station: &T) -> Playlist where T: ToStationToken { Playlist::new(self.pandora, station) } } pub trait ToStationToken { fn to_station_token(&self) -> String; } #[derive(Debug, Clone, Deserialize)] pub struct Station { #[serde(rename="stationId")] pub station_id: String, #[serde(rename="stationName")] pub station_name: String, } impl ToStationToken for Station { fn to_station_token(&self) -> String { self.station_id.clone() } } #[derive(Debug, Deserialize)] struct StationList { pub stations: Vec<Station>, pub checksum: String, } #[derive(Deserialize)] pub struct StationListChecksum { pub checksum: String, } #[derive(Debug, Deserialize)] pub struct ExtendedStation { #[serde(rename="stationId")] pub station_id: String, #[serde(rename="stationName")] pub station_name: String, #[serde(rename="artUrl")] pub art_url: Option<String>, pub music: Option<StationMusic>, } #[derive(Debug, Deserialize)] pub struct StationMusic { pub songs: Vec<SongSeed>, pub artists: Vec<ArtistSeed>, pub genre: Option<Vec<GenreSeed>>, } #[derive(Debug, Deserialize)] pub struct Seed { #[serde(rename="seedId")] pub seed_id: 
String, } #[derive(Debug, Deserialize)] pub struct SongSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub artist_name: String, #[serde(rename="artUrl")] pub art_url: String, #[serde(rename="songName")] pub song_name: String, #[serde(rename="musicToken")] pub music_token: String, } #[derive(Debug, Deserialize)] pub struct ArtistSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub artist_name: String, #[serde(rename="artUrl")] pub art_url: String, #[serde(rename="musicToken")] pub music_token: String, } #[derive(Debug, Deserialize)] pub struct GenreSeed { #[serde(rename="seedId")] pub seed_id: String, #[serde(rename="artistName")] pub genre_name: String, #[serde(rename="musicToken")] pub music_token: String, } #[derive(Serialize)] struct CreateStationRequest { #[serde(rename="trackToken")] track_token: Option<String>, #[serde(rename="musicType")] music_type: Option<MusicType>, #[serde(rename="musicToken")] music_token: Option<String>, } #[derive(Serialize)] struct RenameStationRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="stationName")] station_name: String, } #[derive(Serialize)] struct DeleteStationRequest { #[serde(rename="stationToken")] station_token: String, } #[derive(Serialize)] struct GetStationRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="includeExtendedAttributes")] include_extended_attributes: bool, } #[derive(Serialize)] struct AddSeedRequest { #[serde(rename="stationToken")] station_token: String, #[serde(rename="musicToken")] music_token: String, } #[derive(Serialize)] struct RemoveSeedRequest { #[serde(rename="seedId")] seed_id: String, }
where T: ToStationToken { self.pandora .post_noop(Method::StationDeleteStation, Some(serde_json::to_value(DeleteStationRequest { station_token: station.to_station_token(), }))) }
function_block-function_prefix_line
[ { "content": "/// Returns the encrypted input using the given key.\n\n///\n\n/// The returned string is encoded in hexadecimal notation,\n\n/// which is a UTF-8 string, so it's fine to return it using\n\n/// the `String` type.\n\npub fn encrypt(key: &str, input: &str) -> String {\n\n let cipherbytes = cipher_with(key.as_bytes(),\n\n input.as_bytes(),\n\n |blowfish, from, mut to| { blowfish.encrypt_block(from, to); });\n\n\n\n // Generate hexadecimal representation of `cipherbytes`.\n\n let mut output = String::with_capacity(cipherbytes.len() * 2);\n\n for b in cipherbytes {\n\n output.push_str(&format!(\"{:02x}\", b));\n\n }\n\n output\n\n}\n\n\n", "file_path": "src/crypt.rs", "rank": 0, "score": 96840.80176143014 }, { "content": "/// Returns the decrypted input using the given key.\n\n///\n\n/// Because Strings must be UTF-8 compilant, and decrypting\n\n/// doesn't guarantees an UTF-8 string, we return\n\n/// a OsString which doesn't have to be UTF-8 compilant.\n\npub fn decrypt(key: &str, hex_input: &str) -> OsString {\n\n use std::u8;\n\n use std::str;\n\n use std::ffi::OsStr;\n\n use std::os::unix::ffi::OsStrExt;\n\n\n\n // Gets bytes from hexadecimal representation.\n\n let mut input = Vec::with_capacity(hex_input.len());\n\n for chunk in hex_input.as_bytes().chunks(2) {\n\n // `chunk` is utf-8 since it is comming from &str.\n\n let fragment = unsafe { str::from_utf8_unchecked(chunk) };\n\n let byte = u8::from_str_radix(fragment, 16).unwrap_or(0);\n\n input.push(byte);\n\n }\n\n\n\n let mut cipherbytes = cipher_with(key.as_bytes(), &input, |blowfish, from, mut to| {\n\n blowfish.decrypt_block(from, to);\n\n });\n\n\n\n // Ignore up to `PADDING_BYTE`.\n\n if let Some(index) = cipherbytes.iter().position(|&b| b == PADDING_BYTE) {\n\n cipherbytes.truncate(index);\n\n }\n\n\n\n OsStr::from_bytes(&cipherbytes).to_owned()\n\n}\n\n\n", "file_path": "src/crypt.rs", "rank": 1, "score": 92513.97802213233 }, { "content": "pub fn request<T>(client: &Client,\n\n 
http_method: &HttpMethod,\n\n endpoint: Endpoint,\n\n method: Method,\n\n body: Option<Value>,\n\n credentials: Option<&Credentials>)\n\n -> Result<T>\n\n where T: Deserialize\n\n{\n\n\n\n let mut body = try!(serde_json::to_string(&authenticate_body(body, credentials)));\n\n if method.is_encrypted() {\n\n if let Some(credentials) = credentials {\n\n body = crypt::encrypt(credentials.encrypt_key(), &body);\n\n }\n\n }\n\n\n\n let builder = authenticate(client, http_method, endpoint, method, credentials);\n\n\n\n let mut res = try!(builder.body(&body).send());\n", "file_path": "src/request.rs", "rank": 8, "score": 61771.92274364139 }, { "content": "/// Trait for types that can return a music token for seeding.\n\npub trait ToMusicToken {\n\n fn to_music_token(&self) -> String;\n\n}\n\n\n\nimpl ToMusicToken for String {\n\n fn to_music_token(&self) -> String {\n\n self.clone()\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub enum MusicType {\n\n #[serde(rename=\"song\")]\n\n Song,\n\n #[serde(rename=\"artist\")]\n\n Artist,\n\n}\n\n\n\n/// Song information.\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/music.rs", "rank": 11, "score": 53455.39938468723 }, { "content": "fn main() {\n\n let res = Pandora::new(\"[email protected]\", \"johndoe\"); // <- real login here\n\n match res {\n\n Ok(pandora) => {\n\n let station_handler = pandora.stations();\n\n for station in station_handler.list().unwrap() {\n\n if station.station_name == \"Magical\".to_owned() {\n\n println!(\"== Tracks for \\\"{}\\\"\", station.station_name);\n\n let playlist = station_handler.playlist(&station);\n\n let tracklist = playlist.list().unwrap();\n\n\n\n for track in tracklist {\n\n println!(\"{:?}\", track);\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => println!(\"Err: {:?}\", e),\n\n }\n\n}\n", "file_path": "examples/main/main.rs", "rank": 12, "score": 39247.27178458407 }, { "content": "#[derive(Serialize)]\n\nstruct Search {\n\n #[serde(rename=\"searchText\")]\n\n search_text: 
String,\n\n #[serde(rename=\"includeNearMatches\")]\n\n include_near_matches: bool,\n\n}\n\n\n\n/// Search results with both the songs and the artists that matched\n\n/// the search string.\n\n#[derive(Debug, Deserialize)]\n\npub struct SearchResults {\n\n #[serde(rename=\"nearMatchesAvailable\")]\n\n near_matches_available: bool,\n\n songs: Vec<Song>,\n\n artists: Vec<Artist>,\n\n}\n\n\n\nimpl SearchResults {\n\n /// Returns true if near matches are available.\n\n pub fn near_matches_available(&self) -> bool {\n", "file_path": "src/music.rs", "rank": 13, "score": 38731.04078437439 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Tracklist {\n\n pub items: Vec<Track>,\n\n}\n\n\n\n/// Track information. Most fields are optional since\n\n/// the tracklist can include ads.\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct Track {\n\n #[serde(rename=\"trackToken\")]\n\n pub track_token: Option<String>,\n\n #[serde(rename=\"artistName\")]\n\n pub artist_name: Option<String>,\n\n #[serde(rename=\"albumName\")]\n\n pub album_name: Option<String>,\n\n #[serde(rename=\"songName\")]\n\n pub song_name: Option<String>,\n\n #[serde(rename=\"songRating\")]\n\n pub song_rating: Option<u32>,\n\n\n\n #[serde(rename=\"audioUrlMap\")]\n", "file_path": "src/playlist.rs", "rank": 14, "score": 38731.04078437439 }, { "content": "#[derive(Serialize)]\n\nstruct TracklistRequest {\n\n #[serde(rename=\"stationToken\")]\n\n station_token: String,\n\n}\n\n\n", "file_path": "src/playlist.rs", "rank": 15, "score": 37428.93731475745 }, { "content": "/// Trait for types that return a track token.\n\npub trait ToTrackToken {\n\n fn to_track_token(&self) -> Option<String>;\n\n}\n\n\n\n/// List of tracks.\n", "file_path": "src/playlist.rs", "rank": 16, "score": 36962.572153026194 }, { "content": "#[derive(Serialize)]\n\nstruct UserLoginRequest {\n\n #[serde(rename=\"loginType\")]\n\n login_type: String,\n\n username: String,\n\n password: String,\n\n}\n\n\n\nimpl UserLoginRequest {\n\n 
pub fn new(username: String, password: String) -> Self {\n\n UserLoginRequest {\n\n login_type: \"user\".to_owned(),\n\n username: username,\n\n password: password,\n\n }\n\n }\n\n}\n", "file_path": "src/auth.rs", "rank": 17, "score": 36261.75201073954 }, { "content": "#[derive(Serialize)]\n\nstruct RateTrackRequest {\n\n #[serde(rename=\"stationToken\")]\n\n station_token: String,\n\n #[serde(rename=\"trackToken\")]\n\n track_token: String,\n\n #[serde(rename=\"isPositive\")]\n\n is_positive: bool,\n\n}\n", "file_path": "src/playlist.rs", "rank": 18, "score": 36261.75201073954 }, { "content": "/// Returns a RequestBuilder with the HTTP method and URL set. The URL query string\n\n/// will include the auth information if credentials were provided.\n\nfn authenticate<'a>(client: &'a Client,\n\n http_method: &HttpMethod,\n\n endpoint: Endpoint,\n\n method: Method,\n\n credentials: Option<&Credentials>)\n\n -> RequestBuilder<'a> {\n\n\n\n let url = format!(\"{}?method={}\", endpoint.to_string(), method.to_string());\n\n let mut url = Url::parse(&url).unwrap();\n\n\n\n if let Some(credentials) = credentials {\n\n use std::collections::BTreeMap;\n\n let mut query_pairs: BTreeMap<&str, &str> = BTreeMap::new();\n\n if let Some(partner_auth_token) = credentials.partner_auth_token() {\n\n query_pairs.insert(\"auth_token\", partner_auth_token);\n\n }\n\n if let Some(user_auth_token) = credentials.user_auth_token() {\n\n query_pairs.insert(\"auth_token\", user_auth_token);\n\n }\n\n if let Some(partner_id) = credentials.partner_id() {\n", "file_path": "src/request.rs", "rank": 19, "score": 32643.245678908752 }, { "content": "/// Rounds the given len so that it contains blocks\n\n/// of the same size.\n\nfn round_len(len: usize, block_size: usize) -> usize {\n\n let remainder = len % block_size;\n\n if remainder == 0 {\n\n len\n\n } else {\n\n len + block_size - remainder\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{encrypt, decrypt};\n\n use 
std::ffi::OsString;\n\n\n\n struct Test {\n\n key: String,\n\n plain_text: String,\n\n cipher_text: String,\n\n }\n\n\n", "file_path": "src/crypt.rs", "rank": 20, "score": 27357.631563323466 }, { "content": "/// Returns the authenticated body.\n\n///\n\n/// # Arguments\n\n/// * `body` - If no body is provided a new object is created instead. If a body is provided\n\n/// but is not an object, then the function does nothing and returns the same body.\n\n/// * `credentials` - The credentials to use when adding the auth information to the body.\n\nfn authenticate_body(body: Option<Value>, credentials: Option<&Credentials>) -> Value {\n\n let mut body = match body {\n\n Some(body) => body,\n\n None => serde_json::to_value(serde_json::Map::<String, Value>::new()),\n\n };\n\n\n\n if let Some(credentials) = credentials {\n\n if let Some(obj) = body.as_object_mut() {\n\n if let Some(partner_auth_token) = credentials.partner_auth_token() {\n\n obj.insert(\"partnerAuthToken\".to_owned(),\n\n Value::String(partner_auth_token.to_owned()));\n\n }\n\n if let Some(sync_time) = credentials.sync_time() {\n\n obj.insert(\"syncTime\".to_owned(), Value::U64(sync_time.clone()));\n\n }\n\n if let Some(user_auth_token) = credentials.user_auth_token() {\n\n obj.insert(\"userAuthToken\".to_owned(),\n\n Value::String(user_auth_token.to_owned()));\n\n }\n\n }\n\n }\n\n\n\n debug!(\"== Body created ==\\n{:?}\", body);\n\n body\n\n}\n", "file_path": "src/request.rs", "rank": 21, "score": 25334.426053629344 }, { "content": "/// Divides the input in blocks and ciphers it using the given closure.\n\nfn cipher_with<F>(key: &[u8], input: &[u8], func: F) -> Vec<u8>\n\n where F: Fn(&Blowfish, &[u8], &mut [u8])\n\n{\n\n\n\n let blowfish = Blowfish::new(key);\n\n let block_size = <Blowfish as BlockEncryptor>::block_size(&blowfish);\n\n\n\n // Input and output bytes\n\n let input_len = round_len(input.len(), block_size);\n\n let mut input = input.to_vec();\n\n input.resize(input_len, 
PADDING_BYTE);\n\n\n\n let mut output: Vec<u8> = Vec::with_capacity(input_len);\n\n unsafe {\n\n output.set_len(input_len);\n\n }\n\n\n\n // Encrypts input and into output\n\n for (ichunk, mut ochunk) in input.chunks(block_size).zip(output.chunks_mut(block_size)) {\n\n func(&blowfish, ichunk, ochunk);\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "src/crypt.rs", "rank": 31, "score": 23675.92604301807 }, { "content": "[crates.io]: https://crates.io/crates/pandora\n\n[docs.rs]: https://docs.rs/pandora\n\n[hubcaps]: https://github.com/softprops/hubcaps\n\n\n\n[![crates.io](https://img.shields.io/crates/v/pandora.svg)][crates.io]\n\n[![docs.rs](https://docs.rs/pandora/badge.svg)][docs.rs]\n\n\n\n## pandora-rs\n\n\n\nUnofficial Rust library for interacting with the Pandora API. Inspired by the design of [hubcaps][hubcaps]. Check included example for usage and the [documentation][docs.rs] for detailed reference.\n", "file_path": "README.md", "rank": 32, "score": 15574.031330224121 }, { "content": "//! 
Playlists for a [Station](stations/struct.Stations.html).\n\n\n\nuse super::Pandora;\n\nuse error::Result;\n\nuse method::Method;\n\nuse stations::ToStationToken;\n\n\n\nuse serde_json;\n\n\n\n/// Handler for Playlists.\n\n#[derive(Debug)]\n\npub struct Playlist<'a> {\n\n pandora: &'a Pandora,\n\n station_token: String,\n\n}\n\n\n\nimpl<'a> Playlist<'a> {\n\n /// Creates a new Playlist handler.\n\n pub fn new<T>(pandora: &'a Pandora, station: &T) -> Playlist<'a>\n\n where T: ToStationToken\n", "file_path": "src/playlist.rs", "rank": 33, "score": 12.891052334969904 }, { "content": "pub mod music;\n\npub mod playlist;\n\npub mod request;\n\nmod response;\n\npub mod stations;\n\n\n\npub use auth::Credentials;\n\npub use playlist::Track;\n\npub use stations::Station;\n\n\n\n//////////\n\n// Module\n\n/////////\n\n\n\nuse error::{Error, Result};\n\nuse method::Method;\n\nuse music::Music;\n\nuse request::request;\n\nuse stations::Stations;\n\n\n", "file_path": "src/lib.rs", "rank": 34, "score": 11.222263584576211 }, { "content": " pandora: &'a Pandora,\n\n}\n\n\n\nimpl<'a> Music<'a> {\n\n /// Creates a new Music handler.\n\n pub fn new(pandora: &'a Pandora) -> Music<'a> {\n\n Music { pandora: pandora }\n\n }\n\n\n\n /// Searches for music using the given search string.\n\n pub fn search(&self, search_text: &str) -> Result<SearchResults> {\n\n self.pandora\n\n .post(Method::MusicSearch,\n\n Some(serde_json::to_value(Search {\n\n search_text: search_text.to_owned(),\n\n include_near_matches: true,\n\n })))\n\n }\n\n}\n", "file_path": "src/music.rs", "rank": 35, "score": 10.402898998325313 }, { "content": " pub fn new(username: &str, password: &str) -> Result<Self> {\n\n let credentials = try!(Credentials::new(username, password));\n\n Ok(Pandora::with_credentials(credentials))\n\n }\n\n\n\n /// Creates a new Pandora instance from the given credentials.\n\n pub fn with_credentials(credentials: Credentials) -> Self {\n\n Pandora {\n\n client: Client::new(),\n\n endpoint: 
DEFAULT_ENDPOINT,\n\n credentials: Mutex::new(RefCell::new(credentials)),\n\n }\n\n }\n\n\n\n /// Returns an instance of [Music](struct.Music.html).\n\n pub fn music(&self) -> Music {\n\n Music::new(self)\n\n }\n\n\n\n /// Returns an instance of [Stations](struct.Stations.html).\n", "file_path": "src/lib.rs", "rank": 36, "score": 10.08441333531666 }, { "content": " {\n\n Playlist {\n\n pandora: pandora,\n\n station_token: station.to_station_token(),\n\n }\n\n }\n\n\n\n /// Gets the current tracklist from Pandora.\n\n pub fn list(&self) -> Result<Vec<Track>> {\n\n let tracklist = try!(self.pandora.post::<Tracklist>(\n\n Method::StationGetPlaylist,\n\n Some(serde_json::to_value(TracklistRequest {\n\n station_token: self.station_token.clone()\n\n }))\n\n ));\n\n Ok(tracklist.items)\n\n }\n\n\n\n // TODO: Result should not be empty\n\n /// Rates a track.\n", "file_path": "src/playlist.rs", "rank": 37, "score": 9.881554281234882 }, { "content": "impl<T> Response<T> {\n\n pub fn from_result(result: T) -> Self {\n\n Response {\n\n stat: Stat::Ok,\n\n result: Some(result),\n\n message: None,\n\n code: None,\n\n }\n\n }\n\n\n\n pub fn from_error(message: String, code: u32) -> Self {\n\n Response {\n\n stat: Stat::Fail,\n\n result: None,\n\n message: Some(message),\n\n code: Some(code),\n\n }\n\n }\n\n}\n", "file_path": "src/response.rs", "rank": 38, "score": 8.286702306647719 }, { "content": " None => None,\n\n }\n\n }\n\n\n\n /// Returns a reference to the user id.\n\n pub fn user_id<'a>(&'a self) -> Option<&'a str> {\n\n match self.user_id {\n\n Some(ref user_id) => Some(user_id.as_str()),\n\n None => None,\n\n }\n\n }\n\n\n\n /// Returns a reference to the user authorization token.\n\n pub fn user_auth_token<'a>(&'a self) -> Option<&'a str> {\n\n match self.user_auth_token {\n\n Some(ref user_auth_token) => Some(user_auth_token.as_str()),\n\n None => None,\n\n }\n\n }\n\n\n", "file_path": "src/auth.rs", "rank": 39, "score": 8.284856211854658 }, { "content": "/// Struct 
for deserializing Pandora API responses.\n\n\n\n/// Enum for checking Pandora API responses of success (ok) or error (fail).\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub enum Stat {\n\n #[serde(rename=\"ok\")]\n\n Ok,\n\n #[serde(rename=\"fail\")]\n\n Fail,\n\n}\n\n\n\n/// Type for deserializing a Pandora API reponse.\n\n#[derive(Debug, Deserialize)]\n\npub struct Response<T> {\n\n pub stat: Stat,\n\n pub result: Option<T>,\n\n pub message: Option<String>,\n\n pub code: Option<u32>,\n\n}\n\n\n", "file_path": "src/response.rs", "rank": 40, "score": 8.132621249523377 }, { "content": " /// Returns a reference to the partner id.\n\n pub fn partner_id<'a>(&'a self) -> Option<&'a str> {\n\n match self.partner_id {\n\n Some(ref partner_id) => Some(partner_id.as_str()),\n\n None => None,\n\n }\n\n }\n\n\n\n /// Returns a reference to the partner authorization token.\n\n pub fn partner_auth_token<'a>(&'a self) -> Option<&'a str> {\n\n match self.partner_auth_token {\n\n Some(ref partner_auth_token) => Some(partner_auth_token.as_str()),\n\n None => None,\n\n }\n\n }\n\n\n\n /// Returns a reference to the synchronization time.\n\n pub fn sync_time<'a>(&'a self) -> Option<&'a u64> {\n\n match self.sync_time {\n\n Some(ref sync_time) => Some(&sync_time),\n", "file_path": "src/auth.rs", "rank": 41, "score": 7.635790673818022 }, { "content": "//! Traits and structs for Songs, Artists, and Searches.\n\n\n\nuse super::Pandora;\n\nuse error::Result;\n\nuse method::Method;\n\n\n\nuse serde_json;\n\n\n\n/// Trait for types that can return a music token for seeding.\n", "file_path": "src/music.rs", "rank": 42, "score": 7.563535614111247 }, { "content": "//! Most of the structs in this module are only used for generating\n\n//! the required [Credentials](struct.Credentials.html) needed to\n\n//! 
start using [Pandora](../struct.Pandora.html).\n\n\n\nuse super::DEFAULT_ENDPOINT;\n\nuse crypt::decrypt;\n\nuse error::Result;\n\nuse method::Method;\n\nuse request::request;\n\n\n\nuse hyper::client::Client;\n\nuse hyper::method::Method as HttpMethod;\n\n\n\nuse serde_json;\n\n\n\n/// Authentication details used in each request. Remember that Pandora puts\n\n/// an expiration date on a set of credentials, so they need to be\n\n/// created again regularly.\n\n///\n\n/// Most fields are optional due to the way Pandora API works, they have an\n", "file_path": "src/auth.rs", "rank": 43, "score": 7.365432779137487 }, { "content": " pub fn rate<T>(&self, track: T, is_positive: bool) -> Result<()>\n\n where T: ToTrackToken\n\n {\n\n self.pandora\n\n .post_noop(Method::StationAddFeedback,\n\n Some(serde_json::to_value(RateTrackRequest {\n\n station_token: self.station_token.clone(),\n\n track_token: track\n\n .to_track_token()\n\n .unwrap_or(\"\".to_owned()),\n\n is_positive: is_positive,\n\n })))\n\n }\n\n}\n\n\n\n/// Trait for types that return a track token.\n", "file_path": "src/playlist.rs", "rank": 44, "score": 7.336876175526381 }, { "content": "//! 
Methods defined by the unofficial Pandora API.\n\n\n\n/// The Pandora method.\n\n#[derive(Debug, Copy, Clone)]\n\npub enum Method {\n\n TestCheckLicensing,\n\n\n\n AuthPartnerLogin,\n\n AuthUserLogin,\n\n\n\n MusicSearch,\n\n\n\n UserGetStationList,\n\n UserGetStationListChecksum,\n\n\n\n StationCreateStation,\n\n StationRenameStation,\n\n StationDeleteStation,\n\n\n\n StationGetStation,\n", "file_path": "src/method.rs", "rank": 45, "score": 7.204798037607446 }, { "content": "use hyper::client::Client;\n\nuse hyper::method::Method as HttpMethod;\n\nuse serde::Deserialize;\n\nuse serde_json::value::Value;\n\n\n\nuse std::sync::Mutex;\n\nuse std::cell::RefCell;\n\n\n\n/// Main interface for interacting with the Pandora API.\n\n/// A Pandora instance is thread-safe, since it doesn't\n\n/// really uses any state; only the credentials, which\n\n/// are protected by a Mutex.\n\n#[derive(Debug)]\n\npub struct Pandora {\n\n client: Client,\n\n endpoint: Endpoint<'static>,\n\n credentials: Mutex<RefCell<Credentials>>,\n\n}\n\n\n\nimpl Pandora {\n", "file_path": "src/lib.rs", "rank": 46, "score": 7.108751850107974 }, { "content": " pub fn stations(&self) -> Stations {\n\n Stations::new(self)\n\n }\n\n\n\n /// Proxy method for GET requests.\n\n pub fn get<T>(&self, method: Method, body: Option<Value>) -> Result<T>\n\n where T: Deserialize\n\n {\n\n self.request(HttpMethod::Get, method, body)\n\n }\n\n\n\n /// Proxy method for GET requests that do not return data.\n\n pub fn get_noop(&self, method: Method, body: Option<Value>) -> Result<()> {\n\n self.request::<()>(HttpMethod::Get, method, body)\n\n }\n\n\n\n /// Proxy method for POST requests.\n\n pub fn post<T>(&self, method: Method, body: Option<Value>) -> Result<T>\n\n where T: Deserialize\n\n {\n", "file_path": "src/lib.rs", "rank": 47, "score": 7.083316388141953 }, { "content": "//! 
Enumerated errors for this API.\n\n\n\nuse std::error::Error as StdError;\n\nuse std::io::Error as IoError;\n\n\n\nuse hyper::error::Error as HttpError;\n\n\n\nuse serde_json::error::Error as CodecError;\n\n\n\n/// Specialized result.\n\npub type Result<T> = ::std::result::Result<T, Error>;\n\n\n\n/// Composite error type for the library.\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(IoError),\n\n Codec(CodecError),\n\n Http(HttpError),\n\n Api { message: String, code: ApiErrorCode },\n\n}\n", "file_path": "src/error.rs", "rank": 48, "score": 7.073286563823165 }, { "content": " Err(Error::Codec(_)) => Ok(()),\n\n Err(err) => Err(err),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Endpoint of the Pandora API\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Endpoint<'a>(&'a str);\n\n\n\nimpl<'a> ToString for Endpoint<'a> {\n\n fn to_string(&self) -> String {\n\n let Endpoint(url) = *self;\n\n url.to_owned()\n\n }\n\n}\n\n\n\npub const ENDPOINTS: [Endpoint<'static>; 4] =\n\n [Endpoint(\"http://tuner.pandora.com/services/json/\"),\n\n Endpoint(\"https://tuner.pandora.com/services/json/\"),\n\n Endpoint(\"http://internal-tuner.pandora.com/services/json/\"),\n\n Endpoint(\"https://internal-tuner.pandora.com/services/json/\")];\n\npub const DEFAULT_ENDPOINT: Endpoint<'static> = ENDPOINTS[0];\n", "file_path": "src/lib.rs", "rank": 49, "score": 6.138620685818719 }, { "content": "\n\nimpl<'a> ToTrackToken for &'a Track {\n\n fn to_track_token(&self) -> Option<String> {\n\n match self.track_token {\n\n Some(ref track_token) => Some(track_token.clone()),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\n/// Struct for deserializing audio types for a track.\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct TrackAudio {\n\n #[serde(rename=\"lowQuality\")]\n\n pub low_quality: Audio,\n\n #[serde(rename=\"mediumQuality\")]\n\n pub medium_quality: Audio,\n\n #[serde(rename=\"highQuality\")]\n\n pub high_quality: Audio,\n\n}\n", "file_path": "src/playlist.rs", "rank": 50, "score": 
6.104468836565733 }, { "content": "extern crate pandora;\n\nextern crate serde_json;\n\n\n\nuse pandora::Pandora;\n\n\n", "file_path": "examples/main/main.rs", "rank": 51, "score": 6.061083220448431 }, { "content": " /// Returns a reference to the username.\n\n pub fn username(&self) -> &str {\n\n &self.username\n\n }\n\n\n\n /// Returns a reference to the password.\n\n pub fn password(&self) -> &str {\n\n &self.password\n\n }\n\n\n\n /// Returns a reference to the encryption key.\n\n pub fn encrypt_key(&self) -> &str {\n\n &self.encrypt_key\n\n }\n\n\n\n /// Returns a reference to the decryption key.\n\n pub fn decrypt_key(&self) -> &str {\n\n &self.decrypt_key\n\n }\n\n\n", "file_path": "src/auth.rs", "rank": 52, "score": 6.038737544845915 }, { "content": "//! Common functions for interacting with the unofficial Pandora API.\n\n\n\nuse super::Endpoint;\n\nuse auth::Credentials;\n\nuse crypt;\n\nuse error::{Error, Result};\n\nuse method::Method;\n\nuse response::{Stat, Response};\n\n\n\nuse std::io::Read;\n\n\n\nuse hyper::client::{RequestBuilder, Client};\n\nuse hyper::header::ContentLength;\n\nuse hyper::method::Method as HttpMethod;\n\n\n\nuse serde::Deserialize;\n\nuse serde::ser::Error as SerdeError;\n\nuse serde_json;\n\nuse serde_json::value::Value;\n\nuse serde_json::error::Error as JsonError;\n\n\n\nuse url::Url;\n\n\n\n\n", "file_path": "src/request.rs", "rank": 53, "score": 5.8566653618160665 }, { "content": " sync_time: Option<u64>,\n\n\n\n // User info.\n\n user_id: Option<String>,\n\n user_auth_token: Option<String>,\n\n}\n\n\n\nimpl Credentials {\n\n /// Creates new credentials from the given user and password.\n\n pub fn new(username: &str, password: &str) -> Result<Self> {\n\n let client = Client::new();\n\n let partner = Partner::default();\n\n let mut credentials = Credentials {\n\n username: username.to_owned(),\n\n password: password.to_owned(),\n\n\n\n encrypt_key: partner.encrypt_password.clone(),\n\n decrypt_key: 
partner.decrypt_password.clone(),\n\n\n\n partner_id: None,\n", "file_path": "src/auth.rs", "rank": 54, "score": 5.7466262608625085 }, { "content": " self.near_matches_available\n\n }\n\n\n\n /// Returns the songs in the search results.\n\n pub fn songs<'a>(&'a self) -> &'a [Song] {\n\n &self.songs\n\n }\n\n\n\n /// Returns the artists in the search results.\n\n pub fn artists<'a>(&'a self) -> &'a [Artist] {\n\n &self.artists\n\n }\n\n}\n\n\n\n////////////////////\n\n// Main struct\n\n////////////////////\n\n\n\n/// Music struct for searching songs and artists.\n\npub struct Music<'a> {\n", "file_path": "src/music.rs", "rank": 55, "score": 5.385778029724146 }, { "content": " let mut body = match res.headers.clone().get::<ContentLength>() {\n\n Some(&ContentLength(len)) => String::with_capacity(len as usize),\n\n None => String::new(),\n\n };\n\n try!(res.read_to_string(&mut body));\n\n\n\n debug!(\"== Received response ==\\nStatus: {:?}\\nHeaders: {:?}\\nBody: {:?}\",\n\n res.status,\n\n res.headers,\n\n body);\n\n\n\n let res: Response<T> = try!(serde_json::from_str(&body));\n\n match res {\n\n Response {\n\n stat: Stat::Ok,\n\n result: Some(result),\n\n ..\n\n } => Ok(result),\n\n Response {\n\n stat: Stat::Ok,\n", "file_path": "src/request.rs", "rank": 56, "score": 5.179183477514904 }, { "content": "#![feature(proc_macro)]\n\n\n\nextern crate crypto;\n\n\n\nextern crate hyper;\n\n\n\n#[macro_use]\n\nextern crate log;\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde;\n\nextern crate serde_json;\n\n\n\nextern crate url;\n\n\n\npub mod auth;\n\npub mod crypt;\n\npub mod error;\n\npub mod method;\n", "file_path": "src/lib.rs", "rank": 57, "score": 4.986159428184847 }, { "content": " pub track_audio: Option<TrackAudio>,\n\n\n\n #[serde(rename=\"adToken\")]\n\n pub ad_token: Option<String>,\n\n}\n\n\n\nimpl Track {\n\n pub fn is_ad(&self) -> bool {\n\n self.ad_token.is_some()\n\n }\n\n}\n\n\n\nimpl ToTrackToken for Track {\n\n fn 
to_track_token(&self) -> Option<String> {\n\n match self.track_token {\n\n Some(ref track_token) => Some(track_token.clone()),\n\n None => None,\n\n }\n\n }\n\n}\n", "file_path": "src/playlist.rs", "rank": 58, "score": 4.851570737693941 }, { "content": " /// Consumes PartnerLogin and sets the required information\n\n /// in the credentials.\n\n fn set_partner_login(&mut self, partner_login: PartnerLogin) {\n\n use std::str;\n\n use std::os::unix::ffi::OsStrExt;\n\n\n\n let sync_time_bytes: Vec<u8> = decrypt(self.decrypt_key(), &partner_login.sync_time)\n\n .as_os_str()\n\n .as_bytes()\n\n .iter()\n\n .skip(4)\n\n .cloned()\n\n .collect();\n\n let sync_time_str = str::from_utf8(&sync_time_bytes).unwrap_or(\"0\");\n\n let sync_time = sync_time_str.parse::<u64>().unwrap_or(0);\n\n\n\n self.partner_id = Some(partner_login.partner_id.clone());\n\n self.partner_auth_token = Some(partner_login.partner_auth_token.clone());\n\n self.sync_time = Some(sync_time);\n\n }\n", "file_path": "src/auth.rs", "rank": 59, "score": 4.58720444583509 }, { "content": " Method::StationCreateStation => \"station.createStation\".to_owned(),\n\n Method::StationRenameStation => \"station.renameStation\".to_owned(),\n\n Method::StationDeleteStation => \"station.deleteStation\".to_owned(),\n\n\n\n Method::StationGetStation => \"station.getStation\".to_owned(),\n\n Method::StationAddMusic => \"station.addMusic\".to_owned(),\n\n Method::StationDeleteMusic => \"station.deleteMusic\".to_owned(),\n\n\n\n Method::StationGetPlaylist => \"station.getPlaylist\".to_owned(),\n\n Method::StationAddFeedback => \"station.addFeedback\".to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl Method {\n\n /// Returns true if the method needs the request body to be encrypted with Blowfish ECB.\n\n /// The [crypt](../crypt/index.html) module takes care of encryption and decryption.\n\n pub fn is_encrypted(&self) -> bool {\n\n match *self {\n\n Method::TestCheckLicensing |\n\n Method::AuthPartnerLogin => false,\n\n _ => true,\n\n 
}\n\n }\n\n}\n", "file_path": "src/method.rs", "rank": 60, "score": 4.553593118475205 }, { "content": "\n\nimpl StdError for Error {\n\n fn description(&self) -> &str {\n\n match *self {\n\n Error::Io(ref e) => e.description(),\n\n Error::Codec(ref e) => e.description(),\n\n Error::Http(ref e) => e.description(),\n\n Error::Api { ref message, .. } => message.as_str(),\n\n }\n\n }\n\n\n\n fn cause(&self) -> Option<&StdError> {\n\n match *self {\n\n Error::Io(ref e) => Some(e),\n\n Error::Codec(ref e) => Some(e),\n\n Error::Http(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 61, "score": 4.0657401406099005 }, { "content": " result: None,\n\n ..\n\n } => Err(Error::Codec(JsonError::custom(\"Nothing to deserialize\"))),\n\n Response { stat: Stat::Fail, .. } => {\n\n Err(Error::Api {\n\n message: res.message.unwrap(),\n\n code: res.code.unwrap().into(),\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 62, "score": 3.991763973695382 }, { "content": "//! 
Encryption and Decryption using Blowfish with ECB mode.\n\n\n\nuse std::ffi::OsString;\n\n\n\nuse crypto::blowfish::Blowfish;\n\nuse crypto::symmetriccipher::{BlockEncryptor, BlockDecryptor};\n\n\n\nconst PADDING_BYTE: u8 = 2;\n\n\n\n/// Returns the encrypted input using the given key.\n\n///\n\n/// The returned string is encoded in hexadecimal notation,\n\n/// which is a UTF-8 string, so it's fine to return it using\n\n/// the `String` type.\n", "file_path": "src/crypt.rs", "rank": 63, "score": 3.300995151818664 }, { "content": " StationAddMusic,\n\n StationDeleteMusic,\n\n\n\n StationGetPlaylist,\n\n StationAddFeedback,\n\n}\n\n\n\nimpl ToString for Method {\n\n fn to_string(&self) -> String {\n\n match *self {\n\n Method::TestCheckLicensing => \"test.checkLicensing\".to_owned(),\n\n\n\n Method::AuthPartnerLogin => \"auth.partnerLogin\".to_owned(),\n\n Method::AuthUserLogin => \"auth.userLogin\".to_owned(),\n\n\n\n Method::MusicSearch => \"music.search\".to_owned(),\n\n\n\n Method::UserGetStationList => \"user.getStationList\".to_owned(),\n\n Method::UserGetStationListChecksum => \"user.getStationListChecksum\".to_owned(),\n\n\n", "file_path": "src/method.rs", "rank": 64, "score": 3.19785212945022 }, { "content": " self.request(HttpMethod::Post, method, body)\n\n }\n\n\n\n /// Proxy method for POST requests that do not return data.\n\n pub fn post_noop(&self, method: Method, body: Option<Value>) -> Result<()> {\n\n self.request_noop(HttpMethod::Post, method, body)\n\n }\n\n\n\n fn request<T>(&self, http_method: HttpMethod, method: Method, body: Option<Value>) -> Result<T>\n\n where T: Deserialize\n\n {\n\n let credentials = self.credentials.lock().unwrap();\n\n\n\n let req = request(&self.client,\n\n &http_method,\n\n self.endpoint,\n\n method,\n\n body.clone(),\n\n Some(&credentials.borrow()));\n\n\n", "file_path": "src/lib.rs", "rank": 65, "score": 3.1896623749260615 }, { "content": "\n\n/// Audio information for a track.\n\n#[derive(Debug, Clone, 
Deserialize)]\n\npub struct Audio {\n\n pub bitrate: String,\n\n pub encoding: String,\n\n #[serde(rename=\"audioUrl\")]\n\n pub audio_url: String,\n\n pub protocol: String,\n\n}\n\n\n\n////////////////////\n\n// Request structs\n\n////////////////////\n\n\n", "file_path": "src/playlist.rs", "rank": 66, "score": 3.0955287876591804 }, { "content": " Method::AuthUserLogin,\n\n Some(user_login_body),\n\n Some(&credentials)));\n\n credentials.set_user_login(user_login);\n\n\n\n // At this point we can assume credentials are correct.\n\n Ok(credentials)\n\n }\n\n\n\n /// Refreshes the expiration time of the credentials.\n\n pub fn refresh(&mut self) -> Result<()> {\n\n match Credentials::new(&self.username, &self.password) {\n\n Ok(new_credentials) => {\n\n *self = new_credentials;\n\n Ok(())\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n\n", "file_path": "src/auth.rs", "rank": 67, "score": 2.973238243530198 }, { "content": " partner_auth_token: None,\n\n sync_time: None,\n\n\n\n user_id: None,\n\n user_auth_token: None,\n\n };\n\n\n\n let partner_login: PartnerLogin = try!(request(&client,\n\n &HttpMethod::Post,\n\n DEFAULT_ENDPOINT,\n\n Method::AuthPartnerLogin,\n\n Some(serde_json::to_value(&partner)),\n\n None));\n\n credentials.set_partner_login(partner_login);\n\n\n\n let user_login_body = serde_json::to_value(&UserLoginRequest::new(username.to_owned(),\n\n password.to_owned()));\n\n let user_login: UserLogin = try!(request(&client,\n\n &HttpMethod::Post,\n\n DEFAULT_ENDPOINT,\n", "file_path": "src/auth.rs", "rank": 68, "score": 2.9149072866590675 }, { "content": " fn from(error: HttpError) -> Error {\n\n Error::Http(error)\n\n }\n\n}\n\n\n\n/// Pandora error codes.\n\n#[derive(Debug)]\n\npub enum ApiErrorCode {\n\n Unknown,\n\n\n\n InternalError,\n\n MaintenanceMode,\n\n\n\n UrlParamMissingMethod,\n\n UrlParamMissingAuthToken,\n\n UrlParamMissingPartnerId,\n\n UrlParamMissingUserId,\n\n\n\n SecureProtocolRequired,\n\n CertifiateRequired,\n", "file_path": 
"src/error.rs", "rank": 69, "score": 2.895876465141895 }, { "content": "pub struct Song {\n\n #[serde(rename=\"artistName\")]\n\n pub artist_name: String,\n\n #[serde(rename=\"musicToken\")]\n\n pub music_token: String,\n\n #[serde(rename=\"songName\")]\n\n pub song_name: String,\n\n pub score: u32,\n\n}\n\n\n\nimpl ToMusicToken for Song {\n\n fn to_music_token(&self) -> String {\n\n self.music_token.clone()\n\n }\n\n}\n\n\n\n/// Artist information.\n\n#[derive(Debug, Deserialize)]\n\npub struct Artist {\n\n #[serde(rename=\"artistName\")]\n", "file_path": "src/music.rs", "rank": 70, "score": 2.89407467193353 }, { "content": " pub artist_name: String,\n\n #[serde(rename=\"musicToken\")]\n\n pub music_token: String,\n\n #[serde(rename=\"likelyMatch\")]\n\n pub likely_match: bool,\n\n pub score: u32,\n\n}\n\n\n\nimpl ToMusicToken for Artist {\n\n fn to_music_token(&self) -> String {\n\n self.music_token.clone()\n\n }\n\n}\n\n\n\n/// Private struct for sending a search request.\n", "file_path": "src/music.rs", "rank": 71, "score": 2.7785014878143697 }, { "content": " #[serde(rename=\"partnerAuthToken\")]\n\n pub partner_auth_token: String,\n\n #[serde(rename=\"syncTime\")]\n\n pub sync_time: String,\n\n}\n\n\n\n/// User login.\n\n#[derive(Debug, Deserialize)]\n\npub struct UserLogin {\n\n #[serde(rename=\"userId\")]\n\n pub user_id: Option<String>,\n\n #[serde(rename=\"userAuthToken\")]\n\n pub user_auth_token: String,\n\n}\n\n\n\n////////////////////\n\n// Request structs\n\n////////////////////\n\n\n\n/// User login information.\n\n#[derive(Serialize)]\n", "file_path": "src/auth.rs", "rank": 72, "score": 2.7463683465045117 }, { "content": "/// authentication process with 2 phases:\n\n///\n\n/// 1. Login device (a.k.a. Partner).\n\n/// 2. 
Login user.\n\n///\n\n/// Some http requests to the API use the partial credentials created in step\n\n/// one to get the full credentials.\n\n#[derive(Debug)]\n\npub struct Credentials {\n\n // Username and password\n\n username: String,\n\n password: String,\n\n\n\n // Encryption / Decryption information.\n\n encrypt_key: String,\n\n decrypt_key: String,\n\n\n\n // Partner info.\n\n partner_id: Option<String>,\n\n partner_auth_token: Option<String>,\n", "file_path": "src/auth.rs", "rank": 73, "score": 2.734421660797969 }, { "content": "\n\n ParameterTypeMismatch,\n\n ParameterMissing,\n\n ParameterValueInvalid,\n\n\n\n ApiVersionNotSupported,\n\n LicensingRestrictions,\n\n InsufficientConnectivity,\n\n\n\n UnknownMethodName,\n\n WrongProtocol,\n\n\n\n ReadOnlyMode,\n\n InvalidAuthToken,\n\n InvalidPartnerOrUserLogin,\n\n ListenerNotAuthorized,\n\n UserNotAuthorized,\n\n\n\n MaxStationsReached,\n\n StationDoesNotExists,\n", "file_path": "src/error.rs", "rank": 74, "score": 2.3890284988144055 }, { "content": " 5 => ApiErrorCode::UrlParamMissingUserId,\n\n 6 => ApiErrorCode::SecureProtocolRequired,\n\n 7 => ApiErrorCode::CertifiateRequired,\n\n 8 => ApiErrorCode::ParameterTypeMismatch,\n\n 9 => ApiErrorCode::ParameterMissing,\n\n 10 => ApiErrorCode::ParameterValueInvalid,\n\n 11 => ApiErrorCode::ApiVersionNotSupported,\n\n 12 => ApiErrorCode::LicensingRestrictions,\n\n 13 => ApiErrorCode::InsufficientConnectivity,\n\n 14 => ApiErrorCode::UnknownMethodName,\n\n 15 => ApiErrorCode::WrongProtocol,\n\n 1000 => ApiErrorCode::ReadOnlyMode,\n\n 1001 => ApiErrorCode::InvalidAuthToken,\n\n 1002 => ApiErrorCode::InvalidPartnerOrUserLogin,\n\n 1003 => ApiErrorCode::ListenerNotAuthorized,\n\n 1004 => ApiErrorCode::UserNotAuthorized,\n\n 1005 => ApiErrorCode::MaxStationsReached,\n\n 1006 => ApiErrorCode::StationDoesNotExists,\n\n 1007 => ApiErrorCode::ComplimentaryPeriodAlreadyInUse,\n\n 1008 => ApiErrorCode::CallNotAllowed,\n", "file_path": "src/error.rs", "rank": 75, "score": 
2.2385435401762797 }, { "content": " version: String,\n\n encrypt_password: String,\n\n decrypt_password: String)\n\n -> Self {\n\n Partner {\n\n username: username,\n\n password: password,\n\n device_model: device_model,\n\n version: version,\n\n encrypt_password: encrypt_password,\n\n decrypt_password: decrypt_password,\n\n }\n\n }\n\n}\n\n\n\n/// Partner login.\n\n#[derive(Debug, Deserialize)]\n\npub struct PartnerLogin {\n\n #[serde(rename=\"partnerId\")]\n\n pub partner_id: String,\n", "file_path": "src/auth.rs", "rank": 76, "score": 2.046714256593192 }, { "content": " }\n\n }\n\n\n\n fn request_noop(&self,\n\n http_method: HttpMethod,\n\n method: Method,\n\n body: Option<Value>)\n\n -> Result<()> {\n\n let credentials = self.credentials.lock().unwrap();\n\n\n\n let req = request::<()>(&self.client,\n\n &http_method,\n\n self.endpoint,\n\n method,\n\n body.clone(),\n\n Some(&credentials.borrow()));\n\n\n\n // Checks response and tries to revalidate possibly expired\n\n // credentials once.\n\n match req {\n", "file_path": "src/lib.rs", "rank": 77, "score": 1.6646829791197666 }, { "content": "\n\n ComplimentaryPeriodAlreadyInUse,\n\n CallNotAllowed,\n\n DeviceNotFound,\n\n PartnerNotAuthroized,\n\n\n\n InvalidUsername,\n\n InvalidPassword,\n\n UsernameAlreadyExists,\n\n\n\n DeviceAlreadyAssociatedToAccount,\n\n UpgradeDeviceModelInvalid,\n\n\n\n ExplicitPinIncorrect,\n\n ExplicitPinMalformed,\n\n\n\n DeviceModelInvalid,\n\n\n\n ZipCodeInvalid,\n\n BirthYearInvalid,\n", "file_path": "src/error.rs", "rank": 78, "score": 1.6108422486727867 }, { "content": "\n\nimpl ::std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\nimpl From<IoError> for Error {\n\n fn from(error: IoError) -> Error {\n\n Error::Io(error)\n\n }\n\n}\n\n\n\nimpl From<CodecError> for Error {\n\n fn from(error: CodecError) -> Error {\n\n Error::Codec(error)\n\n }\n\n}\n\n\n\nimpl From<HttpError> for 
Error {\n", "file_path": "src/error.rs", "rank": 79, "score": 1.581115306566423 }, { "content": " decrypt_password: String,\n\n}\n\n\n\nimpl Default for Partner {\n\n fn default() -> Self {\n\n Partner {\n\n username: \"android\".to_owned(),\n\n password: \"AC7IBG09A3DTSYM4R41UJWL07VLN8JI7\".to_owned(),\n\n device_model: \"android-generic\".to_owned(),\n\n version: \"5\".to_owned(),\n\n encrypt_password: \"6#26FRL$ZWD\".to_owned(),\n\n decrypt_password: \"R=U!LH$O2B#\".to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl Partner {\n\n pub fn new(username: String,\n\n password: String,\n\n device_model: String,\n", "file_path": "src/auth.rs", "rank": 80, "score": 1.3640388911062815 }, { "content": "\n\n /// Consumes UserLogin and sets the required information\n\n /// in the credentials.\n\n fn set_user_login(&mut self, user_login: UserLogin) {\n\n self.user_id = user_login.user_id.clone();\n\n self.user_auth_token = Some(user_login.user_auth_token.clone());\n\n }\n\n}\n\n\n\n/// Partner login request information.\n\n#[derive(Serialize)]\n\npub struct Partner {\n\n username: String,\n\n password: String,\n\n #[serde(rename=\"deviceModel\")]\n\n device_model: String,\n\n version: String,\n\n #[serde(rename=\"encryptPassword\")]\n\n encrypt_password: String,\n\n #[serde(rename=\"decryptPassword\")]\n", "file_path": "src/auth.rs", "rank": 81, "score": 1.1382062106063286 } ]
Rust
src/main.rs
cerrno/winnow
f172e648a9b69ac6dd0bc42ac753c173d89ad962
use std::collections::HashMap; use std::env; use std::io; use std::path::Path; use std::process::Command; use winnow::detector; use winnow::winnowing::{parse_patch, Fingerprint}; use colored::*; use indicatif::ProgressIterator; struct Repo { name: String, path: String, patches: Vec<String>, } impl Repo { fn new(repo: &str) -> io::Result<Self> { let repo_dir = Path::new(repo).file_name().unwrap(); let repo = Repo { name: repo_dir.to_str().unwrap().to_owned(), path: repo.to_owned(), patches: vec![], }; repo.git_clone()?; Ok(repo) } fn git_clone(&self) -> io::Result<()> { let clone_cmd = Command::new("git") .arg("clone") .arg("--bare") .arg(&self.path) .output()?; if clone_cmd.status.code().unwrap() == 128 { println!("{}", format!("repo {} already exists", self.path).yellow()); } else if !clone_cmd.status.success() { panic!("cannot clone repo {}", self.path); } Ok(()) } /* fn patches(&mut self) -> io::Result<()> { let start = empty_tree_hash()?; self.patches_since(&start) } */ fn make_patches_since(&mut self, start_hash: &str) -> io::Result<()> { let git_cmd = Command::new("git") .arg("format-patch") .arg("-k") .arg(start_hash) .current_dir(&self.name) .output()?; if !git_cmd.status.success() { println!("{}", String::from_utf8(git_cmd.stderr).unwrap()); panic!("cannot git format-patch"); } for l in String::from_utf8(git_cmd.stdout).unwrap().lines() { let mut s = self.name.clone(); s.push_str("/"); s.push_str(l); self.patches.push(s); } Ok(()) } fn parse_patches(&self) -> Vec<Fingerprint> { let mut out = vec![]; for p in self.patches.iter().progress() { out.append(&mut parse_patch(&p, &self.name)); } out } } fn empty_tree_hash() -> io::Result<String> { let hash = Command::new("git") .arg("hash-object") .arg("-t") .arg("tree") .arg("/dev/null") .output()?; if !hash.status.success() { panic!("cannot git hash-object"); } Ok(String::from_utf8(hash.stdout).unwrap().trim().to_owned()) } fn main() -> io::Result<()> { let args: Vec<String> = env::args().collect(); let (repo1, 
repo2, start_hash) = match args.len() { 3 => (args[1].clone(), args[2].clone(), empty_tree_hash()?), 4 => (args[1].clone(), args[2].clone(), args[3].clone()), _ => panic!("Invalid number of arguments"), }; let mut fingerprint_map: HashMap<String, Vec<Fingerprint>> = HashMap::new(); let mut repo = Repo::new(&repo1)?; repo.make_patches_since(&start_hash)?; let fingerprints = repo.parse_patches(); fingerprint_map.insert(repo.name, fingerprints); let mut repo = Repo::new(&repo2)?; repo.make_patches_since(&start_hash)?; let fingerprints = repo.parse_patches(); fingerprint_map.insert(repo.name, fingerprints); detector::run(fingerprint_map); Ok(()) }
use std::collections::HashMap; use std::env; use std::io; use std::path::Path; use std::process::Command; use winnow::detector; use winnow::winnowing::{parse_patch, Fingerprint}; use colored::*; use indicatif::ProgressIterator; struct Repo { name: String, path: String, patches: Vec<String>, } impl Repo { fn new(repo: &str) -> io::Result<Self> {
fn git_clone(&self) -> io::Result<()> { let clone_cmd = Command::new("git") .arg("clone") .arg("--bare") .arg(&self.path) .output()?; if clone_cmd.status.code().unwrap() == 128 { println!("{}", format!("repo {} already exists", self.path).yellow()); } else if !clone_cmd.status.success() { panic!("cannot clone repo {}", self.path); } Ok(()) } /* fn patches(&mut self) -> io::Result<()> { let start = empty_tree_hash()?; self.patches_since(&start) } */ fn make_patches_since(&mut self, start_hash: &str) -> io::Result<()> { let git_cmd = Command::new("git") .arg("format-patch") .arg("-k") .arg(start_hash) .current_dir(&self.name) .output()?; if !git_cmd.status.success() { println!("{}", String::from_utf8(git_cmd.stderr).unwrap()); panic!("cannot git format-patch"); } for l in String::from_utf8(git_cmd.stdout).unwrap().lines() { let mut s = self.name.clone(); s.push_str("/"); s.push_str(l); self.patches.push(s); } Ok(()) } fn parse_patches(&self) -> Vec<Fingerprint> { let mut out = vec![]; for p in self.patches.iter().progress() { out.append(&mut parse_patch(&p, &self.name)); } out } } fn empty_tree_hash() -> io::Result<String> { let hash = Command::new("git") .arg("hash-object") .arg("-t") .arg("tree") .arg("/dev/null") .output()?; if !hash.status.success() { panic!("cannot git hash-object"); } Ok(String::from_utf8(hash.stdout).unwrap().trim().to_owned()) } fn main() -> io::Result<()> { let args: Vec<String> = env::args().collect(); let (repo1, repo2, start_hash) = match args.len() { 3 => (args[1].clone(), args[2].clone(), empty_tree_hash()?), 4 => (args[1].clone(), args[2].clone(), args[3].clone()), _ => panic!("Invalid number of arguments"), }; let mut fingerprint_map: HashMap<String, Vec<Fingerprint>> = HashMap::new(); let mut repo = Repo::new(&repo1)?; repo.make_patches_since(&start_hash)?; let fingerprints = repo.parse_patches(); fingerprint_map.insert(repo.name, fingerprints); let mut repo = Repo::new(&repo2)?; repo.make_patches_since(&start_hash)?; let 
fingerprints = repo.parse_patches(); fingerprint_map.insert(repo.name, fingerprints); detector::run(fingerprint_map); Ok(()) }
let repo_dir = Path::new(repo).file_name().unwrap(); let repo = Repo { name: repo_dir.to_str().unwrap().to_owned(), path: repo.to_owned(), patches: vec![], }; repo.git_clone()?; Ok(repo) }
function_block-function_prefix_line
[ { "content": "pub fn parse_patch(path: &str, repo: &str) -> Vec<Fingerprint> {\n\n let patch = fs::read_to_string(path).unwrap();\n\n let mut patchset = PatchSet::new();\n\n if let Err(e) = patchset.parse(&patch) {\n\n println!(\"{:?}\", e);\n\n return vec![];\n\n }\n\n let commit_hash = patch.split_whitespace().nth(1);\n\n dprintln!(\"{}\", commit_hash.unwrap());\n\n // let commit_hash = decode_hex(commit_hash.unwrap());\n\n // let mut a = [0; 20];\n\n // for (i, v) in commit_hash.unwrap().into_iter().enumerate() {\n\n // a[i] = v;\n\n // }\n\n winnow(\n\n patchset,\n\n path.to_owned(),\n\n commit_hash.unwrap().to_owned(),\n\n repo,\n\n )\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 0, "score": 144425.08918825316 }, { "content": "fn clean(input: &str) -> String {\n\n input\n\n .chars()\n\n .filter(|c| c.is_alphabetic())\n\n .map(|c| c.to_lowercase())\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 1, "score": 88134.091617842 }, { "content": "pub fn run(repo_map: HashMap<String, Vec<Fingerprint>>) {\n\n // step 0: construct document map\n\n let mut doc_map: HashMap<Document, Vec<Fingerprint>> = HashMap::new();\n\n for (repo, fingerprints) in repo_map {\n\n for f in fingerprints {\n\n doc_map\n\n .entry(Document {\n\n repo: repo.clone(),\n\n patch: f.location.patch.clone(),\n\n commit: f.location.commit.clone(),\n\n file: f.location.file.clone(),\n\n hunk_index: f.location.hunk,\n\n })\n\n .or_insert_with(Vec::new)\n\n .push(f);\n\n }\n\n }\n\n println!(\"Done generating doc_map\");\n\n\n\n // step 1: construct inverted index\n", "file_path": "src/detector.rs", "rank": 2, "score": 84397.65968490596 }, { "content": "fn add_file(incompletes: Vec<(u64, usize, usize)>, file: &str) -> Vec<(u64, usize, usize, String)> {\n\n incompletes\n\n .into_iter()\n\n .map(|(hash, line, hunk_index)| (hash, hunk_index, line, file.to_owned()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 4, "score": 
55852.27346850854 }, { "content": "fn from_same_doc(d: &Document, f: &Fingerprint) -> bool {\n\n d.repo == f.location.repo && d.commit == f.location.commit && d.file == f.location.file\n\n}\n", "file_path": "src/detector.rs", "rank": 6, "score": 53705.603793111135 }, { "content": "fn winnow_str(input: &str, window: u32) -> Vec<u64> {\n\n let input = clean(input);\n\n let ngram_hash_iter = ngram(input.chars(), window)\n\n .map(|x| x.iter().collect::<String>())\n\n .map(|x| hash(&x));\n\n // lul fix this\n\n let hashes = ngram_hash_iter.collect::<Vec<u64>>();\n\n ngram(hashes.into_iter(), window).map(select_hash).collect()\n\n}\n\n\n\n// use std::num::ParseIntError;\n\n\n\n// fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {\n\n// (0..s.len())\n\n// .step_by(2)\n\n// .map(|i| u8::from_str_radix(&s[i..i + 2], 16))\n\n// .collect()\n\n// }\n\n\n", "file_path": "src/winnowing.rs", "rank": 7, "score": 53097.90924956227 }, { "content": "fn make_fingerprints(\n\n incompletes: Vec<(u64, usize, usize, String)>,\n\n patch: String,\n\n commit_hash: String,\n\n repo: String,\n\n) -> Vec<Fingerprint> {\n\n incompletes\n\n .into_iter()\n\n .map(|(hash, hunk, line, file)| Fingerprint {\n\n hash,\n\n location: Location {\n\n repo: repo.clone(),\n\n patch: patch.clone(),\n\n commit: commit_hash.clone(),\n\n file,\n\n hunk,\n\n line,\n\n },\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 8, "score": 49137.90942729023 }, { "content": "/// Construct an n-gram from an iterator\n\n///\n\n/// 4-gram example:\n\n/// abcdefgh => abcd bcde cdef defg efgh\n\n///\n\n/// ```\n\n/// # use winnow::winnowing::ngram;\n\n///\n\n/// let input = \"abcdefgh\";\n\n/// let ngrams = ngram(input.chars(), 4);\n\n/// assert_eq!(\n\n/// ngrams\n\n/// .map(|x| x.into_iter().collect::<String>())\n\n/// .collect::<Vec<String>>(),\n\n/// vec![\"abcd\", \"bcde\", \"cdef\", \"defg\", \"efgh\"]\n\n/// );\n\n/// ```\n\npub fn ngram<I>(mut f: I, n: u32) -> impl Iterator<Item = 
Vec<I::Item>>\n\nwhere\n\n I: Iterator + Clone,\n\n{\n\n let mut v = vec![];\n\n let mut b = f.clone();\n\n for _ in 0..n {\n\n // assert!(b.next().is_some(), \"Input size smaller than N\");\n\n if b.next().is_none() {\n\n return v.into_iter();\n\n }\n\n }\n\n loop {\n\n let ngram = f.clone().take(n as usize).collect();\n\n v.push(ngram);\n\n f.next();\n\n if b.next().is_none() {\n\n break;\n\n }\n\n }\n\n // fix to yield vals instead of making vec and returning iterator\n\n v.into_iter()\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 9, "score": 40357.698187334085 }, { "content": "#[derive(Debug, PartialEq, Eq, Hash)]\n\nstruct Document {\n\n repo: String,\n\n patch: String,\n\n commit: String,\n\n file: String,\n\n hunk_index: usize,\n\n}\n\n\n", "file_path": "src/detector.rs", "rank": 10, "score": 39043.38409304523 }, { "content": "#[derive(Debug, PartialEq, Eq, Hash)]\n\nstruct DetectorPair<'a> {\n\n a: &'a Document,\n\n b: &'a Document,\n\n fingerprints: Vec<Fingerprint>,\n\n}\n\n\n", "file_path": "src/detector.rs", "rank": 11, "score": 36199.87120894528 }, { "content": "#[test]\n\nfn winnow_source_1() {\n\n assert!(true); // todo add winnow test\n\n}\n", "file_path": "tests/winnowing.rs", "rank": 12, "score": 30335.652931286328 }, { "content": "pub fn winnow(\n\n commit: PatchSet,\n\n patch_name: String,\n\n commit_hash: String,\n\n repo: &str,\n\n) -> Vec<Fingerprint> {\n\n let mut hash_line_file = vec![];\n\n for patchfile in commit {\n\n let mut hash_line_hunk = vec![];\n\n let file = &patchfile.target_file.clone();\n\n for (h_index, hunk) in patchfile.into_iter().enumerate() {\n\n for line in hunk.target_lines() {\n\n hash_line_hunk.append(&mut winnow_line(line, h_index));\n\n }\n\n }\n\n let mut v = add_file(hash_line_hunk, file);\n\n hash_line_file.append(&mut v);\n\n }\n\n make_fingerprints(hash_line_file, patch_name, commit_hash, repo.to_owned())\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 13, "score": 29095.106941391117 }, { 
"content": "fn hash<T: Hash>(t: &T) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n s.finish()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use fs::read_to_string;\n\n\n\n #[test]\n\n fn test_clean() {\n\n let input = \"A do run run run, a do run run\";\n\n let input = clean(input);\n\n assert!(input == \"adorunrunrunadorunrun\");\n\n }\n\n\n\n #[test]\n\n fn test_ngram() {\n", "file_path": "src/winnowing.rs", "rank": 15, "score": 23424.52651383135 }, { "content": "// select smallest hash out of vec\n\n// select rightmost in case of tie\n\nfn select_hash(hashes: Vec<u64>) -> u64 {\n\n let mut min = hashes.get(0).unwrap();\n\n for v in hashes.iter() {\n\n if v < min {\n\n min = v;\n\n }\n\n }\n\n *min\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 16, "score": 23420.84124904889 }, { "content": "fn show_pair(pair: &'_ DetectorPair<'_>) -> std::io::Result<()> {\n\n let p1 = fs::read_to_string(&pair.a.patch)?;\n\n let p2 = fs::read_to_string(&pair.b.patch)?;\n\n let mut d1 = PatchSet::new();\n\n d1.parse(&p1).unwrap();\n\n let mut d2 = PatchSet::new();\n\n d2.parse(&p2).unwrap();\n\n\n\n // println!(\n\n // \"{}\",\n\n // d1.into_iter()\n\n // .find(|d| d.target_file == pair.a.file)\n\n // .expect(\"couldn't find file\")\n\n // .to_string()\n\n // .green()\n\n // );\n\n // println!(\"{}\", pair.a.hunk_index);\n\n // println!(\n\n // \"{}\",\n\n // d2.into_iter()\n", "file_path": "src/detector.rs", "rank": 17, "score": 20574.5410265147 }, { "content": "fn winnow_line(line: Line, hunk_index: usize) -> Vec<(u64, usize, usize)> {\n\n let n = line.target_line_no.unwrap(); // should have target_line_no since it came from target_lines\n\n let hashes = winnow_str(&line.value, 10);\n\n hashes.into_iter().map(|h| (h, n, hunk_index)).collect()\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 18, "score": 18738.21275502831 }, { "content": "- A _fingerprint_ is a hash resulting from the application of the 
[winnowing][winnowing-paper] algorithm to subset (_window_) of a specific _document_. _Fingerprints_ are always associated with the _documents_ they derive from by means of their _location_.\n\n\n\n0. Given a set of repository urls, `minp`, and `maxp`\n\n1. Load the set of all _documents_\n\n - Pull all repositories\n\n - Per repository, read all commits\n\n - Per commit, read all _hunks_\n\n - Per _hunk_, do the following\n\n2. Compute the set of all _fingerprints_\n\n - Initialize an empty vector of _fingerprints_ and _location_ tuples, `fv`\n\n - Given _hunk_, perform winnowing on the text, giving a set of _fingerprints_\n\n - Per resulting _fingerprint_, construct a _location_ given the current context of repository, filename (from commit), commit, and line number (from hunk)\n\n - Store in `fv`\n\n3. Construct a reverse index on the set of _fingerprints_ `fv`\n\n - Initialize an empty nested hashmap of _fingerprint_ hash to hashmap of repository name to _location_, `fi`\n\n - Per element of `fv`, insert _fingerprint_ into `fi` using hash, repository (from _location_), and _location_ from tuple\n\n4. Prune set of _fingerprints_\n\n - Initialize an empty hashmap of `(repository, filename, commit hash)` tuple to vector of _fingerprint_ hash, `fd`\n\n - Per `key,value` in `fv`, use _fingerprint_ hash (`key`) to query index `fi`\n\n - Compute _fingerprint popularity_ `p` from length of the keys of the `value` (nested) map (the number of unique repositories corresponding to the _locations_ where this _fingerprint_ hash can be found) minus one (discounting the current repository)\n\n - If `p` < the minimum _popularity_ cutoff `minp`, there are not enough matches across the set of _documents_ for this _fingerprint_ to be interesting to identify similarity. 
If `p` > the maximum _popularity_ cutoff `maxp`, this _fingerprint_ is likely a language keyword, boilerplate code, or something else shared amongst almost all files/repositories.\n\n - Otherwise, insert element into `fd` using the `value` (nested) map's data to give the _location_ and the `key` _fingerprint_ hash\n\n5. Perform quadratic (pairwise) _document_ comparison\n\n - Initialize an empty output map of `(document, document)` tuple to vector of _fingerprints_, `out`\n\n - Iterate through keys in `fd` as _document_ `a`\n\n - Per document _a_, iterate through other keys in `fd` as _document_ `b`\n\n - Use `fi` to determine fingerprints existing in both `a` and `b`\n\n - Save tuple `(a,b)` and matching _fingerprint_ in map\n\n6. Return rank-ordered map `out`, sorted by the length of the value (the number of matches for a given pair of _documents_)\n\n7. Compute useful metrics from `out`\n\n\n\n\n", "file_path": "README.md", "rank": 19, "score": 12674.594428187285 }, { "content": "> falls below some user-specified threshold.\n\n\n\n> Presentation of the results is another important issue for users. Statistics\n\n> such as reporting the percentage of overlap between two documents are useful,\n\n> but not nearly as useful as actually showing the matches marked-up in the\n\n> original text. MOSS uses the fingerprints to determine where the longest\n\n> matching sequences are; in particular, if a1 in document 1 matches a2 in\n\n> document 2, and b1 in document 1 matches b2 in document 2, and furthermore a1\n\n> and b1 are consecutive in document 1 and a2 and b2 are consecutive in\n\n> document 2, then we have discovered a longer match across documents\n\n> consisting of a followed by b. While this merging of matches is easy to\n\n> implement, k-grams are naturally coarse and some of the match is usually lost\n\n> at the beginning and the end of the match. 
It is possible that once a pair of\n\n> similar documents are detected using fingerprinting that it would be better\n\n> to use a suffix-tree algorithm [\\[15\\]][suffix-tree] to find maximal matches\n\n> in just that pair of documents.\n\n\n", "file_path": "README.md", "rank": 20, "score": 12673.886653549542 }, { "content": "### Others\n\n#### Preprocessing\n\n> \"It does this by preprocessing the source code files, calculating a numeric\n\n> fingerprint for each file, and then performing a longest common sequence\n\n> search on the two fingerprints. The preprocessing stage replaces all function\n\n> and variable names with a single token, and removes all comments and\n\n> whitespace from the source code. The fingerprint stage calculates hash values\n\n> for windows of characters in the resulting file, preserving the lowest hash\n\n> values as the file’s fingerprint\" [Engels et al. 2007][engels-paper]\n\n\n\n### Notes\n\n- check out https://blog.tonari.no/why-we-love-rust\n\n- Btree Map for winnowing?\n\n\n\n[moss]: https://theory.stanford.edu/~aiken/moss/\n\n[engels-paper]: https://dl.acm.org/doi/pdf/10.1145/1227310.1227324\n\n[winnowing-paper]: https://theory.stanford.edu/~aiken/publications/papers/sigmod03.pdf\n\n[suffix-tree]: https://www.cs.helsinki.fi/u/ukkonen/SuffixT1withFigs.pdf\n", "file_path": "README.md", "rank": 21, "score": 12673.09902356534 }, { "content": "## Past work\n\n\n\n### Winnowing paper\n\n[Link to paper][winnowing-paper] \n\n> For this application, positional information (document and line number) is\n\n> stored with each selected fingerprint. The first step builds an index mapping\n\n> fingerprints to locations for all documents, much like the inverted index\n\n> built by search engines mapping words to positions in documents. In the\n\n> second step, each document is fingerprinted a second time and the selected\n\n> fingerprints are looked up in the index; this gives the list of all matching\n\n> fingerprints for each document. 
Now the list of matching fingerprints for a\n\n> document d may contain fingerprints from many different documents d1, d2,\n\n> . . .. In the next step, the list of matching fingerprints for each document\n\n> d is sorted by document and the matches for each pair of documents (d, d1\n\n> ), (d, d2 ), . . . is formed. Matches between documents are rank-ordered by\n\n> size (number of fingerprints) and the largest matches are reported to the\n\n> user. Note that up until this last step, no explicit consideration of pairs\n\n> of documents is required. This is very important, as we could not hope to\n\n> carry out copy detection by comparing each pair of documents in a large\n\n> corpus. By postponing the quadratic computation to the last step, we can\n\n> optimize it by never materializing the matching for a pair of documents if it\n", "file_path": "README.md", "rank": 22, "score": 12672.763332853616 }, { "content": "# winnow\n\n[![build](https://github.com/schuermannator/winnow/workflows/build/badge.svg?branch=master)](https://github.com/schuermannator/winnow/actions)\n\n\n\nSoftware source code similarity detection similar to [Moss][moss]. It is based\n\non incremental [winnowing][winnowing-paper] of commits.\n\n\n\n## Our approach\n\nWe desire to detect similar code between files in different git repositories over the set of all commits. That is, we wish to be able to identify if there is shared code between any two files in any two _different_ repositories across all commits in those repositories.\n\n\n\nTODO:\n\n- Since we are winnowing _hunks_ rather than files, the fingerprints will not match on the boundaries. A temporary workaround is to use small ngrams until this is more properly considered.\n\n\n\nDefinitions:\n\n- A _document_ is a set of additive changes corresponding to a hunk in a diff for a particular file in a git repository. 
A git repository contains a set of commits, each of which contain a number of files, themselves containing a number of _hunks_, or individual sets of changes. These additive subset of all _hunks_ in all files in all commits gives the set of all _documents_ in a repository.\n\n- A _location_ uniquely identifies the line number of a _fingerprint_ in a _document_, given by the tuple `(repository, filename, commit hash, line number)`.\n", "file_path": "README.md", "rank": 23, "score": 12672.59150882431 }, { "content": "use std::collections::hash_map::DefaultHasher;\n\nuse std::fs;\n\nuse std::hash::{Hash, Hasher};\n\nuse unidiff::{Line, PatchSet};\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Fingerprint {\n\n pub hash: u64,\n\n pub location: Location,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct Location {\n\n pub repo: String,\n\n pub patch: String,\n\n pub commit: String,\n\n pub file: String,\n\n pub hunk: usize,\n\n pub line: usize,\n\n}\n\n\n", "file_path": "src/winnowing.rs", "rank": 25, "score": 7.864928441506649 }, { "content": "use crate::winnowing::{Fingerprint, Location};\n\nuse colored::*;\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse unidiff::PatchSet;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash)]\n", "file_path": "src/detector.rs", "rank": 26, "score": 7.650905582011303 }, { "content": " let input = clean(\"A do run run run\");\n\n let ngrams = ngram(input.chars(), 5);\n\n assert_eq!(\n\n ngrams\n\n .map(|x| x.into_iter().collect::<String>())\n\n .collect::<Vec<String>>(),\n\n vec![\"adoru\", \"dorun\", \"orunr\", \"runru\", \"unrun\", \"nrunr\", \"runru\", \"unrun\"]\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_scheme() {\n\n let winnow_size = 50;\n\n let us = read_to_string(\"tests/data/play.scm\").unwrap();\n\n let them = read_to_string(\"tests/data/play1.scm\").unwrap();\n\n let us = winnow_str(&us, winnow_size);\n\n let them = winnow_str(&them, winnow_size);\n\n println!(\"{:?}\", 
us);\n\n println!(\"{:?}\", them);\n\n println!(\"{:?}\", us.len());\n", "file_path": "src/winnowing.rs", "rank": 30, "score": 3.8115751765773953 }, { "content": " for (i, doc1) in documents.clone().enumerate() {\n\n for doc2 in documents.nth(i + 1).iter() {\n\n // consider pair (doc1, doc2)\n\n dprintln!(\"({:x?}\\n{:x?})\\n\", doc1, doc2);\n\n // matched fingerprints\n\n let mut fingerprints: Vec<Fingerprint> = vec![];\n\n // get this doc's fingerprints and look them up in the index\n\n for f in doc_map.get(doc1).unwrap() {\n\n let mut match_fingerprints = inverted_index\n\n .get(&f.hash)\n\n .unwrap()\n\n .iter()\n\n .cloned()\n\n .filter(|fp| from_same_doc(doc2, fp))\n\n .filter(|fp| doc1.repo != fp.location.repo)\n\n .collect();\n\n fingerprints.append(&mut match_fingerprints);\n\n }\n\n detected_pairs.push(DetectorPair {\n\n a: doc1,\n", "file_path": "src/detector.rs", "rank": 31, "score": 3.4606005100244 }, { "content": " let mut inverted_index: HashMap<u64, Vec<Fingerprint>> = HashMap::new();\n\n for fingerprints in doc_map.values() {\n\n for f in fingerprints {\n\n let fingerprint_vec = inverted_index.entry(f.hash).or_insert_with(Vec::new);\n\n fingerprint_vec.push(f.clone());\n\n }\n\n }\n\n println!(\n\n \"Done generating inverted_index; keys: {}\",\n\n inverted_index.keys().len()\n\n );\n\n\n\n // step 2: construct map from my_locations -> matched_locations\n\n let mut location_map: HashMap<Location, Vec<Location>> = HashMap::new();\n\n for (doc, fingerprints) in &doc_map {\n\n for f in fingerprints {\n\n let matched_fingerprints = inverted_index.get(&f.hash).unwrap();\n\n let popularity = matched_fingerprints\n\n .iter()\n\n .filter(|&match_fp| match_fp.location.repo != *doc.repo)\n", "file_path": "src/detector.rs", "rank": 32, "score": 3.188470500792034 }, { "content": "// use winnow::winnowing::winnow;\n\n\n\n#[test]\n", "file_path": "tests/winnowing.rs", "rank": 33, "score": 2.2873977265479093 }, { "content": " // .find(|d| d.target_file == 
pair.b.file)\n\n // .expect(\"couldn't find file\")\n\n // .to_string()\n\n // .red()\n\n // );\n\n // println!(\"{}\", pair.b.hunk_index);\n\n println!(\n\n \"{}\",\n\n d1.into_iter()\n\n .find(|d| d.target_file == pair.a.file)\n\n .expect(\"couldn't find file\")\n\n .into_iter()\n\n .nth(pair.a.hunk_index)\n\n .expect(\"couldn't find hunk\")\n\n .to_string()\n\n .green()\n\n );\n\n println!(\n\n \"{}\",\n\n d2.into_iter()\n", "file_path": "src/detector.rs", "rank": 34, "score": 1.9132834462053738 }, { "content": " println!(\"{:?}\", them.len());\n\n }\n\n\n\n #[test]\n\n fn test_basic() {\n\n let input = \"A do run run run, a do run run\";\n\n let output = winnow_str(input, 5);\n\n let expected = vec![\n\n 4020085029674966483,\n\n 1468765096528618582,\n\n 1468765096528618582,\n\n 1468765096528618582,\n\n 1468765096528618582,\n\n 1468765096528618582,\n\n 2165872647979677269,\n\n 2165872647979677269,\n\n 2165872647979677269,\n\n 2880295526655702587,\n\n 7536710649711940037,\n\n 4020085029674966483,\n\n 4020085029674966483,\n\n ];\n\n assert_eq!(output, expected);\n\n }\n\n}\n", "file_path": "src/winnowing.rs", "rank": 35, "score": 1.7995879106290813 }, { "content": " .find(|d| d.target_file == pair.b.file)\n\n .expect(\"couldn't find file\")\n\n .into_iter()\n\n .nth(pair.b.hunk_index)\n\n .expect(\"couldn't find hunk\")\n\n .to_string()\n\n .red()\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/detector.rs", "rank": 36, "score": 1.7516295889892912 }, { "content": "#![warn(\n\n unreachable_pub,\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates,\n\n rust_2018_idioms,\n\n missing_debug_implementations\n\n)]\n\n\n\nconst DEBUG: bool = false;\n\n\n\n#[macro_use]\n\nmacro_rules! dprintln {\n\n ($($arg:tt)*) => {\n\n if crate::DEBUG {\n\n println!($($arg)*)\n\n }\n\n };\n\n}\n\n\n\npub mod detector;\n\npub mod winnowing;\n", "file_path": "src/lib.rs", "rank": 37, "score": 1.3694409275449047 } ]
Rust
bindings/src/event_bindings.rs
stumptownlabs/beeper-android-seshat
2de21b9002f72ac083a143629e0ad3d806fd63ce
extern crate jni; extern crate seshat; use jni::sys::{jlong, jboolean, jstring}; use jni::JNIEnv; use jni::objects::{JObject, JString, JValue}; use seshat::{Event, EventType}; use crate::utils::*; /* * EVENT BINDINGS */ #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1new_1event( env: JNIEnv, _: JObject, j_event_type: jlong, j_content_value: JString, j_has_msg_type: jboolean, j_msg_type: JString, j_event_id: JString, j_sender: JString, j_server_ts: jlong, j_room_id: JString, ) -> jlong { let event_type: EventType = match j_event_type { 1 => EventType::Name, 2 => EventType::Topic, _ => EventType::Message, }; let content_value = jstring_to_string(&env, j_content_value); let msg_type_string = jstring_to_string(&env, j_msg_type); let msg_type = match j_has_msg_type { 0 => None, _ => Some(msg_type_string.as_str()) }; let event_id = jstring_to_string(&env, j_event_id); let sender = jstring_to_string(&env, j_sender); let server_ts = j_server_ts; let room_id = jstring_to_string(&env, j_room_id); let proto_event = Event::new( event_type.clone(), content_value.as_str(), msg_type, event_id.as_str(), sender.as_str(), server_ts, room_id.as_str(), "", ); let event_source = event_to_json(proto_event).unwrap(); let event = Event::new( event_type, content_value.as_str(), msg_type, event_id.as_str(), sender.as_str(), server_ts, room_id.as_str(), event_source.as_str(), ); Box::into_raw(Box::new(event)) as jlong } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1free_1event( _: JNIEnv, _: JObject, event_ptr: jlong, ) { Box::from_raw(event_ptr as *mut Event); } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1event_1from_1json( env: JNIEnv, _: JObject, j_event_source: JString, j_result: JObject ) { let event_source = jstring_to_string(&env,j_event_source); let result = partial_event_from_json(&event_source.as_str()); match result { Ok(event) => { let event_pointer = 
Box::into_raw(Box::new(event)) as jlong; let jvm_long_field_id_type = "J"; let event_ptr_field_name = "resultPtr"; env.set_field( j_result, event_ptr_field_name, jvm_long_field_id_type, JValue::from(event_pointer), ) .unwrap(); } Err(err) => { let error_message = err.to_string(); let io_error = seshat::Error::IOError(err); let error_code = seshat_error_code(io_error); let jvm_int_field_id_type = "I"; let error_code_field_name = "errorCode"; env.set_field( j_result, error_code_field_name, jvm_int_field_id_type, JValue::from(error_code), ) .unwrap(); let jvm_int_field_id_type = "Ljava/lang/String;"; let error_message_field_name = "errorMessage"; let j_error_message = env.new_string(error_message).unwrap(); let jvalue = j_error_message.into_inner(); env.set_field( j_result, error_message_field_name, jvm_int_field_id_type, JValue::from(JObject::from(jvalue)), ) .unwrap(); } }; } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1type( _: JNIEnv, _: JObject, event_ptr: jlong, ) -> jlong { let event = Box::from_raw(event_ptr as *mut Event); let event_type = event.event_type.clone(); Box::leak(event); match event_type { EventType::Message => { 0 } EventType::Name => { 1 } EventType::Topic => { 2 } } } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1content_1value( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let content_value = event.content_value.clone(); Box::leak(event); let output = env.new_string(content_value).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1id( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let event_id = event.event_id.clone(); Box::leak(event); let output = env.new_string(event_id).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" 
fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1sender( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let sender = event.sender.clone(); Box::leak(event); let output = env.new_string(sender).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1server_1ts( _: JNIEnv, _: JObject, event_ptr: jlong, ) -> jlong { let event = Box::from_raw(event_ptr as *mut Event); let server_ts = event.server_ts.clone(); Box::leak(event); server_ts } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1room_1id( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let room_id = event.room_id.clone(); Box::leak(event); let output = env.new_string(room_id).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1message_1type( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let option = event.msgtype.clone(); let message_type = match option{ None => {String::from("")} Some(msgtype) => { msgtype } }; Box::leak(event); let output = env.new_string(message_type).unwrap(); output.into_inner() }
extern crate jni; extern crate seshat; use jni::sys::{jlong, jboolean, jstring}; use jni::JNIEnv; use jni::objects::{JObject, JString, JValue}; use seshat::{Event, EventType}; use crate::utils::*; /* * EVENT BINDINGS */ #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1new_1event( env: JNIEnv, _: JObject, j_event_type: jlong, j_content_value: JString, j_has_msg_type: jboolean, j_msg_type: JString, j_event_id: JString, j_sender: JString, j_server_ts: jlong, j_room_id: JString, ) -> jlong { let event_type: EventType = match j_event_type { 1 => EventType::Name, 2 => EventType::Topic, _ => EventType::Message, }; let content_value = jstring_to_string(&env, j_content_value); let msg_type_string = jstring_to_string(&env, j_msg_type); let msg_type = match j_has_msg_type { 0 => None, _ => Some(msg_type_string.as_str()) }; let event_id = jstring_to_string(&env, j_event_id); let sender = jstring_to_string(&env, j_sender); let server_ts = j_server_ts; let room_id = jstring_to_string(&env, j_room_id); let proto_event = Event::new( event_type.clone(), content_value.as_str(), msg_type, event_id.as_str(), sender.as_str(), server_ts, room_id.as_str(), "", ); let event_source = event_to_json(proto_event).unwrap(); let event = Event::new( event_type, content_value.as_str(), msg_type, event_id.as_str(), sender.as_str(), server_ts, room_id.as_str(), event_source.as_str(), ); Box::into_raw(Box::new(event)) as jlong } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1free_1event( _: JNIEnv, _: JObject, event_ptr: jlong, ) { Box::from_raw(event_ptr as *mut Event); } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1event_1from_1json( env: JNIEnv, _: JObject, j_event_source: JString, j_result: JObject ) { let event_source = jstring_to_string(&env,j_event_source); let result = partial_event_from_json(&event_source.as_str()); match result { Ok(event) => { let event_pointer = 
Box::into_raw(Box::new(event)) as jlong; let jvm_long_field_id_type = "J"; let event_ptr_field_name = "resultPtr"; env.set_field( j_result, event_ptr_field_name, jvm_long_field_id_type, JValue::from(event_pointer), ) .unwrap(); } Err(err) => { let error_message = err.to_string(); let io_error = seshat::Error::IOError(err); let error_code = seshat_error_code(io_error); let jvm_int_field_id_type = "I"; let error_code_field_name = "errorCode"; env.set_field( j_result, error_code_field_name, jvm_int_field_id_type, JValue::from(error_code), ) .unwrap(); let jvm_int_field_id_type = "Ljava/lang/String;"; let error_message_field_name = "errorMessage"; let j_error_message = env.new_string(error_message).unwrap(); let jvalue = j_error_message.into_inner(); env.set_field( j_result, error_message_field_name, jvm_int_field_id_type, JValue::from(JObject::from(jvalue)), ) .unwrap(); } }; } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1type( _: JNIEnv, _: JObject, event_ptr: jlong, ) -> jlong { let event = Box::from_raw(event_ptr as *mut Event); let event_type = event.event_type.clone(); Box::leak(event); match event_type { EventType::Message => { 0 } EventType::Name => { 1 } EventType::Topic => { 2 } } } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1content_1value( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let content_value = event.content_value.clone(); Box::leak(event); let output = env.new_string(content_value).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1id( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let event_id = event.event_id.clone(); Box::leak(event); let output = env.new_string(event_id).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" 
fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1sender( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let sender = event.sender.clone(); Box::leak(event); let output = env.new_string(sender).unwrap(); output.into_inner() } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1server_1ts( _: JNIEnv, _: JObject, event_ptr: jlong, ) -> jlong { let event = Box::from_raw(event_ptr as *mut Event); let server_ts = event.server_ts.clone(); Box::leak(event); server_ts } #[no_mangle] pub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1room_1id( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring { let event = Box::from_raw(event_ptr as *mut Event); let room_id = event.room_id.clone(); Box::leak(event); let output = env.new_string(room_id).unwrap(); output.into_inner() } #[no_mangle] p
{ let event = Box::from_raw(event_ptr as *mut Event); let option = event.msgtype.clone(); let message_type = match option{ None => {String::from("")} Some(msgtype) => { msgtype } }; Box::leak(event); let output = env.new_string(message_type).unwrap(); output.into_inner() }
ub unsafe extern "C" fn Java_com_beeper_android_1seshat_event_Event_n_1get_1event_1message_1type( env: JNIEnv, _: JObject, event_ptr: jlong, ) -> jstring
function_block-random_span
[ { "content": "pub fn event_to_json(event: Event) -> Result<String,serde_json::error::Error> {\n\n match serde_json::to_string(&event){\n\n Ok(json) => {\n\n Ok(json)\n\n }\n\n Err(error) => {\n\n Err(error)\n\n }\n\n }\n\n}\n\n\n", "file_path": "bindings/src/utils.rs", "rank": 0, "score": 225629.87202621577 }, { "content": "pub fn event_from_json(event_source: &str) -> std::io::Result<Event> {\n\n let object: Value = serde_json::from_str(event_source)?;\n\n let content = &object[\"content\"];\n\n let event_type = &object[\"type\"];\n\n\n\n let event_type = match event_type.as_str().unwrap_or_default() {\n\n \"m.room.message\" => EventType::Message,\n\n \"m.room.name\" => EventType::Name,\n\n \"m.room.topic\" => EventType::Topic,\n\n _ => return Err(IoError::new(ErrorKind::Other, \"Invalid event type.\")),\n\n };\n\n\n\n let (content_value, msgtype) = match event_type {\n\n EventType::Message => (\n\n content[\"body\"]\n\n .as_str()\n\n .ok_or_else(|| IoError::new(ErrorKind::Other, \"No content value found\"))?,\n\n Some(\"m.text\"),\n\n ),\n\n EventType::Topic => (\n", "file_path": "bindings/src/utils.rs", "rank": 1, "score": 224892.771220179 }, { "content": "pub fn partial_event_from_json(event_source: &str) -> std::io::Result<Event> {\n\n let object: Value = serde_json::from_str(event_source)?;\n\n let content = &object[\"content_value\"];\n\n let event_type = &object[\"event_type\"];\n\n\n\n let event_type = match event_type.as_str().unwrap_or_default() {\n\n \"Message\" => EventType::Message,\n\n \"Name\" => EventType::Name,\n\n \"Topic\" => EventType::Topic,\n\n _ => return Err(IoError::new(ErrorKind::Other, \"Invalid event type.\")),\n\n };\n\n\n\n let (content_value, msgtype) = match event_type {\n\n EventType::Message => (\n\n content.as_str()\n\n .ok_or_else(|| IoError::new(ErrorKind::Other, \"No content value found\"))?,\n\n Some(\"m.text\"),\n\n ),\n\n EventType::Topic => (\n\n content.as_str()\n", "file_path": "bindings/src/utils.rs", "rank": 2, 
"score": 221611.06875563212 }, { "content": "fn fake_event() -> Event {\n\n let domain: String = FreeEmailProvider(EN).fake();\n\n\n\n Event::new(\n\n EventType::Message,\n\n \"Hello world\",\n\n Some(\"m.text\"),\n\n &format!(\"${}:{}\", (0..10).fake::<u8>(), &domain),\n\n &format!(\n\n \"@{}:{}\",\n\n Username(EN).fake::<String>(),\n\n FreeEmailProvider(EN).fake::<String>()\n\n ),\n\n 151636_2244026,\n\n \"!test_room:localhost\",\n\n EVENT_SOURCE,\n\n )\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 3, "score": 186042.50376670895 }, { "content": "#[test]\n\nfn delete_events() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(TOPIC_EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let searcher = db.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &SearchConfig::new())\n\n .unwrap()\n\n .results;\n\n assert_eq!(result.len(), 2);\n\n\n\n let receiver = db.delete_event(&EVENT.event_id);\n\n let result = receiver.recv().unwrap();\n\n result.unwrap();\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 4, "score": 159009.09638328568 }, { "content": "#[test]\n\nfn load_event() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let mut profile = Profile::new(\"Alice\", \"\");\n\n\n\n let mut event = EVENT.clone();\n\n Database::save_event(&db.connection.lock().unwrap(), &mut event, &mut profile).unwrap();\n\n let events = Database::load_events(\n\n &db.connection.lock().unwrap(),\n\n &[\n\n (1.0, \"$15163622445EBvZJ:localhost\".to_string()),\n\n (0.3, \"$FAKE\".to_string()),\n\n ],\n\n 0,\n\n 0,\n\n false,\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(*EVENT.source, events[0].event_source)\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 5, 
"score": 159009.09638328568 }, { "content": "#[test]\n\nfn delete_an_event() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(TOPIC_EVENT.clone(), profile);\n\n\n\n db.force_commit().unwrap();\n\n\n\n assert!(\n\n Database::load_pending_deletion_events(&db.connection.lock().unwrap())\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n let recv = db.delete_event(&EVENT.event_id);\n\n recv.recv().unwrap().unwrap();\n\n\n\n assert_eq!(\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 6, "score": 159009.09638328568 }, { "content": "#[test]\n\nfn store_event() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n let id = Database::save_profile(\n\n &db.connection.lock().unwrap(),\n\n \"@alice.example.org\",\n\n &profile,\n\n )\n\n .unwrap();\n\n\n\n let mut event = EVENT.clone();\n\n let id = Database::save_event_helper(&db.connection.lock().unwrap(), &mut event, id).unwrap();\n\n assert_eq!(id, 1);\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 7, "score": 159009.0963832857 }, { "content": "#[test]\n\nfn delete_an_event() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n writer.add_event(&EVENT);\n\n writer.add_event(&TOPIC_EVENT);\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &Default::default())\n\n .unwrap()\n\n .results;\n\n\n\n let event_id = &EVENT.event_id;\n\n\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 8, "score": 159009.0963832857 }, { "content": "#[test]\n\nfn 
add_an_event() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n writer.add_event(&EVENT);\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &Default::default())\n\n .unwrap()\n\n .results;\n\n\n\n let event_id = EVENT.event_id.to_string();\n\n\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0].1, event_id)\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 9, "score": 159009.0963832857 }, { "content": "#[test]\n\nfn event_count() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n assert_eq!(writer.added_events, 0);\n\n writer.add_event(&EVENT);\n\n assert_eq!(writer.added_events, 1);\n\n\n\n writer.force_commit().unwrap();\n\n assert_eq!(writer.added_events, 0);\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 10, "score": 159009.0963832857 }, { "content": "#[test]\n\nfn duplicate_events() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(EVENT.clone(), profile);\n\n\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let searcher = db.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &Default::default())\n\n .unwrap()\n\n .results;\n\n assert_eq!(result.len(), 1);\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 11, "score": 159009.09638328568 }, { "content": "pub fn seshat_error_code(error: Error) -> i32 {\n\n match error {\n\n Error::PoolError(_) 
=> 0,\n\n Error::DatabaseError(_) => 1,\n\n Error::IndexError(_) => 2,\n\n Error::FsError(_) => 3,\n\n Error::IOError(_) => 4,\n\n Error::DatabaseUnlockError(_) => 5,\n\n Error::DatabaseVersionError => 6,\n\n Error::DatabaseOpenError(_) => 7,\n\n Error::SqlCipherError(_) => 8,\n\n Error::ReindexError => 9,\n\n }\n\n}\n\n\n", "file_path": "bindings/src/utils.rs", "rank": 12, "score": 157358.89155442256 }, { "content": "#[test]\n\nfn store_event_and_profile() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let mut profile = Profile::new(\"Alice\", \"\");\n\n let mut event = EVENT.clone();\n\n Database::save_event(&db.connection.lock().unwrap(), &mut event, &mut profile).unwrap();\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 13, "score": 155616.87345346005 }, { "content": "#[test]\n\nfn load_file_events() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(FILE_EVENT.clone(), profile.clone());\n\n db.add_event(IMAGE_EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let connection = db.get_connection().unwrap();\n\n\n\n let mut config = LoadConfig::new(&FILE_EVENT.room_id).limit(10);\n\n\n\n let result = connection\n\n .load_file_events(&config)\n\n .expect(\"Can't load file events\");\n\n assert!(!result.is_empty());\n\n assert!(result.len() == 2);\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 14, "score": 155616.87345346002 }, { "content": "#[test]\n\nfn add_differing_events() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(TOPIC_EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n 
db.reload().unwrap();\n\n\n\n let searcher = db.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &SearchConfig::new())\n\n .unwrap()\n\n .results;\n\n assert_eq!(result.len(), 2);\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 15, "score": 155616.87345346002 }, { "content": "#[test]\n\nfn save_the_event_multithreaded() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile);\n\n db.commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let events = Database::load_events(\n\n &db.connection.lock().unwrap(),\n\n &[\n\n (1.0, \"$15163622445EBvZJ:localhost\".to_string()),\n\n (0.3, \"$FAKE\".to_string()),\n\n ],\n\n 0,\n\n 0,\n\n false,\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(*EVENT.source, events[0].event_source)\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 16, "score": 155616.87345346002 }, { "content": "#[test]\n\nfn create_event_db() {\n\n let tmpdir = tempdir().unwrap();\n\n let _db = Database::new(tmpdir.path()).unwrap();\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 17, "score": 155616.87345346002 }, { "content": "#[test]\n\nfn load_event_context() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n\n\n let mut before_event = None;\n\n\n\n for i in 1..6 {\n\n let mut event: Event = Faker.fake();\n\n event.server_ts = EVENT.server_ts - i;\n\n event.source = format!(\"Hello before event {}\", i);\n\n\n\n if before_event.is_none() {\n\n before_event = Some(event.clone());\n\n }\n\n\n\n db.add_event(event, profile.clone());\n\n }\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 18, "score": 155616.87345346002 }, { "content": "#[test]\n\nfn save_and_search_historic_events() 
{\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n let mut events = Vec::new();\n\n\n\n for i in 1..6 {\n\n let mut event: Event = fake_event();\n\n event.server_ts = EVENT.server_ts - i;\n\n event.source = format!(\"Hello before event {}\", i);\n\n events.push((event, profile.clone()));\n\n }\n\n\n\n let checkpoint = CrawlerCheckpoint {\n\n room_id: \"!test:room\".to_string(),\n\n token: \"1234\".to_string(),\n\n full_crawl: false,\n\n direction: CheckpointDirection::Backwards,\n\n };\n\n\n\n let receiver = db.add_historic_events(events, Some(checkpoint.clone()), None);\n\n let ret = receiver.recv().unwrap();\n\n assert!(ret.is_ok());\n\n let connection = db.get_connection().unwrap();\n\n\n\n let checkpoints = connection.load_checkpoints().unwrap();\n\n assert!(checkpoints.contains(&checkpoint));\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 19, "score": 152393.36880142623 }, { "content": "#[test]\n\nfn load_file_events_directions() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.add_event(FILE_EVENT.clone(), profile.clone());\n\n db.add_event(IMAGE_EVENT.clone(), profile.clone());\n\n db.add_event(VIDEO_EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let connection = db.get_connection().unwrap();\n\n\n\n // Get the newest event.\n\n let mut config = LoadConfig::new(&FILE_EVENT.room_id).limit(1);\n\n let result = connection.load_file_events(&config).unwrap();\n\n\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0].0, VIDEO_EVENT.source);\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 20, "score": 152393.36880142623 }, { "content": "#[test]\n\nfn add_events_to_differing_rooms() {\n\n let tmpdir = 
TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let event_id = EVENT.event_id.to_string();\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n let mut event2 = EVENT.clone();\n\n event2.room_id = \"!Test2:room\".to_string();\n\n\n\n writer.add_event(&EVENT);\n\n writer.add_event(&event2);\n\n\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &SearchConfig::new().for_room(&EVENT.room_id))\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 21, "score": 152393.36880142623 }, { "content": "#[test]\n\nfn add_events_with_null_byte() {\n\n let event_source: &str = r#\"{\n\n \"content\": {\n\n \"body\": \"\\u00000\",\n\n \"msgtype\": \"m.text\"\n\n },\n\n \"event_id\": \"$15163622448EBvZJ:localhost\",\n\n \"origin_server_ts\": 1516362244050,\n\n \"sender\": \"@example2:localhost\",\n\n \"type\": \"m.room.message\",\n\n \"unsigned\": {\"age\": 43289803098},\n\n \"user_id\": \"@example2:localhost\",\n\n \"age\": 43289803098,\n\n \"room_id\": \"!test:example.org\"\n\n }\"#;\n\n\n\n let event = RecoveryDatabase::event_from_json(event_source).unwrap();\n\n\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", &event.content_value);\n\n\n\n let events = vec![(event, profile)];\n\n db.add_historic_events(events, None, None)\n\n .recv()\n\n .unwrap()\n\n .expect(\"Event should be added\");\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 22, "score": 152393.36880142623 }, { "content": "pub fn recv_error_code() -> i32 {\n\n 10\n\n}\n\n\n\npub unsafe fn j_object_to_checkpoint_option(env: &JNIEnv, j_native_option_checkpoint: JObject) -> Option<CrawlerCheckpoint> {\n\n let has_new_checkpoint_j_value = env.call_method(j_native_option_checkpoint, \"hasSome\", 
\"()Z\", &[]).unwrap();\n\n let has_new_checkpoint = has_new_checkpoint_j_value.z().unwrap();\n\n match has_new_checkpoint {\n\n true => {\n\n let checkpoint_ptr_j_value = env.call_method(j_native_option_checkpoint, \"getValue\", \"()J\", &[]).unwrap();\n\n let checkpoint_ptr = checkpoint_ptr_j_value.j().unwrap();\n\n let checkpoint = Box::from_raw(checkpoint_ptr as *mut CrawlerCheckpoint);\n\n let option = Some((*checkpoint).clone());\n\n Box::leak(checkpoint);\n\n option\n\n },\n\n false => None,\n\n }\n\n}\n\n\n", "file_path": "bindings/src/utils.rs", "rank": 23, "score": 144843.1290914456 }, { "content": "#[test]\n\nfn is_empty() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let connection = db.get_connection().unwrap();\n\n assert!(connection.is_empty().unwrap());\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n db.add_event(EVENT.clone(), profile);\n\n db.commit().unwrap();\n\n assert!(!connection.is_empty().unwrap());\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 24, "score": 124535.8497786024 }, { "content": "#[test]\n\nfn delete() {\n\n let tmpdir = tempdir().unwrap();\n\n let path: &Path = tmpdir.path();\n\n\n\n assert!(path.exists());\n\n\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n db.delete().unwrap();\n\n\n\n assert!(!path.exists());\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 25, "score": 124535.8497786024 }, { "content": "#[cfg(feature = \"encryption\")]\n\n#[test]\n\nfn encrypted_db() {\n\n let tmpdir = tempdir().unwrap();\n\n let db_config = Config::new().set_passphrase(\"test\");\n\n let mut db = match Database::new_with_config(tmpdir.path(), &db_config) {\n\n Ok(db) => db,\n\n Err(e) => panic!(\"Coulnd't open encrypted database {}\", e),\n\n };\n\n\n\n let connection = match db.get_connection() {\n\n Ok(c) => c,\n\n Err(e) => panic!(\"Could not get database connection {}\", e),\n\n };\n\n\n\n assert!(\n\n 
connection.is_empty().unwrap(),\n\n \"New database should be empty\"\n\n );\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n db.add_event(EVENT.clone(), profile);\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 26, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn resume_committing() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n // Check that we don't have any uncommitted events.\n\n assert!(\n\n Database::load_uncommitted_events(&db.connection.lock().unwrap())\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n db.add_event(EVENT.clone(), profile);\n\n db.commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n // Now we do have uncommitted events.\n\n assert!(\n\n !Database::load_uncommitted_events(&db.connection.lock().unwrap())\n\n .unwrap()\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 27, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn user_version() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let connection = db.get_connection().unwrap();\n\n\n\n assert_eq!(connection.get_user_version().unwrap(), 0);\n\n connection.set_user_version(10).unwrap();\n\n assert_eq!(connection.get_user_version().unwrap(), 10);\n\n}\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 28, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn switch_languages() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n writer.add_event(&EVENT);\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &Default::default())\n\n .unwrap()\n\n .results;\n\n\n\n let event_id = 
EVENT.event_id.to_string();\n\n\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0].1, event_id);\n\n\n\n drop(index);\n\n\n\n let config = Config::new().set_language(&Language::German);\n\n let index = Index::new(&tmpdir, &config);\n\n\n\n assert!(index.is_err())\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 29, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn japanese_tokenizer() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::Japanese);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n for event in JAPANESE_EVENTS.iter() {\n\n writer.add_event(event);\n\n }\n\n\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let result = searcher\n\n .search(\"伝説\", &Default::default())\n\n .unwrap()\n\n .results;\n\n\n\n let event_id = JAPANESE_EVENTS[1].event_id.to_string();\n\n\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0].1, event_id);\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 30, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn create_db() {\n\n let tmpdir = tempdir().unwrap();\n\n let _db = Database::new(tmpdir.path()).unwrap();\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 31, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn is_room_indexed() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n\n\n let connection = db.get_connection().unwrap();\n\n\n\n assert!(connection.is_empty().unwrap());\n\n assert!(!connection.is_room_indexed(\"!test_room:localhost\").unwrap());\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n db.add_event(EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n\n\n assert!(connection.is_room_indexed(\"!test_room:localhost\").unwrap());\n\n 
assert!(!connection.is_room_indexed(\"!test_room2:localhost\").unwrap());\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 32, "score": 121957.01745193911 }, { "content": "#[cfg(feature = \"encryption\")]\n\n#[test]\n\nfn change_passphrase() {\n\n let tmpdir = tempdir().unwrap();\n\n let db_config = Config::new().set_passphrase(\"test\");\n\n let mut db = match Database::new_with_config(tmpdir.path(), &db_config) {\n\n Ok(db) => db,\n\n Err(e) => panic!(\"Coulnd't open encrypted database {}\", e),\n\n };\n\n\n\n let connection = db\n\n .get_connection()\n\n .expect(\"Could not get database connection\");\n\n assert!(\n\n connection.is_empty().unwrap(),\n\n \"New database should be empty\"\n\n );\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n db.add_event(EVENT.clone(), profile);\n\n\n\n db.commit().expect(\"Could not commit events to database\");\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 33, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn paginated_search() {\n\n let tmpdir = TempDir::new().unwrap();\n\n let config = Config::new().set_language(&Language::English);\n\n let index = Index::new(&tmpdir, &config).unwrap();\n\n\n\n let mut writer = index.get_writer().unwrap();\n\n\n\n writer.add_event(&EVENT);\n\n writer.add_event(&TOPIC_EVENT);\n\n writer.force_commit().unwrap();\n\n index.reload().unwrap();\n\n\n\n let searcher = index.get_searcher();\n\n let first_search = searcher\n\n .search(\"Test\", SearchConfig::new().limit(1))\n\n .unwrap();\n\n\n\n assert_eq!(first_search.results.len(), 1);\n\n\n\n let second_search = searcher\n", "file_path": "bindings/seshat/src/index/mod.rs", "rank": 34, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn load_a_profile() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n let user_id = \"@alice.example.org\";\n\n let profile_id =\n\n 
Database::save_profile(&db.connection.lock().unwrap(), user_id, &profile).unwrap();\n\n\n\n let loaded_profile =\n\n Database::load_profile(&db.connection.lock().unwrap(), profile_id).unwrap();\n\n\n\n assert_eq!(profile, loaded_profile);\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 35, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn stats_getting() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n for i in 0..1000 {\n\n let mut event: Event = Faker.fake();\n\n event.server_ts += i;\n\n db.add_event(event, profile.clone());\n\n }\n\n\n\n db.commit().unwrap();\n\n\n\n let connection = db.get_connection().unwrap();\n\n\n\n let stats = connection.get_stats().unwrap();\n\n\n\n assert_eq!(stats.event_count, 1000);\n\n assert_eq!(stats.room_count, 1);\n\n assert!(stats.size > 0);\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 36, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn save_and_search() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let result = db.search(\"Test\", &Default::default()).unwrap().results;\n\n assert!(!result.is_empty());\n\n assert_eq!(result[0].event_source, EVENT.source);\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 37, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn commit_a_write() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n db.commit().unwrap();\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 38, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn store_profile() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = 
Database::new(tmpdir.path()).unwrap();\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n let id = Database::save_profile(\n\n &db.connection.lock().unwrap(),\n\n \"@alice.example.org\",\n\n &profile,\n\n );\n\n assert_eq!(id.unwrap(), 1);\n\n\n\n let id = Database::save_profile(\n\n &db.connection.lock().unwrap(),\n\n \"@alice.example.org\",\n\n &profile,\n\n );\n\n assert_eq!(id.unwrap(), 1);\n\n\n\n let profile_new = Profile::new(\"Alice\", \"mxc://some_url\");\n\n\n\n let id = Database::save_profile(\n\n &db.connection.lock().unwrap(),\n\n \"@alice.example.org\",\n\n &profile_new,\n\n );\n\n assert_eq!(id.unwrap(), 2);\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 39, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn delete_uncommitted() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n for i in 1..1000 {\n\n let mut event: Event = Faker.fake();\n\n event.server_ts += i;\n\n db.add_event(event, profile.clone());\n\n\n\n if i % 100 == 0 {\n\n db.commit().unwrap();\n\n }\n\n }\n\n\n\n db.force_commit().unwrap();\n\n assert!(\n\n Database::load_uncommitted_events(&db.connection.lock().unwrap())\n\n .unwrap()\n\n .is_empty()\n\n );\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 40, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn get_size() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n\n\n let mut before_event = None;\n\n\n\n for i in 1..6 {\n\n let mut event: Event = fake_event();\n\n event.server_ts = EVENT.server_ts - i;\n\n event.source = format!(\"Hello before event {}\", i);\n\n\n\n if before_event.is_none() {\n\n before_event = Some(event.clone());\n\n }\n\n\n\n db.add_event(event, profile.clone());\n\n }\n\n 
db.force_commit().unwrap();\n\n assert!(db.get_size().unwrap() > 0);\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 41, "score": 121957.01745193911 }, { "content": "#[test]\n\nfn search_with_specific_key() {\n\n let tmpdir = tempdir().unwrap();\n\n let mut db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n let searcher = db.get_searcher();\n\n\n\n db.add_event(EVENT.clone(), profile.clone());\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let result = searcher\n\n .search(\"Test\", &SearchConfig::new().with_key(EventType::Topic))\n\n .unwrap()\n\n .results;\n\n assert!(result.is_empty());\n\n\n\n db.add_event(TOPIC_EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let searcher = db.get_searcher();\n\n let result = searcher\n\n .search(\"Test\", &SearchConfig::new().with_key(EventType::Topic))\n\n .unwrap()\n\n .results;\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0].event_source, TOPIC_EVENT.source)\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 42, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn database_upgrade_v1_2() {\n\n let mut path = PathBuf::from(file!());\n\n path.pop();\n\n path.pop();\n\n path.pop();\n\n path.push(\"data/database/v1_2\");\n\n let db = Database::new(&path);\n\n match db {\n\n Ok(_) => panic!(\"Database doesn't need a reindex.\"),\n\n Err(e) => match e {\n\n Error::ReindexError => (),\n\n e => panic!(\"Database doesn't need a reindex: {}\", e),\n\n },\n\n }\n\n\n\n let mut recovery_db = RecoveryDatabase::new(&path).expect(\"Can't open recovery db\");\n\n\n\n recovery_db.delete_the_index().unwrap();\n\n recovery_db.open_index().unwrap();\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 43, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn store_empty_profile() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = 
Database::new(tmpdir.path()).unwrap();\n\n\n\n let profile = Profile {\n\n displayname: None,\n\n avatar_url: None,\n\n };\n\n let id = Database::save_profile(\n\n &db.connection.lock().unwrap(),\n\n \"@alice.example.org\",\n\n &profile,\n\n );\n\n assert_eq!(id.unwrap(), 1);\n\n}\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 44, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn save_and_load_checkpoints() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n\n\n let checkpoint = CrawlerCheckpoint {\n\n room_id: \"!test:room\".to_string(),\n\n token: \"1234\".to_string(),\n\n full_crawl: false,\n\n direction: CheckpointDirection::Backwards,\n\n };\n\n\n\n let mut connection = db.get_connection().unwrap();\n\n let transaction = connection.transaction().unwrap();\n\n\n\n Database::replace_crawler_checkpoint(&transaction, Some(&checkpoint), None).unwrap();\n\n transaction.commit().unwrap();\n\n\n\n let checkpoints = connection.load_checkpoints().unwrap();\n\n\n\n println!(\"{:?}\", checkpoints);\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 45, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn database_upgrade_v1() {\n\n let mut path = PathBuf::from(file!());\n\n path.pop();\n\n path.pop();\n\n path.pop();\n\n path.push(\"data/database/v1\");\n\n let db = Database::new(path);\n\n\n\n // Sadly the v1 database has invalid json in the source field, reindexing it\n\n // won't be possible. 
Let's check that it's marked for a reindex.\n\n match db {\n\n Ok(_) => panic!(\"Database doesn't need a reindex.\"),\n\n Err(e) => match e {\n\n Error::ReindexError => (),\n\n e => panic!(\"Database doesn't need a reindex: {}\", e),\n\n },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nuse crate::database::recovery::test::reindex_loop;\n\n\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 46, "score": 119509.71900223337 }, { "content": "#[cfg(feature = \"encryption\")]\n\n#[test]\n\nfn encrypted_save_and_search() {\n\n let tmpdir = tempdir().unwrap();\n\n let db_config = Config::new().set_passphrase(\"wordpass\");\n\n let mut db = Database::new_with_config(tmpdir.path(), &db_config).unwrap();\n\n let profile = Profile::new(\"Alice\", \"\");\n\n\n\n db.add_event(EVENT.clone(), profile);\n\n db.force_commit().unwrap();\n\n db.reload().unwrap();\n\n\n\n let result = db.search(\"Test\", &Default::default()).unwrap().results;\n\n assert!(!result.is_empty());\n\n assert_eq!(result[0].event_source, EVENT.source);\n\n}\n\n\n", "file_path": "bindings/seshat/tests/integration_test.rs", "rank": 47, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn duplicate_empty_profiles() {\n\n let tmpdir = tempdir().unwrap();\n\n let db = Database::new(tmpdir.path()).unwrap();\n\n let profile = Profile {\n\n displayname: None,\n\n avatar_url: None,\n\n };\n\n let user_id = \"@alice.example.org\";\n\n\n\n let first_id =\n\n Database::save_profile(&db.connection.lock().unwrap(), user_id, &profile).unwrap();\n\n let second_id =\n\n Database::save_profile(&db.connection.lock().unwrap(), user_id, &profile).unwrap();\n\n\n\n assert_eq!(first_id, second_id);\n\n\n\n let connection = db.connection.lock().unwrap();\n\n\n\n let mut stmt = connection\n\n .prepare(\"SELECT id FROM profile WHERE user_id=?1\")\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 48, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn change_passphrase() {\n\n let tmpdir = 
tempdir().unwrap();\n\n let dir = EncryptedMmapDirectory::open_or_create(tmpdir.path(), \"wordpass\", PBKDF_COUNT)\n\n .expect(\"Can't create a new store\");\n\n\n\n drop(dir);\n\n EncryptedMmapDirectory::change_passphrase(tmpdir.path(), \"wordpass\", \"password\", PBKDF_COUNT)\n\n .expect(\"Can't change passphrase\");\n\n let dir = EncryptedMmapDirectory::open(tmpdir.path(), \"wordpass\");\n\n assert!(\n\n dir.is_err(),\n\n \"Opened an existing store with the old passphrase\"\n\n );\n\n let _ = EncryptedMmapDirectory::open(tmpdir.path(), \"password\")\n\n .expect(\"Can't open the store with the new passphrase\");\n\n}\n", "file_path": "bindings/seshat/src/index/encrypted_dir.rs", "rank": 49, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn enc_unaligned() {\n\n let orig = [0u8; 16];\n\n let key = [0u8; 16];\n\n let hmac_key = [0u8; 16];\n\n\n\n let mut enc = Vec::new();\n\n {\n\n let mut aes =\n\n AesWriter::<Aes128Ctr, Hmac<Sha256>, _>::new(&mut enc, &key, &hmac_key, 16).unwrap();\n\n for chunk in orig.chunks(3) {\n\n aes.write_all(&chunk).unwrap();\n\n }\n\n }\n\n let dec = decrypt(Cursor::new(&enc));\n\n assert_eq!(dec, &orig);\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 50, "score": 119509.71900223337 }, { "content": "#[test]\n\nfn enc_dec_single() {\n\n let orig = [0u8; 16];\n\n let enc = encrypt(&orig);\n\n let dec = decrypt(Cursor::new(&enc));\n\n assert_eq!(dec, &orig);\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 51, "score": 117184.14132036 }, { "content": "#[test]\n\nfn dec_read_unaligned() {\n\n let orig = [0u8; 16];\n\n let enc = encrypt(&orig);\n\n\n\n let key = [0u8; 16];\n\n let mut dec: Vec<u8> = Vec::new();\n\n let mut aes =\n\n AesReader::<Aes128Ctr, _>::new::<Hmac<Sha256>>(Cursor::new(&enc), &key, &key, 16, 32)\n\n .unwrap();\n\n loop {\n\n let mut buf = [0u8; 3];\n\n let read = aes.read(&mut buf).unwrap();\n\n dec.extend(&buf[..read]);\n\n if read == 0 {\n\n 
break;\n\n }\n\n }\n\n assert_eq!(dec, &orig);\n\n}\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 52, "score": 117184.14132036 }, { "content": "#[test]\n\nfn enc_dec_single_full() {\n\n let orig = [0u8; 16];\n\n let enc = encrypt(&orig);\n\n let dec = decrypt(Cursor::new(&enc));\n\n assert_eq!(dec, &orig);\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 53, "score": 114971.42375615417 }, { "content": "#[test]\n\nfn create_store_with_empty_passphrase() {\n\n let tmpdir = tempdir().unwrap();\n\n let dir = EncryptedMmapDirectory::open(tmpdir.path(), \"\");\n\n assert!(\n\n dir.is_err(),\n\n \"Opened an existing store with the wrong passphrase\"\n\n );\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_dir.rs", "rank": 54, "score": 114971.42375615417 }, { "content": "#[test]\n\nfn create_new_store_and_reopen() {\n\n let tmpdir = tempdir().unwrap();\n\n let dir = EncryptedMmapDirectory::open_or_create(tmpdir.path(), \"wordpass\", PBKDF_COUNT)\n\n .expect(\"Can't create a new store\");\n\n drop(dir);\n\n let dir = EncryptedMmapDirectory::open(tmpdir.path(), \"wordpass\")\n\n .expect(\"Can't open the existing store\");\n\n drop(dir);\n\n let dir = EncryptedMmapDirectory::open(tmpdir.path(), \"password\");\n\n assert!(\n\n dir.is_err(),\n\n \"Opened an existing store with the wrong passphrase\"\n\n );\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_dir.rs", "rank": 55, "score": 114971.42375615417 }, { "content": "class NativeSerializedEventsResult{\n\n val array = mutableListOf<String>()\n\n fun add(serializedEvent:String){\n\n array.add(serializedEvent)\n\n }\n\n}", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/NativeSerializedEventsResult.kt", "rank": 56, "score": 114417.06664995679 }, { "content": "package com.beeper.android_seshat.event\n\n\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/NativeSerializedEventsResult.kt", 
"rank": 57, "score": 104558.06037299341 }, { "content": "type WriterRet = (JoinHandle<()>, Sender<ThreadMessage>);\n\n\n\nimpl Database {\n\n /// Create a new Seshat database or open an existing one.\n\n /// # Arguments\n\n ///\n\n /// * `path` - The directory where the database will be stored in. This\n\n /// should be an empty directory if a new database should be created.\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<Database>\n\n where\n\n PathBuf: std::convert::From<P>,\n\n {\n\n Database::new_with_config(path, &Config::new())\n\n }\n\n\n\n /// Create a new Seshat database or open an existing one with the given\n\n /// configuration.\n\n /// # Arguments\n\n ///\n\n /// * `path` - The directory where the database will be stored in. This\n", "file_path": "bindings/seshat/src/database/mod.rs", "rank": 58, "score": 104487.52706539244 }, { "content": " fmt::{Display, Formatter},\n\n sync::mpsc::Sender,\n\n};\n\n\n\nuse crate::error::Result;\n\n\n\n#[cfg(test)]\n\nuse fake::faker::internet::raw::*;\n\n#[cfg(test)]\n\nuse fake::locales::*;\n\n#[cfg(test)]\n\nuse fake::{Dummy, Fake};\n\n\n\n/// Matrix event types.\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize, Deserialize)]\n\npub enum EventType {\n\n /// Matrix room messages, corresponds to the m.room.message type, has a body\n\n /// inside of the content.\n\n #[serde(alias = \"m.room.message\", alias = \"content.body\")]\n\n Message,\n", "file_path": "bindings/seshat/src/events.rs", "rank": 59, "score": 103926.04173359754 }, { "content": "pub(crate) type EventContext = (\n\n Vec<SerializedEvent>,\n\n Vec<SerializedEvent>,\n\n HashMap<MxId, Profile>,\n\n);\n\n\n\npub(crate) type RoomId = String;\n\npub(crate) type MxId = String;\n\npub(crate) type EventId = String;\n\npub(crate) type SerializedEvent = String;\n\n\n\nimpl Event {\n\n /// Create a new event.\n\n /// # Arguments\n\n ///\n\n /// * `event_type` - The type of the event.\n\n /// * `content_value` - The plain text value of the 
content, body for a\n\n /// message event, topic for a topic event and name for a name event.\n\n /// * `event_id` - The unique identifier of the event.\n\n /// * `sender` - The unique identifier of the event author.\n", "file_path": "bindings/seshat/src/events.rs", "rank": 60, "score": 103925.19452326042 }, { "content": " &format!(\"${}:{}\", (0..std::u64::MAX).fake::<u64>(), &domain),\n\n &format!(\n\n \"@{}:{}\",\n\n Username(EN).fake::<String>(),\n\n FreeEmailProvider(EN).fake::<String>()\n\n ),\n\n 151636_2244026,\n\n \"!test_room:localhost\",\n\n EVENT_SOURCE,\n\n )\n\n }\n\n}\n\n\n\npub(crate) type HistoricEventsT = (\n\n Option<CrawlerCheckpoint>,\n\n Option<CrawlerCheckpoint>,\n\n Vec<(Event, Profile)>,\n\n Sender<Result<bool>>,\n\n);\n\n\n", "file_path": "bindings/seshat/src/events.rs", "rank": 61, "score": 103924.38285003744 }, { "content": "}\n\n\n\nimpl ToSql for EventType {\n\n fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {\n\n Ok(ToSqlOutput::from(format!(\"{}\", self)))\n\n }\n\n}\n\n\n\nimpl FromSql for EventType {\n\n fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {\n\n match value {\n\n ValueRef::Text(s) => {\n\n let s = std::str::from_utf8(s).map_err(|e| FromSqlError::Other(Box::new(e)))?;\n\n\n\n let e = match s {\n\n \"m.room.message\" => EventType::Message,\n\n \"m.room.name\" => EventType::Name,\n\n \"m.room.topic\" => EventType::Topic,\n\n _ => return Err(FromSqlError::InvalidType),\n\n };\n", "file_path": "bindings/seshat/src/events.rs", "rank": 62, "score": 103924.08230659006 }, { "content": "}\n\n\n\nimpl Display for CheckpointDirection {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n let string = match self {\n\n CheckpointDirection::Forwards => \"Forwards\",\n\n CheckpointDirection::Backwards => \"Backwards\",\n\n };\n\n\n\n write!(f, \"{}\", string)\n\n }\n\n}\n\n\n\nimpl ToSql for CheckpointDirection {\n\n fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {\n\n 
Ok(ToSqlOutput::from(format!(\"{}\", self)))\n\n }\n\n}\n\n\n\nimpl FromSql for CheckpointDirection {\n", "file_path": "bindings/seshat/src/events.rs", "rank": 63, "score": 103922.6103216161 }, { "content": " /// The MXID of the user who sent this event.\n\n pub sender: String,\n\n /// Timestamp in milliseconds on the originating Homeserver when this event\n\n /// was sent.\n\n pub server_ts: i64,\n\n /// The ID of the room associated with this event.\n\n pub room_id: String,\n\n /// The serialized JSON string of the event. This string will be returned\n\n /// by a search later on.\n\n pub source: String,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl<T> Dummy<T> for Event {\n\n fn dummy_with_rng<R: ?Sized>(_config: &T, _rng: &mut R) -> Self {\n\n let domain: String = FreeEmailProvider(EN).fake();\n\n Event::new(\n\n EventType::Message,\n\n \"Hello world\",\n\n Some(\"m.text\"),\n", "file_path": "bindings/seshat/src/events.rs", "rank": 64, "score": 103922.00683691444 }, { "content": " /// Matrix room messages, corresponds to the m.room.name type, has a name\n\n /// inside of the content.\n\n #[serde(alias = \"m.room.name\", alias = \"content.name\")]\n\n Name,\n\n /// Matrix room messages, corresponds to the m.room.topic type, has a topic\n\n /// inside of the content.\n\n #[serde(alias = \"m.room.topic\", alias = \"content.topic\")]\n\n Topic,\n\n}\n\n\n\nimpl Display for EventType {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n let string = match self {\n\n EventType::Message => \"m.room.message\",\n\n EventType::Topic => \"m.room.topic\",\n\n EventType::Name => \"m.room.name\",\n\n };\n\n\n\n write!(f, \"{}\", string)\n\n }\n", "file_path": "bindings/seshat/src/events.rs", "rank": 65, "score": 103920.61148024604 }, { "content": "// Copyright 2019 The Matrix.org Foundation C.I.C.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse rusqlite::{\n\n types::{FromSql, FromSqlError, FromSqlResult, ToSqlOutput, ValueRef},\n\n ToSql,\n\n};\n\nuse std::{\n\n collections::HashMap,\n", "file_path": "bindings/seshat/src/events.rs", "rank": 66, "score": 103920.473468868 }, { "content": " msgtype,\n\n event_id: event_id.to_string(),\n\n sender: sender.to_string(),\n\n server_ts,\n\n room_id: room_id.to_string(),\n\n source: source.to_string(),\n\n }\n\n }\n\n}\n\n\n\n/// A users profile information at the time an event was posted.\n\n#[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)]\n\npub struct Profile {\n\n /// The users display name if one is set.\n\n pub displayname: Option<String>,\n\n /// The user's avatar URL if they have set one.\n\n pub avatar_url: Option<String>,\n\n}\n\n\n\nimpl Profile {\n", "file_path": "bindings/seshat/src/events.rs", "rank": 67, "score": 103920.18650891245 }, { "content": " None,\n\n \"$15163622445EBvZE:localhost\",\n\n \"@example2:localhost\",\n\n 151636_2244038,\n\n \"!test_room:localhost\",\n\n TOPIC_EVENT_SOURCE,\n\n );\n\n}\n\n\n\n#[cfg(test)]\n\nlazy_static! 
{\n\n pub static ref JAPANESE_EVENTS: Vec<Event> = vec![\n\n Event::new(\n\n EventType::Message,\n\n \"日本語の本文\",\n\n Some(\"m.text\"),\n\n \"$15163622445EBvZE:localhost\",\n\n \"@example2:localhost\",\n\n 151636_2244038,\n\n \"!test_room:localhost\",\n", "file_path": "bindings/seshat/src/events.rs", "rank": 68, "score": 103919.12697366952 }, { "content": " \"sender\": \"@example2:localhost\",\n\n \"type\": \"m.room.message\",\n\n \"unsigned\": {\"age\": 43289803095},\n\n \"user_id\": \"@example2:localhost\",\n\n \"age\": 43289803095\n\n}\"#;\n\n\n\n#[cfg(test)]\n\npub static TOPIC_EVENT_SOURCE: &str = r#\"{\n\n \"content\": {\n\n \"topic\": \"Test topic\"\n\n },\n\n \"event_id\": \"$15163622448EBvZJ:localhost\",\n\n \"origin_server_ts\": 1516362244050,\n\n \"sender\": \"@example2:localhost\",\n\n \"type\": \"m.room.topic\",\n\n \"unsigned\": {\"age\": 43289803098},\n\n \"user_id\": \"@example2:localhost\",\n\n \"age\": 43289803098\n\n}\"#;\n", "file_path": "bindings/seshat/src/events.rs", "rank": 69, "score": 103918.80459859941 }, { "content": " /// * `server_ts` - The timestamp of the event.\n\n /// * `room_id` - The unique identifier of the room that the event belongs\n\n /// to.\n\n /// * `source` - The serialized version of the event.\n\n #[allow(clippy::too_many_arguments)]\n\n pub fn new(\n\n event_type: EventType,\n\n content_value: &str,\n\n msgtype: Option<&str>,\n\n event_id: &str,\n\n sender: &str,\n\n server_ts: i64,\n\n room_id: &str,\n\n source: &str,\n\n ) -> Event {\n\n let msgtype = msgtype.map(|t| t.to_string());\n\n\n\n Event {\n\n event_type,\n\n content_value: content_value.to_string(),\n", "file_path": "bindings/seshat/src/events.rs", "rank": 70, "score": 103918.54376145027 }, { "content": " /// The unique id of the room that this checkpoint belongs to.\n\n pub room_id: String,\n\n /// The token that can be used to go further back in the event timeline of\n\n /// the room and fetch more messages from the room history.\n\n pub token: 
String,\n\n /// Is this a checkpoint for a complete crawl of the message history.\n\n // bool defaults to `false`\n\n #[serde(default)]\n\n pub full_crawl: bool,\n\n /// The direction which should be used to crawl the room timeline.\n\n pub direction: CheckpointDirection,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]\n\n#[allow(missing_docs)]\n\npub enum CheckpointDirection {\n\n #[serde(rename = \"f\", alias = \"forwards\", alias = \"forward\")]\n\n Forwards,\n\n #[serde(rename = \"b\", alias = \"backwards\", alias = \"backward\")]\n\n Backwards,\n", "file_path": "bindings/seshat/src/events.rs", "rank": 71, "score": 103918.35208110933 }, { "content": "\n\n Ok(e)\n\n }\n\n _ => Err(FromSqlError::InvalidType),\n\n }\n\n }\n\n}\n\n\n\n/// Matrix event that can be added to the database.\n\n#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]\n\npub struct Event {\n\n /// The type of the event.\n\n pub event_type: EventType,\n\n /// The textual representation of a message, this part of the event will be\n\n /// indexed.\n\n pub content_value: String,\n\n /// The type of the message if the event is of a m.room.message type.\n\n pub msgtype: Option<String>,\n\n /// The unique identifier of this event.\n\n pub event_id: String,\n", "file_path": "bindings/seshat/src/events.rs", "rank": 72, "score": 103918.24805460335 }, { "content": "\n\n#[cfg(test)]\n\nlazy_static! {\n\n pub static ref EVENT: Event = Event::new(\n\n EventType::Message,\n\n \"Test message\",\n\n Some(\"m.text\"),\n\n \"$15163622445EBvZJ:localhost\",\n\n \"@example2:localhost\",\n\n 151636_2244026,\n\n \"!test_room:localhost\",\n\n EVENT_SOURCE,\n\n );\n\n}\n\n\n\n#[cfg(test)]\n\nlazy_static! 
{\n\n pub static ref TOPIC_EVENT: Event = Event::new(\n\n EventType::Topic,\n\n \"Test topic\",\n", "file_path": "bindings/seshat/src/events.rs", "rank": 73, "score": 103917.35309849567 }, { "content": " fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {\n\n match value {\n\n ValueRef::Text(s) => {\n\n let s = std::str::from_utf8(s).map_err(|e| FromSqlError::Other(Box::new(e)))?;\n\n\n\n let e = match s {\n\n \"Forwards\" => CheckpointDirection::Forwards,\n\n \"Backwards\" => CheckpointDirection::Backwards,\n\n _ => return Err(FromSqlError::InvalidType),\n\n };\n\n\n\n Ok(e)\n\n }\n\n _ => Err(FromSqlError::InvalidType),\n\n }\n\n }\n\n}\n", "file_path": "bindings/seshat/src/events.rs", "rank": 74, "score": 103916.28804941312 }, { "content": " // Create a new profile.\n\n /// # Arguments\n\n ///\n\n /// * `displayname` - The human readable name of the user.\n\n /// * `avatar_url` - The URL of the avatar of the user.\n\n pub fn new(displayname: &str, avatar_url: &str) -> Profile {\n\n Profile {\n\n displayname: Some(displayname.to_string()),\n\n avatar_url: Some(avatar_url.to_string()),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub static EVENT_SOURCE: &str = r#\"{\n\n \"content\": {\n\n \"body\": \"Test message, msgtype: m.text\"\n\n },\n\n \"event_id\": \"$15163622445EBvZJ:localhost\",\n\n \"origin_server_ts\": 1516362244026,\n", "file_path": "bindings/seshat/src/events.rs", "rank": 75, "score": 103915.56078608189 }, { "content": " \"\",\n\n ),\n\n Event::new(\n\n EventType::Message,\n\n \"ルダの伝説 時のオカリナ\",\n\n Some(\"m.text\"),\n\n \"$15163622445ZERuD:localhost\",\n\n \"@example2:localhost\",\n\n 151636_2244063,\n\n \"!test_room:localhost\",\n\n \"\",\n\n ),\n\n ];\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\n/// A checkpoint that remembers the current point in a room timeline when\n\n/// fetching the history of the room.\n\npub struct CrawlerCheckpoint {\n", "file_path": 
"bindings/seshat/src/events.rs", "rank": 76, "score": 103915.23839872888 }, { "content": "#[cfg(test)]\n\nfn encrypt(data: &[u8]) -> Vec<u8> {\n\n let key = [0u8; 16];\n\n let hmac_key = [0u8; 16];\n\n\n\n let mut enc = Vec::new();\n\n {\n\n let mut aes =\n\n AesWriter::<Aes128Ctr, Hmac<Sha256>, _>::new(&mut enc, &key, &hmac_key, 16).unwrap();\n\n aes.write_all(&data).unwrap();\n\n }\n\n enc\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 77, "score": 102977.63804126685 }, { "content": "class NativeEventList(events: Map<Event, Profile>){\n\n private val eventList = mutableListOf<Long>()\n\n private val profileList = mutableListOf<Long>()\n\n\n\n init{\n\n events.onEach {\n\n eventList.add(it.key.ptr)\n\n profileList.add(it.value.ptr)\n\n }\n\n }\n\n private val eventsPointers : Map<Long,Long> = events.map {\n\n it.key.ptr to it.value.ptr\n\n }.toMap()\n\n\n\n fun getEvents(): LongArray {\n\n return eventList.toLongArray()\n\n }\n\n\n\n fun getProfiles(): LongArray {\n\n return profileList.toLongArray()\n\n }\n\n\n\n}", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/NativeEventList.kt", "rank": 78, "score": 95773.20839149349 }, { "content": "class NativeResult(var resultPtr : Long = -1, var errorCode: Int = -1, var errorMessage: String = String())\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/util/NativeResult.kt", "rank": 79, "score": 94115.4672478641 }, { "content": "class Event internal constructor(ptr: Long){\n\n internal val ptr:Long\n\n\n\n init{\n\n ensureNativeLibIsLoaded()\n\n this.ptr = ptr\n\n }\n\n\n\n constructor(\n\n eventType : EventType, contentValue : String, msgType: String?, eventId: String,\n\n sender: String, serverTs: Long, roomId: String\n\n ) : this(\n\n n_new_event(\n\n eventType.code.toLong(),\n\n contentValue,\n\n //TODO: Create NativeOptional type to pass optionals\n\n msgType != null,\n\n msgType ?: String(),\n\n eventId,\n\n sender,\n", 
"file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/Event.kt", "rank": 80, "score": 93321.16983377666 }, { "content": "#[cfg(test)]\n\nfn decrypt<R: Read + Seek + Clone>(data: R) -> Vec<u8> {\n\n let key = [0u8; 16];\n\n let mut dec = Vec::new();\n\n let mut aes = AesReader::<Aes128Ctr, _>::new::<Hmac<Sha256>>(data, &key, &key, 16, 32).unwrap();\n\n aes.read_to_end(&mut dec).unwrap();\n\n dec\n\n}\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_stream.rs", "rank": 81, "score": 91020.12009634367 }, { "content": "/// Key derivation result for our subsequent key derivations. The salt will be\n\n/// read from our key file and we will re-derive our encryption and MAC keys.\n\ntype KeyDerivationResult = (KeyBuffer, KeyBuffer);\n\n\n\n// The constants here are chosen to be similar to the constants for the Matrix\n\n// key export format[1].\n\n// [1] https://matrix.org/docs/spec/client_server/r0.5.0#key-exports\n\nconst KEYFILE: &str = \"seshat-index.key\";\n\n// 16 byte random salt.\n\nconst SALT_SIZE: usize = 16;\n\n// 16 byte random IV for the AES-CTR mode.\n\nconst IV_SIZE: usize = 16;\n\n// 32 byte or 256 bit encryption keys.\n\nconst KEY_SIZE: usize = 32;\n\n// 32 byte message authentication code since HMAC-SHA256 is used.\n\nconst MAC_LENGTH: usize = 32;\n\n// 1 byte for the store version.\n\nconst VERSION: u8 = 1;\n\n\n\n#[cfg(test)]\n\n// Tests don't need to protect against brute force attacks.\n\npub(crate) const PBKDF_COUNT: u32 = 10;\n", "file_path": "bindings/seshat/src/index/encrypted_dir.rs", "rank": 82, "score": 83265.58852799819 }, { "content": "/// Key derivation result type for our initial key derivation. 
Consists of a\n\n/// tuple containing a encryption key, a MAC key, and a random salt.\n\ntype InitialKeyDerivationResult = (KeyBuffer, KeyBuffer, Vec<u8>);\n\n\n", "file_path": "bindings/seshat/src/index/encrypted_dir.rs", "rank": 95, "score": 77974.86274779757 }, { "content": "\n\n companion object{\n\n @JvmStatic\n\n private external fun n_new_event(\n\n eventType: Long,\n\n contentValue: String,\n\n hasMsgType: Boolean,\n\n msgType: String,\n\n eventId: String,\n\n sender: String,\n\n serverTs: Long,\n\n roomId: String,\n\n ): Long\n\n\n\n fun eventFromSource(eventSource:String) : com.beeper.android_seshat.util.Result<Event,DatabaseErrorType>{\n\n val nativeResult = NativeResult()\n\n n_event_from_json(eventSource,nativeResult)\n\n return if(nativeResult.errorCode < 0){\n\n Success(Event(nativeResult.resultPtr))\n\n }else{\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/Event.kt", "rank": 96, "score": 76130.13722707718 }, { "content": " com.beeper.android_seshat.util.Error(DatabaseErrorType.fromCode(nativeResult.errorCode, nativeResult.errorMessage))\n\n }\n\n }\n\n\n\n @JvmStatic\n\n private external fun n_event_from_json(\n\n eventSource: String,\n\n nativeResult: NativeResult\n\n )\n\n\n\n }\n\n}\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/Event.kt", "rank": 97, "score": 76128.62023856744 }, { "content": " * Called if the object is GC'd by the JVM\n\n */\n\n protected fun finalize() {\n\n n_free_event(ptr)\n\n }\n\n\n\n internal fun testFinalize(){\n\n finalize()\n\n }\n\n\n\n private external fun n_free_event(eventPointer: Long)\n\n\n\n private external fun n_get_event_type(eventPointer: Long) : Int\n\n private external fun n_get_event_content_value(eventPointer: Long) : String\n\n private external fun n_get_event_id(eventPointer: Long) : String\n\n private external fun n_get_event_sender(eventPointer: Long) : String\n\n private external fun n_get_event_server_ts(eventPointer: Long) : Long\n\n 
private external fun n_get_event_room_id(eventPointer: Long) : String\n\n private external fun n_get_event_message_type(eventPointer: Long) : String\n\n\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/Event.kt", "rank": 98, "score": 76125.84908263652 }, { "content": "package com.beeper.android_seshat.event\n\n\n\nimport com.beeper.android_seshat.LibraryLoader.ensureNativeLibIsLoaded\n\nimport com.beeper.android_seshat.database.DatabaseErrorType\n\nimport com.beeper.android_seshat.util.NativeResult\n\nimport com.beeper.android_seshat.util.Success\n\n\n\n\n", "file_path": "android-seshat/src/main/java/com/beeper/android_seshat/event/Event.kt", "rank": 99, "score": 76123.94611193353 } ]
Rust
src/traits.rs
dhylands/serial-framing-protocol-rs
2f8cee9611891e494023f797ecb847249dffca3f
use core::cmp::min; use core::fmt; use core::mem::size_of; use log::info; use pretty_hex::*; use crate::crc::{Crc, CrcAccum}; pub const SOF: u8 = 0x7e; pub const ESC: u8 = 0x7d; pub const ESC_FLIP: u8 = 0x20; pub trait PacketBuffer { fn capacity(&self) -> usize; fn len(&self) -> usize; fn set_len(&mut self, len: usize); fn data(&self) -> &[u8]; fn data_mut(&mut self) -> &mut [u8]; fn store_byte_at(&mut self, idx: usize, byte: u8) { self.data_mut()[idx] = byte; } fn store_data(&mut self, data: &[u8]) { let copy_len = min(data.len(), self.capacity()); self.data_mut()[..copy_len].copy_from_slice(&data[..copy_len]); self.set_len(copy_len); } fn is_empty(&self) -> bool { self.len() == 0 } fn reset(&mut self) { self.set_len(0); } fn append(&mut self, byte: u8) -> Result<(), ()> { let len = self.len(); if len < self.capacity() { self.set_len(len + 1); self.store_byte_at(len, byte); Ok(()) } else { Err(()) } } fn remove_crc(&mut self) -> CrcAccum { let mut len = self.len(); if len < size_of::<CrcAccum>() { return 0; } len -= 2; let data = self.data(); let crc = ((data[len + 1] as CrcAccum) << 8) | (data[len] as CrcAccum); self.set_len(len); crc } fn dump(&self) { info!("{:?}", self.data().hex_dump()); } } impl fmt::Debug for dyn PacketBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.data().hex_dump()) } } pub trait PacketWriter { fn start_write(&mut self) {} fn write_byte(&mut self, byte: u8); fn end_write(&mut self) {} fn write_packet_data(&mut self, header: u8, bytes: &[u8]) { info!( "write_packet_data header: 0x{:02x} len: {}", header, bytes.len() ); let mut crc = Crc::new(); self.start_write(); self.write_byte(SOF); self.write_escaped_byte(&mut crc, header); self.write_escaped_bytes(&mut crc, bytes); self.write_crc(&mut crc); self.write_byte(SOF); self.end_write(); } fn write_crc(&mut self, crc: &mut Crc) { let crc_lsb = crc.lsb(); let crc_msb = crc.msb(); self.write_escaped_byte(crc, crc_lsb); self.write_escaped_byte(crc, 
crc_msb); } fn write_escaped_bytes(&mut self, crc: &mut Crc, bytes: &[u8]) { for byte in bytes { self.write_escaped_byte(crc, *byte); } } fn write_escaped_byte(&mut self, crc: &mut Crc, byte: u8) { crc.accum(byte); if byte == ESC || byte == SOF { self.write_byte(ESC); self.write_byte(byte ^ ESC_FLIP); } else { self.write_byte(byte); } } } pub trait PacketQueue { fn capacity(&self) -> usize; fn len(&self) -> usize; fn set_len(&mut self, len: usize); fn idx(&self) -> usize; fn set_idx(&mut self, len: usize); fn packet(&mut self, idx: usize) -> Option<&mut dyn PacketBuffer>; fn clear(&mut self) { self.set_len(0); self.set_idx(0); } fn next(&mut self) -> &mut dyn PacketBuffer { if self.len() < self.capacity() { self.set_len(self.len() + 1); } self.set_idx((self.idx() + 1) % self.capacity()); self.packet(self.idx()).unwrap() } fn get(&mut self, offset: usize) -> Option<&mut dyn PacketBuffer> { if offset < self.len() { let idx = if self.idx() < offset { self.idx() + self.capacity() - offset } else { self.idx() - offset }; self.packet(idx) } else { None } } } pub trait Storage { fn rx_buf(&mut self) -> &mut dyn PacketBuffer; fn tx_writer(&mut self) -> &mut dyn PacketWriter; fn tx_queue(&mut self) -> &mut dyn PacketQueue; }
use core::cmp::min; use core::fmt; use core::mem::size_of; use log::info; use pretty_hex::*; use crate::crc::{Crc, CrcAccum}; pub const SOF: u8 = 0x7e; pub const ESC: u8 = 0x7d; pub const ESC_FLIP: u8 = 0x20; pub trait PacketBuffer { fn capacity(&self) -> usize; fn len(&self) -> usize; fn set_len(&mut self, len: usize); fn data(&self) -> &[u8]; fn data_mut(&mut self) -> &mut [u8]; fn store_byte_at(&mut self, idx: usize, byte: u8) { self.data_mut()[idx] = byte; } fn store_data(&mut self, data: &[u8]) { let copy_len = min(data.len(), self.capacity()); self.data_mut()[..copy_len].copy_from_slice(&data[..copy_len]); self.set_len(copy_len); } fn is_empty(&self) -> bool { self.len() == 0 } fn reset(&mut self) { self.set_len(0); } fn append(&mut self, byte: u8) -> Result<(), ()> { let len = self.len(); if len < self.capacity() { self.set_len(len + 1); self.store_byte_at(len, byte); Ok(()) } else { Err(()) } } fn remove_crc(&mut self) -> CrcAccum { l
] as CrcAccum); self.set_len(len); crc } fn dump(&self) { info!("{:?}", self.data().hex_dump()); } } impl fmt::Debug for dyn PacketBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.data().hex_dump()) } } pub trait PacketWriter { fn start_write(&mut self) {} fn write_byte(&mut self, byte: u8); fn end_write(&mut self) {} fn write_packet_data(&mut self, header: u8, bytes: &[u8]) { info!( "write_packet_data header: 0x{:02x} len: {}", header, bytes.len() ); let mut crc = Crc::new(); self.start_write(); self.write_byte(SOF); self.write_escaped_byte(&mut crc, header); self.write_escaped_bytes(&mut crc, bytes); self.write_crc(&mut crc); self.write_byte(SOF); self.end_write(); } fn write_crc(&mut self, crc: &mut Crc) { let crc_lsb = crc.lsb(); let crc_msb = crc.msb(); self.write_escaped_byte(crc, crc_lsb); self.write_escaped_byte(crc, crc_msb); } fn write_escaped_bytes(&mut self, crc: &mut Crc, bytes: &[u8]) { for byte in bytes { self.write_escaped_byte(crc, *byte); } } fn write_escaped_byte(&mut self, crc: &mut Crc, byte: u8) { crc.accum(byte); if byte == ESC || byte == SOF { self.write_byte(ESC); self.write_byte(byte ^ ESC_FLIP); } else { self.write_byte(byte); } } } pub trait PacketQueue { fn capacity(&self) -> usize; fn len(&self) -> usize; fn set_len(&mut self, len: usize); fn idx(&self) -> usize; fn set_idx(&mut self, len: usize); fn packet(&mut self, idx: usize) -> Option<&mut dyn PacketBuffer>; fn clear(&mut self) { self.set_len(0); self.set_idx(0); } fn next(&mut self) -> &mut dyn PacketBuffer { if self.len() < self.capacity() { self.set_len(self.len() + 1); } self.set_idx((self.idx() + 1) % self.capacity()); self.packet(self.idx()).unwrap() } fn get(&mut self, offset: usize) -> Option<&mut dyn PacketBuffer> { if offset < self.len() { let idx = if self.idx() < offset { self.idx() + self.capacity() - offset } else { self.idx() - offset }; self.packet(idx) } else { None } } } pub trait Storage { fn rx_buf(&mut self) -> &mut 
dyn PacketBuffer; fn tx_writer(&mut self) -> &mut dyn PacketWriter; fn tx_queue(&mut self) -> &mut dyn PacketQueue; }
et mut len = self.len(); if len < size_of::<CrcAccum>() { return 0; } len -= 2; let data = self.data(); let crc = ((data[len + 1] as CrcAccum) << 8) | (data[len
function_block-random_span
[ { "content": "// Parse a bunch of bytes and return the first return code that isn't\n\n// MoreDataNeeded. This means that this function will parse at most one\n\n// error or packet from the input stream, which is fine for testing.\n\npub fn parse_bytes(\n\n parser: &mut RawPacketParser,\n\n bytes: &[u8],\n\n rx_packet: &mut dyn PacketBuffer,\n\n) -> RawParseResult {\n\n for byte in bytes {\n\n let parse_result = parser.parse_byte(*byte, rx_packet);\n\n match parse_result {\n\n RawParseResult::RawPacketReceived(header) => {\n\n info!(\n\n \"Header = {:02x} data = {:?}\",\n\n header,\n\n rx_packet.data().hex_dump()\n\n );\n\n return RawParseResult::RawPacketReceived(header);\n\n }\n\n\n\n RawParseResult::MoreDataNeeded => {\n\n continue;\n\n }\n", "file_path": "src/testutils.rs", "rank": 0, "score": 86125.34771751867 }, { "content": "pub fn parse_bytes_as_packet(\n\n parser: &mut RawPacketParser,\n\n bytes: &[u8],\n\n rx_packet: &mut dyn PacketBuffer,\n\n) -> Vec<u8> {\n\n let parse_result = parse_bytes(parser, bytes, rx_packet);\n\n match parse_result {\n\n RawParseResult::RawPacketReceived(header) => {\n\n let mut vec = Vec::new();\n\n vec.push(header);\n\n vec.extend_from_slice(rx_packet.data());\n\n return vec;\n\n }\n\n _ => {\n\n error!(\"{:?}\", parse_result);\n\n return Vec::new();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/testutils.rs", "rank": 1, "score": 82861.4425460126 }, { "content": "fn handle_connection(mut stream: TcpStream) -> io::Result<()> {\n\n println!(\"Connected to: {}\", stream.peer_addr().unwrap());\n\n let mut buf = [0u8; 4096];\n\n stream.write(b\"Hello World\")?;\n\n\n\n let bytes_read = stream.read(&mut buf)?;\n\n if bytes_read == 0 {\n\n // Is it possible? 
Or IoError will be raised anyway?\n\n return Ok(());\n\n }\n\n println!(\"Read: {:?}\", (&buf[0..bytes_read]).hex_dump());\n\n\n\n stream.shutdown(Shutdown::Both)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/client.rs", "rank": 3, "score": 73955.00555702923 }, { "content": "fn handle_connection(mut stream: TcpStream) -> io::Result<()> {\n\n println!(\"Client connected from: {}\", stream.peer_addr().unwrap());\n\n stream.set_read_timeout(Some(Duration::new(5, 0)))?;\n\n let mut buf = [0u8; 4096];\n\n loop {\n\n let bytes_read = stream.read(&mut buf)?;\n\n if bytes_read == 0 {\n\n // Is it possible? Or IoError will be raised anyway?\n\n break;\n\n }\n\n println!(\"Read: {:?}\", (&buf[0..bytes_read]).hex_dump());\n\n\n\n stream.write(&buf[0..bytes_read])?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/server.rs", "rank": 4, "score": 73955.00555702923 }, { "content": "pub trait Driver {\n\n /// Maximum size of a packet sent or received (doesn't include framing or escape bytes).\n\n type PACKET_SIZE: ArrayLength<u8>;\n\n\n\n /// Called at the beginning of writing a packet. Allows the driver implementation to implement\n\n /// buffering.\n\n fn start_write(&mut self) {}\n\n\n\n /// Called to write some data (not necessarily a complete packet) to the hardware.\n\n fn write_byte(&mut self, byte: u8);\n\n\n\n /// Called at the end of the writing a packet. 
Allows the driver to flush a\n\n /// buffer if a buffered implementation is used.\n\n fn end_write(&mut self) {}\n\n}\n", "file_path": "src/driver.rs", "rank": 8, "score": 67735.8877400742 }, { "content": "pub fn setup_log() {\n\n INIT.call_once(|| {\n\n simple_logger::init().unwrap();\n\n });\n\n}\n\n\n\nconst PACKET_SIZE: usize = 256;\n\n\n\npub struct TestPacketBuffer {\n\n len: usize,\n\n buf: [u8; PACKET_SIZE],\n\n}\n\n\n\nimpl Default for TestPacketBuffer {\n\n fn default() -> Self {\n\n TestPacketBuffer {\n\n len: 0,\n\n buf: [0; PACKET_SIZE],\n\n }\n\n }\n", "file_path": "src/testutils.rs", "rank": 9, "score": 65150.743847175065 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n\n\n if opt.verbose {\n\n println!(\"{:#?}\", opt);\n\n }\n\n\n\n let server_addr = format!(\"0.0.0.0:{}\", opt.port);\n\n let listener = TcpListener::bind(server_addr).unwrap();\n\n println!(\"Server listening on port {} ...\", opt.port);\n\n for stream in listener.incoming() {\n\n match stream {\n\n Ok(s) => {\n\n handle_connection(s).unwrap();\n\n }\n\n Err(e) => {\n\n println!(\"Error: {}\", e);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/server.rs", "rank": 10, "score": 37299.69334402615 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n\n\n if opt.verbose {\n\n println!(\"{:#?}\", opt);\n\n }\n\n\n\n let server_addr = format!(\"127.0.0.1:{}\", opt.port);\n\n match TcpStream::connect(server_addr) {\n\n Ok(stream) => {\n\n handle_connection(stream).unwrap();\n\n }\n\n Err(e) => {\n\n println!(\"Error writing data {}\", e);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "examples/client.rs", "rank": 11, "score": 37299.69334402615 }, { "content": "// ===========================================================================\n\n//\n\n// Tests\n\n//\n\n// ===========================================================================\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::testutils::{setup_log, 
TestStorage};\n\n use crate::traits::SOF;\n\n use log::info;\n\n\n\n impl EndPoint {\n\n // Parse a bunch of bytes and return the first return code that isn't\n\n // MoreDataNeeded. This means that this function will parse at most one\n\n // error or packet from the input stream, which is fine for testing.\n\n\n\n // bytes, rx_packet, writer\n\n pub fn parse_bytes(&mut self, bytes: &[u8], storage: &mut dyn Storage) -> ParseResult {\n", "file_path": "src/lib.rs", "rank": 20, "score": 16.193007231395484 }, { "content": "\n\n fn data(&self) -> &[u8] {\n\n &self.buf[0..self.len]\n\n }\n\n\n\n fn data_mut(&mut self) -> &mut [u8] {\n\n &mut self.buf[..]\n\n }\n\n}\n\n\n\nimpl PacketWriter for TestPacketBuffer {\n\n fn start_write(&mut self) {\n\n //info!(\"start_write\");\n\n self.reset();\n\n }\n\n\n\n fn write_byte(&mut self, byte: u8) {\n\n //info!(\"write_byte 0x{:02x} self.len = {}\", byte, self.len());\n\n self.append(byte).unwrap();\n\n }\n", "file_path": "src/testutils.rs", "rank": 21, "score": 14.280570826802698 }, { "content": " fn set_idx(&mut self, idx: usize) {\n\n self.idx = min(idx, QUEUE_SIZE - 1);\n\n self.len = max(self.len, self.idx + 1);\n\n }\n\n\n\n /// Returns the i'th packet from the queue.\n\n fn packet(&mut self, idx: usize) -> Option<&mut dyn PacketBuffer> {\n\n if idx < self.len {\n\n Some(&mut self.packet[idx])\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl TestPacketQueue {\n\n fn new() -> Self {\n\n TestPacketQueue {\n\n len: 0,\n\n idx: 0,\n\n packet: Default::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/testutils.rs", "rank": 22, "score": 13.563037597477468 }, { "content": " info!(\"header: {:02x}\", self.header);\n\n info!(\" escape_state: {:?}\", self.escape_state);\n\n info!(\" frame_state: {:?}\", self.frame_state);\n\n }\n\n\n\n pub fn header(&self) -> u8 {\n\n self.header\n\n }\n\n\n\n /// Feeds a single byte into the raw packet parser. 
Once a complete packet\n\n /// has been parsed, a RawPacketReceived variant will be returned. The\n\n /// packet data will be stored in the PacketBuffer object that was passed\n\n /// to RawPacketParser::new() and will remain valid until the next time\n\n /// that parse_byte is called.\n\n pub fn parse_byte(&mut self, byte: u8, rx_data: &mut dyn PacketBuffer) -> RawParseResult {\n\n //info!(\"parse_byte 0x{:02x}\", byte);\n\n let mut byte = byte;\n\n if self.escape_state == EscapeState::Escaping {\n\n self.escape_state = EscapeState::Normal;\n\n if byte == SOF {\n", "file_path": "src/rawpacket.rs", "rank": 23, "score": 12.80697608974781 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::testutils::{parse_bytes, parse_bytes_as_packet, setup_log, TestPacketBuffer};\n\n use crate::traits::PacketWriter;\n\n use log::info;\n\n use pretty_hex::*;\n\n use std::vec::Vec;\n\n\n\n fn encode_decode_packet(parser: &mut RawPacketParser, header: u8, data: &[u8]) -> Vec<u8> {\n\n // Write the packet out and collect it.\n\n info!(\"=== encode_decode_packet ===\");\n\n info!(\"=== Input ===\");\n\n info!(\"Header: 0x{:02x} Data: {:?}\", header, data.hex_dump());\n\n let mut writer = TestPacketBuffer::new();\n\n writer.write_packet_data(header, data);\n\n\n\n info!(\"=== Output ===\");\n\n info!(\"{:?}\", (&writer.data()).hex_dump());\n\n\n", "file_path": "src/rawpacket.rs", "rank": 24, "score": 12.634506108489262 }, { "content": " // ESC SOF is treated as an abort sequence\n\n self.frame_state = FrameState::New;\n\n self.reset();\n\n rx_data.reset();\n\n return RawParseResult::AbortedPacket;\n\n }\n\n byte ^= ESC_FLIP;\n\n } else if byte == SOF {\n\n if self.frame_state == FrameState::Collecting {\n\n // We've got a raw frame.\n\n self.frame_state = FrameState::New;\n\n\n\n if rx_data.len() < size_of::<CrcAccum>() {\n\n return RawParseResult::PacketTooSmall;\n\n }\n\n\n\n let crc = rx_data.remove_crc();\n\n if self.crc.crc() != CRC_GOOD {\n\n return 
RawParseResult::CrcError(crc);\n\n }\n", "file_path": "src/rawpacket.rs", "rank": 25, "score": 12.534404804764167 }, { "content": "}\n\n\n\nimpl TestPacketBuffer {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl PacketBuffer for TestPacketBuffer {\n\n fn capacity(&self) -> usize {\n\n PACKET_SIZE\n\n }\n\n\n\n fn len(&self) -> usize {\n\n self.len\n\n }\n\n\n\n fn set_len(&mut self, len: usize) {\n\n self.len = min(len, PACKET_SIZE);\n\n }\n", "file_path": "src/testutils.rs", "rank": 26, "score": 12.318129713661865 }, { "content": "\n\n return RawParseResult::RawPacketReceived(self.header);\n\n }\n\n // Receving a SOF while in the New state is considered a no-op\n\n return RawParseResult::MoreDataNeeded;\n\n } else if byte == ESC {\n\n self.escape_state = EscapeState::Escaping;\n\n return RawParseResult::MoreDataNeeded;\n\n }\n\n\n\n if self.frame_state == FrameState::New {\n\n // We're just starting a new frame. The first byte will be the header\n\n // and everything after that will be user bytes.\n\n self.reset();\n\n rx_data.reset();\n\n self.header = byte;\n\n self.frame_state = FrameState::Collecting;\n\n } else if rx_data.append(byte).is_err() {\n\n // The payload was too big for the packet. This means that the SOF\n\n // was corrupted or a bad stream or something. 
We just reset the\n", "file_path": "src/rawpacket.rs", "rank": 27, "score": 12.23438676303035 }, { "content": "\n\n pub fn connect(&mut self, storage: &mut dyn Storage) {\n\n self.tx.reset(storage);\n\n self.rx.reset();\n\n self.tx.transmit_syn0(storage.tx_writer());\n\n self.tx.connect_state = ConnectState::SentSyn0;\n\n }\n\n\n\n pub fn is_connected(&self) -> bool {\n\n return self.tx.connect_state == ConnectState::Connected;\n\n }\n\n\n\n pub fn parse_byte<'a>(&mut self, byte: u8, storage: &mut dyn Storage) -> ParseResult {\n\n let parse_result = self.rx.parser.parse_byte(byte, storage.rx_buf());\n\n match parse_result {\n\n PacketTypeResult::PacketReceived(packet_type) => {\n\n self.tx.handle_packet(packet_type, storage)\n\n }\n\n PacketTypeResult::AbortedPacket => ParseResult::AbortedPacket,\n\n PacketTypeResult::PacketTooSmall => ParseResult::PacketTooSmall,\n", "file_path": "src/lib.rs", "rank": 28, "score": 12.085270298145389 }, { "content": " // Then run the generated packet through the packet parser\n\n let mut rx_data = TestPacketBuffer::new();\n\n let ret = parse_bytes_as_packet(parser, &writer.data(), &mut rx_data);\n\n info!(\"=== Reparsed ===\");\n\n info!(\"{:?}\", &ret.hex_dump());\n\n ret\n\n }\n\n\n\n // Uses the WritePacket trait to convert a user packet into a Vec<u8> of\n\n // the bytes that would be written to a real device.\n\n fn write_packet(header: u8, data: &[u8]) -> Vec<u8> {\n\n info!(\"=== write_packet ===\");\n\n info!(\"=== Input ===\");\n\n info!(\"Header: 0x{:02x} Data: {:?}\", header, data.hex_dump());\n\n let mut writer = TestPacketBuffer::new();\n\n writer.write_packet_data(header, data);\n\n\n\n info!(\"=== Output ===\");\n\n info!(\"{:?}\", (&writer.data()).hex_dump());\n\n\n", "file_path": "src/rawpacket.rs", "rank": 29, "score": 11.945143039233276 }, { "content": " if let Some(frame_type) = FrameType::from_u8(header & FRAME_TYPE_MASK) {\n\n frame_type\n\n } else {\n\n // We can't actually get this case since FrameType 
covers all of the bit patterns that\n\n // are contained in FRAME_TYPE_MASK.\n\n FrameType::NAK\n\n }\n\n }\n\n\n\n fn get_frame_seq(&self, header: u8) -> u8 {\n\n return header & SEQ_MASK;\n\n }\n\n\n\n pub fn parse_byte(&mut self, byte: u8, rx_data: &mut dyn PacketBuffer) -> PacketTypeResult {\n\n let parse_result = self.raw_parser.parse_byte(byte, rx_data);\n\n match parse_result {\n\n RawParseResult::RawPacketReceived(header) => {\n\n let frame_type = self.get_frame_type(header);\n\n let seq = self.get_frame_seq(header);\n\n match frame_type {\n", "file_path": "src/packet.rs", "rank": 30, "score": 11.890266619394549 }, { "content": " fn len(&self) -> usize {\n\n self.len\n\n }\n\n\n\n /// Sets the number of packets currently in the queue.\n\n fn set_len(&mut self, len: usize) {\n\n self.len = min(len, QUEUE_SIZE);\n\n self.idx = if self.len > 0 {\n\n min(self.idx, self.len - 1)\n\n } else {\n\n 0\n\n }\n\n }\n\n\n\n /// Returns the index of the most recently added packet to the queue.\n\n fn idx(&self) -> usize {\n\n self.idx\n\n }\n\n\n\n /// Sets the index of the nmost recently added packet to the queue.\n", "file_path": "src/testutils.rs", "rank": 31, "score": 11.446269871324327 }, { "content": " //info!(\"CRC new\");\n\n Default::default()\n\n }\n\n\n\n pub fn accum(&mut self, byte: u8) {\n\n //info!(\"CRC accum 0x{:02x}\", byte);\n\n let byte = byte ^ ((self.val & 0xff) as u8);\n\n let byte = byte ^ (byte << 4);\n\n let byte16 = byte as u16;\n\n self.val = ((byte16 << 8) | ((self.val >> 8) & 0x00ff)) ^ (byte16 >> 4) ^ (byte16 << 3);\n\n }\n\n\n\n pub fn accum_bytes(&mut self, bytes: &[u8]) -> CrcAccum {\n\n for byte in bytes.iter() {\n\n self.accum(*byte);\n\n }\n\n self.val\n\n }\n\n\n\n pub fn accum_crc(&mut self) -> CrcAccum {\n", "file_path": "src/crc.rs", "rank": 32, "score": 11.105237430912645 }, { "content": "\n\n ep1.connect(&mut storage1);\n\n\n\n // This should put a SYN0 packet into packet2\n\n assert_eq!(storage1.tx_vec(), vec![SOF, 0xc0, 0x74, 
0x36, SOF]);\n\n\n\n // Sending the SYN0 to the other side, should generate a SYN1 in response\n\n assert_eq!(\n\n ep2.parse_bytes(storage1.tx_data(), &mut storage2),\n\n ParseResult::MoreDataNeeded\n\n );\n\n assert_eq!(storage2.tx_vec(), vec![SOF, 0xc1, 0xfd, 0x27, SOF]);\n\n\n\n // Sending SYN1 to initial side should generate a SYN2 in response Side 1 should be connected\n\n assert_eq!(\n\n ep1.parse_bytes(storage2.tx_data(), &mut storage1),\n\n ParseResult::MoreDataNeeded\n\n );\n\n assert!(ep1.is_connected());\n\n assert_eq!(storage1.tx_vec(), vec![SOF, 0xc2, 0x66, 0x15, SOF]);\n", "file_path": "src/lib.rs", "rank": 33, "score": 10.843531325528073 }, { "content": "#![no_std]\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate std;\n\n\n\nuse log::{debug, error, warn};\n\n\n\n#[macro_use]\n\npub mod macros;\n\n\n\npub mod crc;\n\npub mod driver;\n\npub mod packet;\n\npub mod rawpacket;\n\npub mod traits;\n\n\n\n#[cfg(test)]\n\nmod testutils;\n\n\n\nuse crc::CrcAccum;\n\nuse packet::{FrameType, PacketParser, PacketType, PacketTypeResult, SeqSyn, SEQ_MASK};\n\nuse traits::{PacketWriter, Storage};\n\n\n\nconst SEQ_INIT: u8 = 0;\n\n\n\n#[derive(PartialEq)]\n", "file_path": "src/lib.rs", "rank": 34, "score": 10.841686017311925 }, { "content": " PacketTypeResult::CrcError(rcvd_crc) => ParseResult::CrcError(rcvd_crc),\n\n PacketTypeResult::MoreDataNeeded => ParseResult::MoreDataNeeded,\n\n }\n\n }\n\n\n\n pub fn write_packet(&mut self, data: &[u8], storage: &mut dyn Storage) {\n\n if !self.is_connected() {\n\n error!(\"Not connected\");\n\n return;\n\n }\n\n let header: u8 = FrameType::USR as u8 | self.tx.tx_seq;\n\n\n\n let tx_buf = storage.tx_queue().next();\n\n tx_buf.store_data(data);\n\n\n\n storage.tx_writer().write_packet_data(header, data);\n\n self.tx.tx_seq = self.tx.next_frame_seq(self.tx.tx_seq);\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 10.832806956201555 }, { "content": "\n\n // Sending the SYN2 to Side 2 should then put it into 
a connected state\n\n assert_eq!(\n\n ep2.parse_bytes(storage1.tx_data(), &mut storage2),\n\n ParseResult::MoreDataNeeded\n\n );\n\n assert!(ep2.is_connected());\n\n assert_eq!(storage2.tx_vec(), vec![]);\n\n\n\n // Send a User packet from Side 1 to Side 2\n\n\n\n ep1.write_packet(\"Testing\".as_bytes(), &mut storage1);\n\n assert_eq!(\n\n storage1.tx_vec(),\n\n vec![SOF, 0x00, 0x54, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0xc5, 0x5c, SOF]\n\n );\n\n assert_eq!(\n\n ep2.parse_bytes(storage1.tx_data(), &mut storage2),\n\n ParseResult::UserPacket\n\n );\n", "file_path": "src/lib.rs", "rank": 36, "score": 10.635585516894047 }, { "content": " ) {\n\n let header = (frame_type as u8) | (seq & SEQ_MASK);\n\n let data: &[u8] = &[];\n\n\n\n writer.write_packet_data(header, data);\n\n }\n\n}\n\n\n\npub struct EndPoint {\n\n tx: Transmitter,\n\n rx: Receiver,\n\n}\n\n\n\nimpl EndPoint {\n\n pub fn new() -> Self {\n\n Self {\n\n tx: Transmitter::new(),\n\n rx: Receiver::new(),\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 37, "score": 10.423550908949563 }, { "content": "use crate::crc::{Crc, CrcAccum, CRC_GOOD};\n\n\n\nuse core::mem::size_of;\n\nuse log::info;\n\n\n\nuse crate::traits::{PacketBuffer, ESC, ESC_FLIP, SOF};\n\n\n\n#[derive(PartialEq, Debug)]\n", "file_path": "src/rawpacket.rs", "rank": 38, "score": 10.287877394533504 }, { "content": "\n\n fn end_write(&mut self) {\n\n //info!(\"end_write\");\n\n }\n\n}\n\n\n\nconst QUEUE_SIZE: usize = 8;\n\npub struct TestPacketQueue {\n\n len: usize,\n\n idx: usize,\n\n packet: [TestPacketBuffer; QUEUE_SIZE],\n\n}\n\n\n\nimpl PacketQueue for TestPacketQueue {\n\n /// Returns the maximum number of packets which can be stored.\n\n fn capacity(&self) -> usize {\n\n QUEUE_SIZE\n\n }\n\n\n\n /// Returns the number of packets currently in the queue.\n", "file_path": "src/testutils.rs", "rank": 39, "score": 10.036622535803016 }, { "content": " // receiver. 
Things will get resynchronized on the next valid frame.\n\n self.reset();\n\n rx_data.reset();\n\n }\n\n self.crc.accum(byte);\n\n RawParseResult::MoreDataNeeded\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.crc.reset();\n\n self.escape_state = EscapeState::Normal;\n\n }\n\n}\n\n\n\n// ===========================================================================\n\n//\n\n// Tests\n\n//\n\n// ===========================================================================\n\n\n", "file_path": "src/rawpacket.rs", "rank": 40, "score": 10.0105248894697 }, { "content": "}\n\n\n\n// A few methods to help out with testing.\n\nimpl TestStorage {\n\n pub fn new() -> Self {\n\n TestStorage {\n\n rx_buf: TestPacketBuffer::new(),\n\n tx_buf: TestPacketBuffer::new(),\n\n tx_queue: TestPacketQueue::new(),\n\n }\n\n }\n\n\n\n pub fn rx_data(&self) -> &[u8] {\n\n self.rx_buf.data()\n\n }\n\n\n\n pub fn tx_data(&self) -> &[u8] {\n\n self.tx_buf.data()\n\n }\n\n\n\n pub fn tx_vec(&self) -> Vec<u8> {\n\n self.tx_data().to_vec()\n\n }\n\n}\n", "file_path": "src/testutils.rs", "rank": 41, "score": 10.005669015790572 }, { "content": "pub enum PacketTypeResult {\n\n PacketReceived(PacketType),\n\n AbortedPacket,\n\n PacketTooSmall,\n\n CrcError(CrcAccum),\n\n MoreDataNeeded,\n\n}\n\n\n\npub struct PacketParser {\n\n raw_parser: RawPacketParser,\n\n}\n\n\n\nimpl PacketParser {\n\n pub fn new() -> Self {\n\n Self {\n\n raw_parser: RawPacketParser::new(),\n\n }\n\n }\n\n\n\n fn get_frame_type(&self, header: u8) -> FrameType {\n", "file_path": "src/packet.rs", "rank": 42, "score": 9.911928335253608 }, { "content": " vec![ESC, ESC],\n\n vec![ESC, ESC, ESC],\n\n ];\n\n\n\n info!(\"----- Testing encode/decode -----\");\n\n for test in tests.iter() {\n\n let header = test[0];\n\n let data = &test[1..test.len()];\n\n assert_eq!(&encode_decode_packet(&mut parser, header, data), test);\n\n }\n\n }\n\n}\n", "file_path": "src/rawpacket.rs", "rank": 43, "score": 9.841403476800615 }, { "content": " 
(&writer.data()).to_vec()\n\n }\n\n\n\n #[test]\n\n fn test_raw_parser() {\n\n setup_log();\n\n let mut packet = TestPacketBuffer::new();\n\n let mut parser = RawPacketParser::new();\n\n\n\n // Cover every type of return code from the parser\n\n let tests = &vec![\n\n (vec![SOF, SOF], RawParseResult::MoreDataNeeded),\n\n (vec![SOF, 0x00, SOF], RawParseResult::PacketTooSmall),\n\n (vec![SOF, 0x00, 0x00, SOF], RawParseResult::PacketTooSmall),\n\n (\n\n vec![SOF, 0x00, 0x00, 0x00, SOF],\n\n RawParseResult::CrcError(0x0000),\n\n ),\n\n (\n\n vec![SOF, 0xc0, 0x74, 0x36, SOF],\n", "file_path": "src/rawpacket.rs", "rank": 44, "score": 9.638511420218773 }, { "content": "use crate::crc::CrcAccum;\n\nuse crate::rawpacket::{RawPacketParser, RawParseResult};\n\nuse crate::traits::PacketBuffer;\n\n\n\npub const FRAME_TYPE_MASK: u8 = 0xc0;\n\npub const SEQ_MASK: u8 = 0x3f;\n\n\n\n// FrameType makes up the top 2 bits of the 8-it sequence number.\n\nc_like_enum! {\n\n FrameType {\n\n USR = 0x00,\n\n RTX = 0x40,\n\n NAK = 0x80,\n\n SYN = 0xc0,\n\n }\n\n}\n\n\n\n// When the FrameType is Syn, then the following enumeration populates\n\n// the sequence number (lower 6 bits).\n\nc_like_enum! {\n", "file_path": "src/packet.rs", "rank": 45, "score": 9.610993647315048 }, { "content": "// So a packet will look like something like the following:\n\n// SOF HEADER ...data... 
CRC-LSB CRC-MSB SOF\n\npub struct RawPacketParser {\n\n header: u8,\n\n crc: Crc,\n\n escape_state: EscapeState,\n\n frame_state: FrameState,\n\n}\n\n\n\nimpl<'a> RawPacketParser {\n\n pub fn new() -> Self {\n\n RawPacketParser {\n\n header: 0,\n\n crc: Crc::new(),\n\n escape_state: EscapeState::Normal,\n\n frame_state: FrameState::New,\n\n }\n\n }\n\n\n\n pub fn dump(&self) {\n", "file_path": "src/rawpacket.rs", "rank": 46, "score": 9.147210887416701 }, { "content": " info!(\"----- Testing parsing results -----\");\n\n for test in tests.iter() {\n\n assert_eq!(parse_bytes(&mut parser, &test.0, &mut packet), test.1);\n\n }\n\n\n\n // Verify that escaping works\n\n\n\n // NOTE: packets with a header of 0xcc won't be run through the writer test since those tests\n\n // don't regenerate the input.\n\n let tests = &vec![\n\n // Plain unescaped packet\n\n (\n\n vec![SOF, 0xc0, 0x11, 0x5e, 0xe4, 0xfb, SOF],\n\n vec![0xc0, 0x11, 0x5e],\n\n ),\n\n // Packet with an escaped SOF\n\n (\n\n vec![SOF, 0xc0, 0x11, ESC, 0x5e, 0xe6, 0xda, SOF],\n\n vec![0xc0, 0x11, SOF],\n\n ),\n", "file_path": "src/rawpacket.rs", "rank": 47, "score": 9.114487568213995 }, { "content": " return PacketTypeResult::MoreDataNeeded;\n\n }\n\n }\n\n }\n\n RawParseResult::AbortedPacket => PacketTypeResult::AbortedPacket,\n\n RawParseResult::PacketTooSmall => PacketTypeResult::PacketTooSmall,\n\n RawParseResult::CrcError(rcvd_crc) => PacketTypeResult::CrcError(rcvd_crc),\n\n RawParseResult::MoreDataNeeded => PacketTypeResult::MoreDataNeeded,\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.raw_parser.reset();\n\n }\n\n}\n", "file_path": "src/packet.rs", "rank": 48, "score": 8.637009835977093 }, { "content": "\n\n RawParseResult::CrcError(rcvd_crc) => {\n\n let mut crc = Crc::new();\n\n crc.accum(parser.header());\n\n let expected_crc = !crc.accum_bytes(rx_packet.data());\n\n warn!(\n\n \"CRC Error: Rcvd {:04x} Expected {:04x}\",\n\n rcvd_crc, expected_crc\n\n );\n\n return 
RawParseResult::CrcError(rcvd_crc);\n\n }\n\n\n\n _ => {\n\n info!(\"{:?}\", parse_result);\n\n return parse_result;\n\n }\n\n }\n\n }\n\n info!(\"MoreDataNeeded\");\n\n RawParseResult::MoreDataNeeded\n\n}\n\n\n", "file_path": "src/testutils.rs", "rank": 49, "score": 8.431890997689539 }, { "content": " vec![0xcc, 0x11, 0x5e],\n\n ),\n\n ];\n\n\n\n info!(\"----- Testing parser -----\");\n\n // Test that the packet parser produces the correct results\n\n for test in tests.iter() {\n\n assert_eq!(\n\n parse_bytes_as_packet(&mut parser, &test.0, &mut packet),\n\n test.1\n\n );\n\n }\n\n\n\n info!(\"----- Testing writer -----\");\n\n // Flip things around and verify that given the raw packet, we get the written stream.\n\n for test in tests.iter() {\n\n let header = test.1[0];\n\n if header != 0xcc {\n\n let data = &test.1[1..test.1.len()];\n\n assert_eq!(write_packet(header, data), test.0);\n", "file_path": "src/rawpacket.rs", "rank": 50, "score": 8.398844294082599 }, { "content": " pub fn msb(&self) -> u8 {\n\n ((!self.val >> 8) & 0x00ff) as u8\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test0() {\n\n use crate::crc::Crc;\n\n let mut crc = Crc::new();\n\n crc.accum(0xc0);\n\n assert_eq!(!crc.val, 0x3674);\n\n assert_eq!(crc.accum_crc(), crate::crc::CRC_GOOD);\n\n }\n\n #[test]\n\n fn test1() {\n\n use crate::crc::Crc;\n\n let mut crc = Crc::new();\n\n crc.accum(0xc0);\n", "file_path": "src/crc.rs", "rank": 51, "score": 8.099161951137146 }, { "content": "//use log::info;\n\n\n\npub type CrcAccum = u16;\n\n\n\nconst CRC_INIT: CrcAccum = 0xffff;\n\npub const CRC_GOOD: CrcAccum = 0xf0b8;\n\n\n\n#[derive(Debug)]\n\npub struct Crc {\n\n val: CrcAccum,\n\n}\n\n\n\nimpl Default for Crc {\n\n fn default() -> Self {\n\n Self { val: CRC_INIT }\n\n }\n\n}\n\n\n\nimpl Crc {\n\n pub fn new() -> Self {\n", "file_path": "src/crc.rs", "rank": 52, "score": 7.816282163036169 }, { "content": " let crc = !self.val;\n\n self.accum((crc & 0xff) as u8);\n\n 
self.accum(((crc >> 8) & 0xff) as u8);\n\n\n\n self.val\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n //info!(\"CRC reset\");\n\n *self = Default::default();\n\n }\n\n\n\n pub fn crc(&self) -> CrcAccum {\n\n self.val\n\n }\n\n\n\n pub fn lsb(&self) -> u8 {\n\n (!self.val & 0x00ff) as u8\n\n }\n\n\n", "file_path": "src/crc.rs", "rank": 53, "score": 7.789667057672094 }, { "content": " storage.tx_writer().start_write(); // Clears the outout buffer.\n\n for byte in bytes.iter() {\n\n let parse_result = self.parse_byte(*byte, storage);\n\n match parse_result {\n\n ParseResult::UserPacket => {\n\n return ParseResult::UserPacket;\n\n }\n\n\n\n ParseResult::MoreDataNeeded => {\n\n continue;\n\n }\n\n\n\n ParseResult::AbortedPacket => {\n\n return ParseResult::AbortedPacket;\n\n }\n\n\n\n ParseResult::PacketTooSmall => {\n\n return ParseResult::PacketTooSmall;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 7.781381408649626 }, { "content": " RawParseResult::RawPacketReceived(0xc0),\n\n ),\n\n (\n\n vec![SOF, 0x00, 0x78, 0xf0, SOF],\n\n RawParseResult::RawPacketReceived(0x00),\n\n ),\n\n (\n\n vec![SOF, 0xc0, 0xee, 0x9d, 0xcb, SOF],\n\n RawParseResult::RawPacketReceived(0xc0),\n\n ),\n\n (\n\n vec![SOF, 0xc0, 0x11, 0x22, 0x33, 0x44, 0x73, 0x75, SOF],\n\n RawParseResult::RawPacketReceived(0xc0),\n\n ),\n\n (\n\n vec![SOF, 0xc0, 0x11, ESC, SOF],\n\n RawParseResult::AbortedPacket,\n\n ),\n\n ];\n\n\n", "file_path": "src/rawpacket.rs", "rank": 55, "score": 7.681032025404935 }, { "content": " }\n\n PacketType::Disconnect => {\n\n self.handle_frame_disconnect();\n\n }\n\n }\n\n ParseResult::MoreDataNeeded\n\n }\n\n\n\n fn handle_frame_usr_rtx(\n\n &mut self,\n\n frame_type: FrameType,\n\n seq: u8,\n\n writer: &mut dyn PacketWriter,\n\n ) -> ParseResult {\n\n match self.connect_state {\n\n ConnectState::Disconnected => {\n\n self.transmit_dis(writer);\n\n }\n\n ConnectState::SentSyn0 => {\n\n self.transmit_syn0(writer);\n", "file_path": "src/lib.rs", "rank": 56, 
"score": 7.664609031465361 }, { "content": " ParseResult::CrcError(rcvd_crc) => {\n\n return ParseResult::CrcError(rcvd_crc);\n\n }\n\n }\n\n }\n\n ParseResult::MoreDataNeeded\n\n }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n setup_log();\n\n\n\n info!(\"Running test_connect\");\n\n\n\n let mut storage1 = TestStorage::new();\n\n let mut storage2 = TestStorage::new();\n\n\n\n let mut ep1 = EndPoint::new();\n\n let mut ep2 = EndPoint::new();\n", "file_path": "src/lib.rs", "rank": 57, "score": 6.976216984785882 }, { "content": "use core::cmp::{max, min};\n\nuse log::{error, info, warn};\n\nuse pretty_hex::*;\n\nuse simple_logger;\n\nuse std::sync::Once;\n\nuse std::vec::Vec;\n\n\n\nuse super::crc::Crc;\n\nuse super::rawpacket::{RawPacketParser, RawParseResult};\n\nuse super::traits::{PacketBuffer, PacketQueue, PacketWriter, Storage};\n\n\n\nstatic INIT: Once = Once::new();\n\n\n", "file_path": "src/testutils.rs", "rank": 58, "score": 6.745777680907228 }, { "content": " }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_packet_encode_decode() {\n\n setup_log();\n\n let mut parser = RawPacketParser::new();\n\n\n\n // Take each of the folloing \"user packets\", write them out, and then\n\n // reparse to make sure that we get the original packets back.\n\n\n\n let tests = &vec![\n\n vec![0xc0],\n\n vec![0xc0, 0x11],\n\n vec![0xc0, 0x11, 0x22],\n\n vec![SOF],\n\n vec![SOF, SOF],\n\n vec![SOF, SOF, SOF],\n\n vec![ESC],\n", "file_path": "src/rawpacket.rs", "rank": 59, "score": 6.523625565424613 }, { "content": " // Packet with an escaped ESC and a second time because the CRC happens to have the ESC character\n\n (\n\n vec![SOF, 0xc0, 0x11, ESC, 0x5d, ESC, 0x5d, 0xe8, SOF],\n\n vec![0xc0, 0x11, ESC],\n\n ),\n\n // Packet with an escaped space\n\n (\n\n vec![SOF, 0xcc, 0x11, ESC, 0x00, 0xbe, 0xc4, SOF],\n\n vec![0xcc, 0x11, 0x20],\n\n ),\n\n // Make sure double SOF is ignored\n\n (\n\n vec![SOF, SOF, 0xcc, 0x11, 0x5e, 0x47, 0x5e, SOF],\n\n vec![0xcc, 0x11, 0x5e],\n\n ),\n\n // Make sure 
that 2 packets with just a single SOF between then is parsed\n\n // properly. For this we just leave off the leading SOF since the trailing SOF\n\n // from the previous packet should be sufficient.\n\n (\n\n vec![0xcc, 0x11, 0x5e, 0x47, 0x5e, SOF],\n", "file_path": "src/rawpacket.rs", "rank": 60, "score": 6.479228771783912 }, { "content": " rx_seq: SEQ_INIT,\n\n tx_seq: SEQ_INIT,\n\n }\n\n }\n\n\n\n fn reset(&mut self, storage: &mut dyn Storage) {\n\n self.connect_state = ConnectState::Disconnected;\n\n self.rx_seq = SEQ_INIT;\n\n self.tx_seq = SEQ_INIT;\n\n self.clear_history(storage);\n\n }\n\n\n\n fn next_frame_seq(&self, seq: u8) -> u8 {\n\n return (seq + 1) & SEQ_MASK;\n\n }\n\n\n\n pub fn handle_packet(\n\n &mut self,\n\n packet_type: PacketType,\n\n storage: &mut dyn Storage,\n", "file_path": "src/lib.rs", "rank": 61, "score": 6.316825568755368 }, { "content": " self.transmit_control_packet(FrameType::SYN, SeqSyn::DIS as u8, writer);\n\n }\n\n\n\n fn transmit_syn0(&mut self, writer: &mut dyn PacketWriter) {\n\n self.transmit_control_packet(FrameType::SYN, SeqSyn::SYN0 as u8, writer);\n\n }\n\n\n\n fn transmit_syn1(&mut self, writer: &mut dyn PacketWriter) {\n\n self.transmit_control_packet(FrameType::SYN, SeqSyn::SYN1 as u8, writer);\n\n }\n\n\n\n fn transmit_syn2(&mut self, writer: &mut dyn PacketWriter) {\n\n self.transmit_control_packet(FrameType::SYN, SeqSyn::SYN2 as u8, writer);\n\n }\n\n\n\n fn transmit_control_packet(\n\n &mut self,\n\n frame_type: FrameType,\n\n seq: u8,\n\n writer: &mut dyn PacketWriter,\n", "file_path": "src/lib.rs", "rank": 62, "score": 5.424738443029449 }, { "content": " }\n\n }\n\n\n\n fn handle_frame_disconnect(&mut self) {\n\n self.connect_state = ConnectState::Disconnected;\n\n }\n\n\n\n fn clear_history(&mut self, storage: &mut dyn Storage) {\n\n storage.tx_queue().clear();\n\n }\n\n\n\n fn transmit_history_from_seq(&mut self, _seq: u8, _storage: &mut dyn Storage) {\n\n // TODO\n\n }\n\n\n\n fn transmit_nak(&mut self, seq: 
u8, writer: &mut dyn PacketWriter) {\n\n self.transmit_control_packet(FrameType::NAK, seq, writer);\n\n }\n\n\n\n fn transmit_dis(&mut self, writer: &mut dyn PacketWriter) {\n", "file_path": "src/lib.rs", "rank": 63, "score": 5.12669451933407 }, { "content": " SeqSyn {\n\n SYN0 = 0,\n\n SYN1 = 1,\n\n SYN2 = 2,\n\n DIS = 3,\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum PacketType {\n\n USR { seq: u8 },\n\n RTX { seq: u8 },\n\n NAK { seq: u8 },\n\n Syn0,\n\n Syn1,\n\n Syn2,\n\n Disconnect,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/packet.rs", "rank": 64, "score": 4.981598844681407 }, { "content": "#[macro_export]\n\nmacro_rules! c_like_enum {\n\n ( $name: ident { $($variant: ident = $value: expr,)* } ) => {\n\n #[derive(Debug, Clone, Copy, PartialEq)]\n\n pub enum $name {\n\n $($variant = $value,)+\n\n }\n\n\n\n impl $name {\n\n pub fn from_u8(value: u8) -> Option<$name> {\n\n match value {\n\n $($value => Some($name::$variant),)+\n\n _ => None\n\n }\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/macros.rs", "rank": 65, "score": 4.907566225791556 }, { "content": " assert_eq!(storage2.rx_data(), \"Testing\".as_bytes());\n\n assert_eq!(storage2.tx_vec(), vec![]);\n\n\n\n // Make sure that the user packet is in the history buffer\n\n\n\n let last_pkt = storage1.tx_queue().get(0).unwrap();\n\n assert_eq!(last_pkt.data(), \"Testing\".as_bytes());\n\n\n\n //info!(\"packet1to2: {:?}\", packet1to2.dump());\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 66, "score": 4.893562217042609 }, { "content": "pub struct TestStorage {\n\n rx_buf: TestPacketBuffer,\n\n tx_buf: TestPacketBuffer,\n\n tx_queue: TestPacketQueue,\n\n}\n\nimpl Storage for TestStorage {\n\n /// Returns a reference to Rx PacketBuffer\n\n fn rx_buf(&mut self) -> &mut dyn PacketBuffer {\n\n &mut self.rx_buf\n\n }\n\n\n\n /// Returns a reference to the PacketWriter\n\n fn tx_writer(&mut self) -> &mut dyn PacketWriter {\n\n &mut self.tx_buf\n\n }\n\n\n\n /// Returns a reference to the PacketQueue\n\n fn 
tx_queue(&mut self) -> &mut dyn PacketQueue {\n\n &mut self.tx_queue\n\n }\n", "file_path": "src/testutils.rs", "rank": 67, "score": 4.356003480202485 }, { "content": " }\n\n\n\n fn handle_frame_nak(&mut self, _seq: u8, _writer: &mut dyn Storage) {\n\n //TODO\n\n }\n\n\n\n fn handle_frame_syn0(&mut self, storage: &mut dyn Storage) {\n\n self.rx_seq = SEQ_INIT;\n\n self.tx_seq = SEQ_INIT;\n\n self.clear_history(storage);\n\n self.connect_state = ConnectState::SentSyn1;\n\n self.transmit_syn1(storage.tx_writer());\n\n }\n\n\n\n fn handle_frame_syn1(&mut self, storage: &mut dyn Storage) {\n\n if self.connect_state == ConnectState::Disconnected {\n\n self.transmit_dis(storage.tx_writer());\n\n return;\n\n }\n\n self.connect_state = ConnectState::Connected;\n", "file_path": "src/lib.rs", "rank": 68, "score": 4.092829318270251 }, { "content": " FrameType::USR => {\n\n return PacketTypeResult::PacketReceived(PacketType::USR { seq });\n\n }\n\n FrameType::RTX => {\n\n return PacketTypeResult::PacketReceived(PacketType::RTX { seq });\n\n }\n\n FrameType::NAK => {\n\n return PacketTypeResult::PacketReceived(PacketType::NAK { seq });\n\n }\n\n FrameType::SYN => {\n\n if let Some(seq_syn) = SeqSyn::from_u8(seq) {\n\n return match seq_syn {\n\n SeqSyn::SYN0 => PacketTypeResult::PacketReceived(PacketType::Syn0),\n\n SeqSyn::SYN1 => PacketTypeResult::PacketReceived(PacketType::Syn1),\n\n SeqSyn::SYN2 => PacketTypeResult::PacketReceived(PacketType::Syn2),\n\n SeqSyn::DIS => {\n\n PacketTypeResult::PacketReceived(PacketType::Disconnect)\n\n }\n\n };\n\n }\n", "file_path": "src/packet.rs", "rank": 69, "score": 3.922750209171509 }, { "content": " }\n\n ConnectState::SentSyn1 => {\n\n self.transmit_syn1(writer);\n\n }\n\n ConnectState::Connected => {\n\n if seq != self.rx_seq {\n\n if frame_type == FrameType::USR {\n\n warn!(\"Out of order frame received - sending NAK\");\n\n self.transmit_nak(self.rx_seq, writer);\n\n } else {\n\n warn!(\"Out of order retransmitted frame frame 
received - ignoring\");\n\n }\n\n } else {\n\n // Good user frame received and accepted. Deliver it.\n\n self.rx_seq = self.next_frame_seq(self.rx_seq);\n\n return ParseResult::UserPacket;\n\n }\n\n }\n\n }\n\n ParseResult::MoreDataNeeded\n", "file_path": "src/lib.rs", "rank": 70, "score": 3.5227549911729423 }, { "content": "use std::io;\n\nuse std::io::{Read, Write};\n\nuse std::net::{TcpListener, TcpStream};\n\nuse std::time::Duration;\n\n\n\nuse pretty_hex::*;\n\n\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"server\")]\n", "file_path": "examples/server.rs", "rank": 71, "score": 3.460586441268473 }, { "content": " use super::*;\n\n use log::{error, info, warn};\n\n use simple_logger;\n\n use std::sync::Once;\n\n use std::vec::Vec;\n\n\n\n static INIT: Once = Once::new();\n\n\n\n fn setup() {\n\n INIT.call_once(|| {\n\n simple_logger::init().unwrap();\n\n });\n\n }\n\n}\n", "file_path": "examples/client.rs", "rank": 72, "score": 3.438905881052581 }, { "content": "use std::io;\n\nuse std::io::{Read, Write};\n\nuse std::net::{Shutdown, TcpStream};\n\n\n\nuse pretty_hex::*;\n\n\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"server\")]\n", "file_path": "examples/client.rs", "rank": 73, "score": 3.4245267033459172 }, { "content": " crc.accum(0x11);\n\n crc.accum(0x22);\n\n crc.accum(0x33);\n\n assert_eq!(!crc.val, 0x0bd5);\n\n assert_eq!(crc.accum_crc(), crate::crc::CRC_GOOD);\n\n }\n\n #[test]\n\n fn test2() {\n\n use crate::crc::Crc;\n\n let mut crc = Crc::new();\n\n crc.accum(0x7d);\n\n assert_eq!(!crc.val, 0x581a);\n\n assert_eq!(crc.accum_crc(), crate::crc::CRC_GOOD);\n\n }\n\n}\n", "file_path": "src/crc.rs", "rank": 74, "score": 3.03299943254721 }, { "content": "use generic_array::ArrayLength;\n\n\n", "file_path": "src/driver.rs", "rank": 75, "score": 2.884144429251701 }, { "content": "# serial-framing-protocol-rs\n\nAn HDLC like framing protocol suitable for serial transmission, 
implemented in rust.\n\n\n\nThis crate was inspired by https://github.com/BaroboRobotics/libsfp/wiki/Serial-Framing-Protocol\n\n\n", "file_path": "README.md", "rank": 76, "score": 1.9548150725255895 }, { "content": " debug!(\"Connected (after SYN1)\");\n\n self.transmit_syn2(storage.tx_writer());\n\n if self.tx_seq != SEQ_INIT {\n\n self.transmit_history_from_seq(SEQ_INIT, storage);\n\n }\n\n }\n\n\n\n fn handle_frame_syn2(&mut self, storage: &mut dyn Storage) {\n\n if self.connect_state == ConnectState::Disconnected {\n\n self.transmit_dis(storage.tx_writer());\n\n return;\n\n }\n\n if self.connect_state == ConnectState::SentSyn0 {\n\n self.transmit_syn0(storage.tx_writer());\n\n return;\n\n }\n\n self.connect_state = ConnectState::Connected;\n\n debug!(\"Connected (after SYN2)\");\n\n if self.tx_seq != SEQ_INIT {\n\n self.transmit_history_from_seq(SEQ_INIT, storage);\n", "file_path": "src/lib.rs", "rank": 77, "score": 1.8506600310220256 }, { "content": " ) -> ParseResult {\n\n debug!(\"Received {:?}\", packet_type);\n\n match packet_type {\n\n PacketType::USR { seq } => {\n\n return self.handle_frame_usr_rtx(FrameType::USR, seq, storage.tx_writer());\n\n }\n\n PacketType::RTX { seq } => {\n\n return self.handle_frame_usr_rtx(FrameType::RTX, seq, storage.tx_writer());\n\n }\n\n PacketType::NAK { seq } => {\n\n self.handle_frame_nak(seq, storage);\n\n }\n\n PacketType::Syn0 => {\n\n self.handle_frame_syn0(storage);\n\n }\n\n PacketType::Syn1 => {\n\n self.handle_frame_syn1(storage);\n\n }\n\n PacketType::Syn2 => {\n\n self.handle_frame_syn2(storage);\n", "file_path": "src/lib.rs", "rank": 78, "score": 1.2904150185159113 } ]
Rust
src/tokenizer/words.rs
naughie/kytea-tokenizer
8dd76fae807c2bd0d9f773cef2089b13b3ef7570
use super::Word; use crate::kytea::{DELIM, ESCAPE}; use std::num::NonZeroUsize; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub struct Words<'a> { inner: &'a str, } impl<'a> Iterator for Words<'a> { type Item = Word<'a>; fn next(&mut self) -> Option<Self::Item> { let pos = self.find_sow(); self.inner = &self.inner[pos..]; let pos = self.find_eow()?.get(); let inner = &self.inner[..pos]; self.inner = &self.inner[pos..]; Some(Word { inner }) } } impl<'a> DoubleEndedIterator for Words<'a> { fn next_back(&mut self) -> Option<Self::Item> { let pos = self.rfind_eow()?.get(); self.inner = &self.inner[..pos]; let pos = self.rfind_sow(); let inner = &self.inner[pos..]; self.inner = &self.inner[..pos]; Some(Word { inner }) } } impl<'a> From<&'a str> for Words<'a> { fn from(inner: &'a str) -> Self { Self { inner } } } impl Words<'_> { #[inline] fn len(self) -> usize { self.inner.len() } #[inline] fn is_empty(self) -> bool { self.inner.is_empty() } fn enumerate(&self) -> impl DoubleEndedIterator<Item = (usize, u8)> + '_ { self.inner.as_bytes().iter().copied().enumerate() } #[inline] fn renumerate(&self) -> impl Iterator<Item = (usize, u8)> + '_ { self.enumerate().rev() } fn find_sow(&self) -> usize { let mut it = self.enumerate().skip_while(|&(_, c)| c == DELIM); if let Some((i, _)) = it.next() { i } else { self.len() } } fn find_eow(&self) -> Option<NonZeroUsize> { if self.is_empty() { return None; } let mut prev_char = 0u8; for (i, c) in self.enumerate() { if c == DELIM && prev_char != ESCAPE { return unsafe { Some(NonZeroUsize::new_unchecked(i)) }; } prev_char = if c == ESCAPE && prev_char == ESCAPE { 0 } else { c }; } unsafe { Some(NonZeroUsize::new_unchecked(self.inner.len())) } } #[inline] fn iso_parity(i: usize, j: usize) -> bool { (i & 1) == (j & 1) } fn rfind_eow(&self) -> Option<NonZeroUsize> { let mut it = self.renumerate().skip_while(|&(_, c)| c == DELIM); if let Some((i, c)) = it.next() { if c == ESCAPE { let last = match it.filter(|&(_, c)| c == ESCAPE).last() { 
Some((j, _)) if !Self::iso_parity(i, j) => i + 1, _ => i + 2, }; unsafe { Some(NonZeroUsize::new_unchecked(last)) } } else { unsafe { Some(NonZeroUsize::new_unchecked(i + 1)) } } } else { None } } fn rfind_sow(&self) -> usize { let mut delim = 0; let mut delim_found = false; let mut broken = false; for (i, c) in self.renumerate() { if delim_found && c != ESCAPE { if Self::iso_parity(i, delim) { delim_found = false; } else { broken = true; break; } } if c == DELIM { delim_found = true; delim = i; } } if delim_found && (broken || Self::iso_parity(delim, 0)) { delim + 1 } else { 0 } } } #[cfg(test)] mod test { use super::*; #[test] fn test_words() { let s = ""; let mut words = Words::from(s); assert_eq!(words.next(), None); let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), Some(Word::from(" /補助記号"))); assert_eq!(words.next(), None); let s = "ab\t\\\t/補助記号\t\\/\\\t\t\\\\\\\t/\\\\\t"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("ab"))); assert_eq!(words.next(), Some(Word::from("\\\t/補助記号"))); assert_eq!(words.next(), Some(Word::from("\\/\\\t"))); assert_eq!(words.next(), Some(Word::from("\\\\\\\t/\\\\"))); assert_eq!(words.next(), None); } #[test] fn test_words_rev() { let s = ""; let mut words = Words::from(s).rev(); assert_eq!(words.next(), None); let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from(" /補助記号"))); assert_eq!(words.next(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next(), None); let s = "\t\tab\t\\\t/補助記号\t\\/\\\t\t\\\\\\\t/\\\\\t"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("\\\\\\\t/\\\\"))); assert_eq!(words.next(), 
Some(Word::from("\\/\\\t"))); assert_eq!(words.next(), Some(Word::from("\\\t/補助記号"))); assert_eq!(words.next(), Some(Word::from("ab"))); assert_eq!(words.next(), None); let s = "\ta"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("a"))); assert_eq!(words.next(), None); let s = "\\\ta"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("\\\ta"))); assert_eq!(words.next(), None); let s = "\\\\\ta"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("a"))); assert_eq!(words.next(), Some(Word::from("\\\\"))); assert_eq!(words.next(), None); } #[test] fn test_words_mixed() { let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next_back(), Some(Word::from(" /補助記号"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next_back(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), None); assert_eq!(words.next_back(), None); } }
use super::Word; use crate::kytea::{DELIM, ESCAPE}; use std::num::NonZeroUsize; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub struct Words<'a> { inner: &'a str, } impl<'a> Iterator for Words<'a> { type Item = Word<'a>; fn next(&mut self) -> Option<Self::Item> { let pos = self.find_sow(); self.inner = &self.inner[pos..]; let pos = self.find_eow()?.get(); let inner = &self.inner[..pos]; self.inner = &self.inner[pos..]; Some(Word { inner }) } } impl<'a> DoubleEndedIterator for Words<'a> { fn next_back(&mut self) -> Option<Self::Item> { let pos = self.rfind_eow()?.get(); self.inner = &self.inner[..pos]; let pos = self.rfind_sow(); let inner = &self.inner[pos..]; self.inner = &self.inner[..pos]; Some(Word { inner }) } } impl<'a> From<&'a str> for Words<'a> { fn from(inner: &'a str) -> Self { Self { inner } } } impl Words<'_> { #[inline] fn len(self) -> usize { self.inner.len() } #[inline] fn is_empty(self) -> bool { self.inner.is_empty() } fn enumerate(&self) -> impl DoubleEndedIterator<Item = (usize, u8)> + '_ { self.inner.as_bytes().iter().copied().enumerate() } #[inline] fn renumerate(&self) -> impl Iterator<Item = (usize, u8)> + '_ { self.enumerate().rev() } fn find_sow(&self) -> usize { let mut it = self.enumerate().skip_while(|&(_, c)| c == DELIM); if let Some((i, _)) = it.next() { i } else { self.len() } } fn find_eow(&self) -> Option<NonZeroUsize> { if self.is_empty() { return None; } let mut prev_char = 0u8; for (i, c) in self.enumerate() { if c == DELIM && prev_char != ESCAPE { return unsafe { Some(NonZeroUsize::new_unchecked(i)) }; } prev_char = if c == ESCAPE && prev_char == ESCAPE { 0 } else { c }; } unsafe { Some(NonZeroUsize::new_unchecked(self.inner.len())) } } #[inline] fn iso_parity(i: usize, j: usize) -> bool { (i & 1) == (j & 1) } fn rfind_eow(&self) -> Option<NonZeroUsize> { let mut it = self.renumerate().skip_while(|&(_, c)| c == DELIM); if let Some((i, c)) = it.next() { if c == ESCAPE { let last = match it.filter(|&(_, c)| c == ESCAPE).last() { 
Some((j, _)) if !Self::iso_parity(i, j) => i + 1, _ => i + 2, }; unsafe { Some(NonZeroUsize::new_unchecked(last)) } } else { unsafe { Some(NonZeroUsize::new_unchecked(i + 1)) } } } else { None } } fn rfind_sow(&self) -> usize { let mut delim = 0; let mut delim_found = false; let mut broken = false; for (i, c) in self.renumerate() { if delim_found && c != ESCAPE { if Self::iso_parity(i, delim) { delim_found = false; } else { broken = true; break; } } if c == DELIM { delim_found = true; delim = i; } } if delim_found && (broken || Self::iso_parity(delim, 0)) { delim + 1 } else { 0 } } } #[cfg(test)] mod test { use super::*; #[test] fn test_words() { let s = ""; let mut words = Words::from(s); assert_eq!(words.next(), None); let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), Some(Word::from(" /補助記号"))); assert_eq!(words.next(), None); let s = "ab\t\\\t/補助記号\t\\/\\\t\t\\\\\\\t/\\\\\t"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("ab"))); assert_eq!(words.next(), Some(Word::from("\\\t/補助記号"))); assert_eq!(words.next(), Some(Word::from("\\/\\\t"))); assert_eq!(words.next(), Some(Word::from("\\\\\\\t/\\\\"))); assert_eq!(words.next(), None); } #[test] fn test_words_rev() { let s = ""; let mut words = Words::from(s).rev(); assert_eq!(words.next(), None); let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from(" /補助
Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("a"))); assert_eq!(words.next(), Some(Word::from("\\\\"))); assert_eq!(words.next(), None); } #[test] fn test_words_mixed() { let s = "吾輩/名詞\tは/助詞\t\t猫/名詞\t /補助記号"; let mut words = Words::from(s); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next_back(), Some(Word::from(" /補助記号"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next_back(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), None); assert_eq!(words.next_back(), None); } }
記号"))); assert_eq!(words.next(), Some(Word::from("猫/名詞"))); assert_eq!(words.next(), Some(Word::from("は/助詞"))); assert_eq!(words.next(), Some(Word::from("吾輩/名詞"))); assert_eq!(words.next(), None); let s = "\t\tab\t\\\t/補助記号\t\\/\\\t\t\\\\\\\t/\\\\\t"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("\\\\\\\t/\\\\"))); assert_eq!(words.next(), Some(Word::from("\\/\\\t"))); assert_eq!(words.next(), Some(Word::from("\\\t/補助記号"))); assert_eq!(words.next(), Some(Word::from("ab"))); assert_eq!(words.next(), None); let s = "\ta"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("a"))); assert_eq!(words.next(), None); let s = "\\\ta"; let mut words = Words::from(s).rev(); assert_eq!(words.next(), Some(Word::from("\\\ta"))); assert_eq!(words.next(), None); let s = "\\\\\ta"; let mut words =
random
[ { "content": "type WordsFrom<'a> = fn(&'a str) -> Words<'a>;\n", "file_path": "src/tokenizer/mod.rs", "rank": 0, "score": 100778.44098460334 }, { "content": "pub fn strip(out: impl AsRef<str>) -> String {\n\n let mut stripped = String::new();\n\n\n\n for line in out.as_ref().lines() {\n\n for word in Words::from(line) {\n\n word.pushed_to(&mut stripped);\n\n stripped.push(' ');\n\n }\n\n stripped.push('\\n');\n\n }\n\n\n\n stripped\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 79667.65401976765 }, { "content": "pub fn get_surface_and_pos(s: &str) -> (Surface<'_>, PoS) {\n\n Word::from(s).surface_and_pos()\n\n}\n", "file_path": "src/lib.rs", "rank": 2, "score": 74379.98931490032 }, { "content": "type FlattenWords<'a> = FlatMap<Lines<'a>, Words<'a>, WordsFrom<'a>>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct WordIterator<'a> {\n\n word_it: FlattenWords<'a>,\n\n}\n\n\n\nimpl<'a> WordIterator<'a> {\n\n pub fn from_lines(lines: &'a str) -> Self {\n\n Self {\n\n word_it: lines.lines().flat_map(Words::from),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for WordIterator<'a> {\n\n type Item = (Surface<'a>, PoS);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.word_it.next().map(Word::surface_and_pos)\n", "file_path": "src/tokenizer/mod.rs", "rank": 3, "score": 57396.312848143425 }, { "content": "pub fn run_cmd(\n\n in_path: impl AsRef<Path>,\n\n out_path: impl AsRef<Path>,\n\n model: Option<&str>,\n\n) -> Result<()> {\n\n kytea_command(model)\n\n .stdin(File::open(in_path)?)\n\n .stdout(File::create(out_path)?)\n\n .output()\n\n .map_err(Into::into)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "src/kytea.rs", "rank": 4, "score": 48469.7052898821 }, { "content": "fn kytea_command(model: Option<&str>) -> Command {\n\n let mut comm = Command::new(\"kytea\");\n\n if let Some(model) = model {\n\n comm.args(&[\"-model\", model]);\n\n }\n\n comm.args(&[\"-wordbound\", DELIM_STR]);\n\n comm\n\n}\n", "file_path": "src/kytea.rs", "rank": 5, "score": 
38916.94675284038 }, { "content": "mod surface;\n\npub use surface::Surface;\n\n\n\nmod word;\n\npub use word::Word;\n\n\n\nmod words;\n\npub use words::Words;\n\n\n\nuse crate::PoS;\n\n\n\nuse std::iter::FlatMap;\n\nuse std::str::Lines;\n\n\n", "file_path": "src/tokenizer/mod.rs", "rank": 6, "score": 18226.935359314113 }, { "content": " }\n\n}\n\n\n\nimpl<'a> DoubleEndedIterator for WordIterator<'a> {\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n self.word_it.next_back().map(Word::surface_and_pos)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"tantivy\")]\n\npub use tantivy_tokenizer::KyTea;\n\n\n\n#[cfg(feature = \"tantivy\")]\n\nmod tantivy_tokenizer {\n\n use tantivy::tokenizer::Token;\n\n use tantivy::tokenizer::Tokenizer;\n\n use tantivy::tokenizer::{BoxTokenStream, TokenStream};\n\n\n\n use super::{PoS, Surface, WordIterator};\n\n\n", "file_path": "src/tokenizer/mod.rs", "rank": 7, "score": 18226.3652541664 }, { "content": " use std::iter::Enumerate;\n\n\n\n #[derive(Debug, Clone)]\n\n pub struct KyTea;\n\n\n\n impl Tokenizer for KyTea {\n\n fn token_stream<'a>(&self, text: &'a str) -> BoxTokenStream<'a> {\n\n KyTeaStream::from(text).into()\n\n }\n\n }\n\n\n\n struct KyTeaStream<'a> {\n\n original: &'a str,\n\n word_it: Enumerate<WordIterator<'a>>,\n\n token: Token,\n\n }\n\n\n\n impl<'a> KyTeaStream<'a> {\n\n fn from(text: &'a str) -> Self {\n\n KyTeaStream {\n", "file_path": "src/tokenizer/mod.rs", "rank": 8, "score": 18224.119845420613 }, { "content": "\n\n fn token_mut(&mut self) -> &mut Token {\n\n &mut self.token\n\n }\n\n }\n\n\n\n fn to_token<'a>(\n\n original: &'a str,\n\n Surface(surface): Surface<'a>,\n\n pos: PoS,\n\n position: usize,\n\n ) -> Token {\n\n // SAFETY: `original` and `surface` are both parts of the same text, i.e. 
the `original`.\n\n let offset_from = unsafe { surface.as_ptr().offset_from(original.as_ptr()) } as usize;\n\n let offset_to = offset_from + surface.len();\n\n let text = if pos != PoS::None {\n\n let mut word = format!(\"{}/\", surface);\n\n word.push_str(pos.into());\n\n word\n\n } else {\n", "file_path": "src/tokenizer/mod.rs", "rank": 9, "score": 18223.93847718473 }, { "content": " ) {\n\n assert_eq!(res, Some((Surface(expected_surface), expected_pos)));\n\n }\n\n\n\n #[test]\n\n fn word_iterator() {\n\n let words = \"\\na/名詞\\tb/形容詞\\nc/d\\n\\ne/UNK\\n\";\n\n let mut it = WordIterator::from_lines(words);\n\n assert_surface_and_pos(it.next(), \"a\", PoS::名詞);\n\n assert_surface_and_pos(it.next(), \"b\", PoS::形容詞);\n\n assert_surface_and_pos(it.next(), \"c\", PoS::None);\n\n assert_surface_and_pos(it.next(), \"e\", PoS::UNK);\n\n assert!(it.next().is_none());\n\n }\n\n}\n", "file_path": "src/tokenizer/mod.rs", "rank": 10, "score": 18223.4422266526 }, { "content": " String::from(surface)\n\n };\n\n Token {\n\n offset_from,\n\n offset_to,\n\n position,\n\n text,\n\n position_length: 1,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn assert_surface_and_pos(\n\n res: Option<(Surface<'_>, PoS)>,\n\n expected_surface: &str,\n\n expected_pos: PoS,\n", "file_path": "src/tokenizer/mod.rs", "rank": 11, "score": 18222.73873199471 }, { "content": " original: text,\n\n word_it: WordIterator::from_lines(text).enumerate(),\n\n token: Token::default(),\n\n }\n\n }\n\n }\n\n\n\n impl<'a> TokenStream for KyTeaStream<'a> {\n\n fn advance(&mut self) -> bool {\n\n if let Some((i, (surface, pos))) = self.word_it.next() {\n\n self.token = to_token(self.original, surface, pos, i);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn token(&self) -> &Token {\n\n &self.token\n\n }\n", "file_path": "src/tokenizer/mod.rs", "rank": 12, "score": 18221.922634271832 }, { "content": "use crate::kytea::{DELIM, ESCAPE};\n\nuse crate::PoS;\n\n\n\nuse 
super::Surface;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Word<'a> {\n\n pub inner: &'a str,\n\n}\n\n\n\nimpl<'a> From<&'a str> for Word<'a> {\n\n fn from(inner: &'a str) -> Self {\n\n Self { inner }\n\n }\n\n}\n\n\n\nimpl<'a> Word<'a> {\n\n pub fn is_ascii_whitespace(self) -> bool {\n\n let inner = self.inner.as_bytes();\n\n !inner.is_empty() && inner[0] == b' ' || inner[0] == ESCAPE && inner[1] == DELIM\n", "file_path": "src/tokenizer/word.rs", "rank": 13, "score": 16586.932780254483 }, { "content": " let (first, eow) = self.find_end_of_surface_and_pos();\n\n let surface = Surface(&self.inner[..first]);\n\n let pos = if first == self.inner.len() {\n\n None\n\n } else {\n\n Some(&self.inner[(first + 1)..eow])\n\n };\n\n let pos = pos.and_then(|pos| pos.parse().ok());\n\n let pos = pos.unwrap_or_default();\n\n (surface, pos)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn assert_pushed_word_eq(word: &str, expected: &str) {\n\n let mut s = String::new();\n\n let word = Word::from(word);\n", "file_path": "src/tokenizer/word.rs", "rank": 25, "score": 16577.18397693365 }, { "content": " }\n\n\n\n pub fn pushed_to(self, s: &mut String) {\n\n if self.inner.is_empty() {\n\n return;\n\n }\n\n\n\n if !self.is_ascii_whitespace() {\n\n let (_, eow) = self.find_end_of_surface_and_pos();\n\n s.push_str(&self.inner[..eow]);\n\n }\n\n }\n\n\n\n pub fn find_next_slash(self, start: usize) -> usize {\n\n let mut prev_char = 0u8;\n\n for (i, &c) in self.inner[start..].as_bytes().iter().enumerate() {\n\n if c == b'/' && prev_char != ESCAPE {\n\n return start + i;\n\n }\n\n\n", "file_path": "src/tokenizer/word.rs", "rank": 26, "score": 16576.377606835063 }, { "content": "\n\n fn assert_word_eq(word: &str, expected_surface: &str, expected_pos: PoS) {\n\n let (surface, pos) = Word::from(word).surface_and_pos();\n\n assert_eq!(surface, expected_surface);\n\n assert_eq!(pos, expected_pos);\n\n }\n\n\n\n #[test]\n\n fn 
test_get_surface_and_pos() {\n\n assert_word_eq(\"\", \"\", PoS::None);\n\n assert_word_eq(\"吾輩\", \"吾輩\", PoS::None);\n\n assert_word_eq(\"吾輩/\", \"吾輩\", PoS::None);\n\n assert_word_eq(\"吾輩/名詞\", \"吾輩\", PoS::名詞);\n\n assert_word_eq(\"/名詞\", \"\", PoS::名詞);\n\n assert_word_eq(\"吾輩/名詞/\", \"吾輩\", PoS::名詞);\n\n assert_word_eq(\"吾輩/IllegalPoS\", \"吾輩\", PoS::None);\n\n }\n\n}\n", "file_path": "src/tokenizer/word.rs", "rank": 27, "score": 16575.925520062767 }, { "content": " prev_char = if c == ESCAPE && prev_char == ESCAPE {\n\n 0\n\n } else {\n\n c\n\n };\n\n }\n\n self.inner.len()\n\n }\n\n\n\n pub fn find_end_of_surface_and_pos(self) -> (usize, usize) {\n\n let first = self.find_next_slash(0);\n\n let eow = if first == self.inner.len() {\n\n first\n\n } else {\n\n self.find_next_slash(first + 1)\n\n };\n\n (first, eow)\n\n }\n\n\n\n pub fn surface_and_pos(self) -> (Surface<'a>, PoS) {\n", "file_path": "src/tokenizer/word.rs", "rank": 29, "score": 16572.924893709493 }, { "content": " word.pushed_to(&mut s);\n\n assert_eq!(s, expected);\n\n }\n\n\n\n #[test]\n\n fn test_word() {\n\n assert_pushed_word_eq(\"\", \"\");\n\n assert_pushed_word_eq(\" /補助記号\", \"\");\n\n assert_pushed_word_eq(\"\\\\\\t/補助記号\", \"\");\n\n\n\n assert_pushed_word_eq(\"吾輩/名詞\", \"吾輩/名詞\");\n\n assert_pushed_word_eq(\"吾輩/名詞/わがはい\", \"吾輩/名詞\");\n\n assert_pushed_word_eq(\"吾輩/\", \"吾輩/\");\n\n assert_pushed_word_eq(\"吾輩/名詞/\", \"吾輩/名詞\");\n\n assert_pushed_word_eq(\"/名詞/わがはい\", \"/名詞\");\n\n\n\n assert_pushed_word_eq(\"\\\\/吾輩/名詞/\", \"\\\\/吾輩/名詞\");\n\n assert_pushed_word_eq(\"吾\\\\\\\\/輩/名詞/\", \"吾\\\\\\\\/輩\");\n\n assert_pushed_word_eq(\"吾\\\\\\\\\\\\/輩/名詞/\", \"吾\\\\\\\\\\\\/輩/名詞\");\n\n }\n", "file_path": "src/tokenizer/word.rs", "rank": 30, "score": 16572.472932710392 }, { "content": " PosIterator::new()\n\n }\n\n\n\n pub fn repeat<F, T>(f: F) -> impl Iterator<Item = T>\n\n where\n\n F: Fn(Self) -> T,\n\n {\n\n PosIterator::new().map(f)\n\n }\n\n\n\n fn next_pos(self) -> Option<Self> 
{\n\n use self::PoS::*;\n\n\n\n match self {\n\n 名詞 => Some(動詞),\n\n 動詞 => Some(接尾辞),\n\n 接尾辞 => Some(形容詞),\n\n 形容詞 => Some(代名詞),\n\n 代名詞 => Some(副詞),\n\n 副詞 => Some(形状詞),\n", "file_path": "src/pos.rs", "rank": 31, "score": 17.82526395223778 }, { "content": "use super::Word;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Surface<'a>(pub &'a str);\n\n\n\nimpl<'a> Surface<'a> {\n\n pub fn is_ascii_whitespace(self) -> bool {\n\n Word::from(self.0).is_ascii_whitespace()\n\n }\n\n\n\n #[inline]\n\n pub const fn as_ptr(self) -> *const u8 {\n\n self.0.as_ptr()\n\n }\n\n\n\n #[inline]\n\n pub const fn as_bytes(self) -> &'a [u8] {\n\n self.0.as_bytes()\n\n }\n\n\n", "file_path": "src/tokenizer/surface.rs", "rank": 32, "score": 16.82432541436145 }, { "content": " type Item = PoS;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(pos) = self.current {\n\n self.current = pos.next_pos();\n\n Some(pos)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn pos_from_str() {\n\n assert_eq!(Ok(PoS::名詞), \"名詞\".parse());\n\n }\n", "file_path": "src/pos.rs", "rank": 33, "score": 15.372505210462261 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn from_prim(n: u8) -> Self {\n\n use num_traits::FromPrimitive;\n\n <Self as FromPrimitive>::from_u8(n).unwrap_or(Self::None)\n\n }\n\n\n\n #[inline]\n\n pub fn to_prim(self) -> u8 {\n\n self as u8\n\n }\n\n\n\n pub fn append_to(self, surface: &mut String) {\n\n surface.push('/');\n\n surface.push_str(self.into());\n\n }\n\n\n\n #[inline]\n\n pub fn iter() -> PosIterator {\n", "file_path": "src/pos.rs", "rank": 34, "score": 14.735585794629053 }, { "content": " #[inline]\n\n pub const fn as_str(self) -> &'a str {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'a, 'b> PartialEq<&'b str> for Surface<'a> {\n\n #[inline]\n\n fn eq(&self, &other: &&'b str) -> bool {\n\n self.0 == other\n\n }\n\n}\n\n\n\nimpl<'a> From<Surface<'a>> for String {\n\n 
#[inline]\n\n fn from(Surface(s): Surface) -> Self {\n\n Self::from(s)\n\n }\n\n}\n\n\n", "file_path": "src/tokenizer/surface.rs", "rank": 35, "score": 13.863425961141605 }, { "content": " URL,\n\n 記号,\n\n 空白,\n\n 言いよどみ,\n\n 英単語,\n\n UNK,\n\n None,\n\n}\n\n\n\nimpl Default for PoS {\n\n #[inline]\n\n fn default() -> Self {\n\n Self::None\n\n }\n\n}\n\n\n\nimpl PoS {\n\n #[inline]\n\n pub const fn count() -> usize {\n\n <Self as EnumCount>::COUNT\n", "file_path": "src/pos.rs", "rank": 36, "score": 12.984020296886865 }, { "content": "pub struct PosIterator {\n\n current: Option<PoS>,\n\n}\n\n\n\nimpl Default for PosIterator {\n\n fn default() -> Self {\n\n Self {\n\n current: Some(PoS::名詞),\n\n }\n\n }\n\n}\n\n\n\nimpl PosIterator {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl Iterator for PosIterator {\n", "file_path": "src/pos.rs", "rank": 37, "score": 11.474223133183145 }, { "content": "use std::fs::File;\n\nuse std::io::Result;\n\nuse std::path::Path;\n\nuse std::process::Command;\n\n\n\npub(crate) const DELIM: u8 = b'\\t';\n\npub(crate) const DELIM_STR: &str = \"\\t\";\n\npub(crate) const ESCAPE: u8 = b'\\\\';\n\n\n", "file_path": "src/kytea.rs", "rank": 38, "score": 11.305591933983594 }, { "content": "//! 
`kytea-tokenizer` is a wrapper of KyTea, the japanese morphological analyzer.\n\n\n\npub(crate) mod kytea;\n\npub use kytea::run_cmd;\n\n\n\nmod pos;\n\npub use pos::PoS;\n\npub use pos::PosIterator;\n\n\n\nmod tokenizer;\n\npub use tokenizer::Surface;\n\npub use tokenizer::WordIterator;\n\n\n\n#[cfg(feature = \"tantivy\")]\n\npub use tokenizer::KyTea as KyTeaTokenizer;\n\n\n\nuse tokenizer::{Word, Words};\n\n\n", "file_path": "src/lib.rs", "rank": 39, "score": 10.195942626689405 }, { "content": "#[cfg(feature = \"json\")]\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse strum::{Display, EnumCount, EnumString, IntoStaticStr};\n\n\n\nuse num_derive::FromPrimitive;\n\n\n\n#[derive(\n\n Debug,\n\n PartialEq,\n\n Eq,\n\n Clone,\n\n Copy,\n\n PartialOrd,\n\n Ord,\n\n Hash,\n\n EnumString,\n\n Display,\n\n IntoStaticStr,\n\n EnumCount,\n", "file_path": "src/pos.rs", "rank": 40, "score": 7.4142321169116085 }, { "content": "impl AsRef<str> for Surface<'_> {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0\n\n }\n\n}\n\n\n\nimpl std::ops::Deref for Surface<'_> {\n\n type Target = str;\n\n\n\n #[inline]\n\n fn deref(&self) -> &str {\n\n self.0\n\n }\n\n}\n", "file_path": "src/tokenizer/surface.rs", "rank": 41, "score": 6.926205948825741 }, { "content": "\n\n #[test]\n\n fn pos_to_string() {\n\n assert_eq!(PoS::名詞.to_string(), \"名詞\");\n\n }\n\n\n\n #[test]\n\n #[cfg(feature = \"json\")]\n\n fn pos_ser() {\n\n let pos = PoS::名詞;\n\n assert!(serde_json::to_string(&pos).is_ok());\n\n assert_eq!(serde_json::to_string(&pos).unwrap(), r#\"\"名詞\"\"#);\n\n }\n\n}\n", "file_path": "src/pos.rs", "rank": 42, "score": 5.369375665805452 }, { "content": " FromPrimitive,\n\n)]\n\n#[repr(u8)]\n\n#[cfg_attr(feature = \"json\", derive(Serialize, Deserialize))]\n\npub enum PoS {\n\n 名詞,\n\n 動詞,\n\n 接尾辞,\n\n 形容詞,\n\n 代名詞,\n\n 副詞,\n\n 形状詞,\n\n 連体詞,\n\n 接頭辞,\n\n 接続詞,\n\n 感動詞,\n\n 助詞,\n\n 補助記号,\n\n 語尾,\n\n 助動詞,\n", "file_path": "src/pos.rs", "rank": 43, "score": 5.261499230596365 }, { 
"content": " 形状詞 => Some(連体詞),\n\n 連体詞 => Some(接頭辞),\n\n 接頭辞 => Some(接続詞),\n\n 接続詞 => Some(感動詞),\n\n 感動詞 => Some(助詞),\n\n 助詞 => Some(補助記号),\n\n 補助記号 => Some(語尾),\n\n 語尾 => Some(助動詞),\n\n 助動詞 => Some(URL),\n\n URL => Some(記号),\n\n 記号 => Some(空白),\n\n 空白 => Some(言いよどみ),\n\n 言いよどみ => Some(英単語),\n\n 英単語 => Some(UNK),\n\n UNK => Some(None),\n\n None => Option::None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/pos.rs", "rank": 44, "score": 2.7153498156807085 } ]
Rust
src/auth.rs
FamiizCEO/modio-rs
101460ef3ba1c197025ba80cd01568defbc2d7e9
use std::fmt; use futures::Future as StdFuture; use url::form_urlencoded; use crate::Future; use crate::Modio; use crate::ModioMessage; #[derive(Clone, Debug, PartialEq)] pub enum Credentials { ApiKey(String), Token(String), } impl fmt::Display for Credentials { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Credentials::ApiKey(key) => f.write_str(&key), Credentials::Token(token) => f.write_str(&token), } } } pub enum Service { Steam(u64), Gog(u64), } pub struct Auth { modio: Modio, } #[derive(Deserialize)] struct AccessToken { access_token: String, } impl Auth { pub(crate) fn new(modio: Modio) -> Self { Self { modio } } pub fn request_code(&self, email: &str) -> Future<()> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("email", email) .finish(); Box::new( self.modio .post::<ModioMessage, _>("/oauth/emailrequest", data) .map(|_| ()), ) } pub fn security_code(&self, code: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("security_code", code) .finish(); Box::new( self.modio .post::<AccessToken, _>("/oauth/emailexchange", data) .map(|token| Credentials::Token(token.access_token)), ) } pub fn link(&self, email: &str, service: Service) -> Future<()> { token_required!(self.modio); let (service, id) = match service { Service::Steam(id) => ("steam", id.to_string()), Service::Gog(id) => ("gog", id.to_string()), }; let data = form_urlencoded::Serializer::new(String::new()) .append_pair("email", email) .append_pair("service", service) .append_pair("service_id", &id) .finish(); Box::new( self.modio .post::<ModioMessage, _>("/external/link", data) .map(|_| ()), ) } pub fn gog_auth(&self, ticket: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("appdata", ticket) .finish(); Box::new( self.modio .post::<AccessToken, 
_>("/external/galaxyauth", data) .map(|token| Credentials::Token(token.access_token)), ) } pub fn steam_auth(&self, ticket: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("appdata", ticket) .finish(); Box::new( self.modio .post::<AccessToken, _>("/external/steamauth", data) .map(|token| Credentials::Token(token.access_token)), ) } }
use std::fmt; use futures::Future as StdFuture; use url::form_urlencoded; use crate::Future; use crate::Modio; use crate::ModioMessage; #[derive(Clone, Debug, PartialEq)] pub enum Credentials { ApiKey(String), Token(String), } impl fmt::Display for Credentials { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Credentials::ApiKey(key) => f.write_str(&key), Credentials::Token(token) => f.write_str(&token), } } } pub enum Service { Steam(u64), Gog(u64), } pub struct Auth { modio: Modio, } #[derive(Deserialize)] struct AccessToken { access_token: String, } impl Auth { pub(crate) fn new(modio: Modio) -> Self { Self { modio } } pub fn request_code(&self, email: &str) -> Future<()> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("email", email) .finish(); Box::new( self.modio .post::<ModioMessage, _>("/oauth/emailrequest", data) .map(|_| ()), ) }
pub fn link(&self, email: &str, service: Service) -> Future<()> { token_required!(self.modio); let (service, id) = match service { Service::Steam(id) => ("steam", id.to_string()), Service::Gog(id) => ("gog", id.to_string()), }; let data = form_urlencoded::Serializer::new(String::new()) .append_pair("email", email) .append_pair("service", service) .append_pair("service_id", &id) .finish(); Box::new( self.modio .post::<ModioMessage, _>("/external/link", data) .map(|_| ()), ) } pub fn gog_auth(&self, ticket: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("appdata", ticket) .finish(); Box::new( self.modio .post::<AccessToken, _>("/external/galaxyauth", data) .map(|token| Credentials::Token(token.access_token)), ) } pub fn steam_auth(&self, ticket: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("appdata", ticket) .finish(); Box::new( self.modio .post::<AccessToken, _>("/external/steamauth", data) .map(|token| Credentials::Token(token.access_token)), ) } }
pub fn security_code(&self, code: &str) -> Future<Credentials> { apikey_required!(self.modio); let data = form_urlencoded::Serializer::new(String::new()) .append_pair("security_code", code) .finish(); Box::new( self.modio .post::<AccessToken, _>("/oauth/emailexchange", data) .map(|token| Credentials::Token(token.access_token)), ) }
function_block-full_function
[ { "content": "fn prompt(prompt: &str) -> io::Result<String> {\n\n print!(\"{}\", prompt);\n\n io::stdout().flush()?;\n\n let mut buffer = String::new();\n\n io::stdin().read_line(&mut buffer)?;\n\n Ok(buffer.trim().to_string())\n\n}\n\n\n", "file_path": "examples/auth.rs", "rank": 0, "score": 127871.27771856409 }, { "content": "/// Create a custom sorting `Filter` in ascending order.\n\n///\n\n/// ```\n\n/// use modio::filter::{custom_filter, custom_order_by_asc, Operator};\n\n///\n\n/// let filter = custom_filter(\"foo\", Operator::Like, \"bar*\")\n\n/// .order_by(custom_order_by_asc(\"foo\"));\n\n/// ```\n\npub fn custom_order_by_asc<S: Into<String>>(name: S) -> Filter {\n\n Filter::new_order_by_asc(name)\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 2, "score": 89711.66449462132 }, { "content": "/// Create a custom sorting `Filter` in descending order.\n\n///\n\n/// ```\n\n/// use modio::filter::{custom_filter, custom_order_by_desc, Operator};\n\n///\n\n/// let filter = custom_filter(\"foo\", Operator::Like, \"bar*\")\n\n/// .order_by(custom_order_by_desc(\"foo\"));\n\n/// ```\n\npub fn custom_order_by_desc<S: Into<String>>(name: S) -> Filter {\n\n Filter::new_order_by_desc(name)\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Filter {\n\n filters: BTreeSet<FilterEntry>,\n\n order_by: Option<Sorting>,\n\n limit: Option<usize>,\n\n offset: Option<usize>,\n\n}\n\n\n\nimpl Filter {\n\n pub(crate) fn new<S, T, V>(name: S, op: Operator, value: V) -> Filter\n\n where\n\n S: Into<String>,\n\n T: fmt::Display,\n\n V: Into<OneOrMany<T>>,\n\n {\n\n let mut filters = BTreeSet::new();\n", "file_path": "src/filter.rs", "rank": 3, "score": 89711.66449462132 }, { "content": "fn main() -> Result<(), Error> {\n\n dotenv::dotenv().ok();\n\n env_logger::init();\n\n\n\n let host = env::var(\"MODIO_HOST\").unwrap_or_else(|_| \"https://api.test.mod.io/v1\".to_string());\n\n\n\n let api_key = prompt(\"Enter api key: \").expect(\"read api key\");\n\n let email = prompt(\"Enter 
email: \").expect(\"read email\");\n\n\n\n let mut rt = Runtime::new().expect(\"new rt\");\n\n let modio = Modio::host(host, Credentials::ApiKey(api_key))?;\n\n\n\n rt.block_on(modio.auth().request_code(&email))?;\n\n\n\n let code = prompt(\"Enter security code: \").expect(\"read code\");\n\n let token = rt.block_on(modio.auth().security_code(&code))?;\n\n println!(\"Access token:\\n{}\", token);\n\n\n\n // Consume the endpoint and create an endpoint with new credentials.\n\n let modio = modio.with_credentials(token);\n\n\n\n let user = rt.block_on(modio.me().authenticated_user())?;\n\n println!(\"Authenticated user:\\n{:#?}\", user);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/auth.rs", "rank": 4, "score": 69914.91885314578 }, { "content": "fn prompt(prompt: &str) -> io::Result<u32> {\n\n print!(\"{}\", prompt);\n\n io::stdout().flush()?;\n\n let mut buffer = String::new();\n\n io::stdin().read_line(&mut buffer)?;\n\n Ok(buffer.trim().parse().expect(\"Invalid value\"))\n\n}\n\n\n", "file_path": "examples/download.rs", "rank": 5, "score": 69439.26535987147 }, { "content": "pub trait QueryString: private::Sealed {\n\n fn to_query_string(&self) -> String;\n\n}\n", "file_path": "src/lib.rs", "rank": 6, "score": 62629.4503125224 }, { "content": "enum Sorting {\n\n Asc(String),\n\n Desc(String),\n\n}\n\n\n\nimpl fmt::Display for Sorting {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use std::fmt::Write;\n\n\n\n match self {\n\n Sorting::Asc(field) => f.write_str(field),\n\n Sorting::Desc(field) => {\n\n f.write_char('-')?;\n\n f.write_str(field)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Filter operators of mod.io.\n", "file_path": "src/filter.rs", "rank": 7, "score": 52098.413002960624 }, { "content": "enum State {\n\n File(OpenFuture<PathBuf>),\n\n Read(FramedRead<Box<dyn AsyncRead + Send + Sync>, BytesCodec>),\n\n}\n\n\n\npub struct FileStream {\n\n state: Option<State>,\n\n}\n\n\n\nimpl FileStream {\n\n pub fn new<T: 'static + AsyncRead + Send + 
Sync>(inner: T) -> FileStream {\n\n let framed = FramedRead::new(\n\n Box::new(inner) as Box<dyn AsyncRead + Send + Sync>,\n\n BytesCodec::new(),\n\n );\n\n FileStream {\n\n state: Some(State::Read(framed)),\n\n }\n\n }\n\n\n", "file_path": "src/multipart.rs", "rank": 8, "score": 52098.413002960624 }, { "content": "struct Config {\n\n host: Option<String>,\n\n agent: Option<String>,\n\n credentials: Credentials,\n\n builder: Option<ClientBuilder>,\n\n proxies: Vec<Proxy>,\n\n #[cfg(feature = \"tls\")]\n\n tls: TlsBackend,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 52098.413002960624 }, { "content": "#[cfg(feature = \"tls\")]\n\nenum TlsBackend {\n\n #[cfg(feature = \"default-tls\")]\n\n Default,\n\n #[cfg(feature = \"rustls-tls\")]\n\n Rustls,\n\n}\n\n\n\n#[cfg(feature = \"tls\")]\n\nimpl Default for TlsBackend {\n\n fn default() -> TlsBackend {\n\n #[cfg(feature = \"default-tls\")]\n\n {\n\n TlsBackend::Default\n\n }\n\n #[cfg(all(feature = \"rustls-tls\", not(feature = \"default-tls\")))]\n\n {\n\n TlsBackend::Rustls\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 10, "score": 50323.41726967519 }, { "content": "struct FilterEntry {\n\n name: String,\n\n op: Operator,\n\n value: OneOrMany<String>,\n\n}\n\n\n\nimpl FilterEntry {\n\n fn new(name: String, op: Operator, value: OneOrMany<String>) -> FilterEntry {\n\n FilterEntry { name, op, value }\n\n }\n\n}\n\n\n\n// impl PartialEq, Eq, PartialOrd, Ord for FilterEntry {{{\n\nimpl std::cmp::Eq for FilterEntry {}\n\n\n\nimpl PartialEq for FilterEntry {\n\n fn eq(&self, other: &FilterEntry) -> bool {\n\n match self.cmp(other) {\n\n std::cmp::Ordering::Equal => true,\n\n _ => false,\n", "file_path": "src/filter.rs", "rank": 11, "score": 50323.41726967519 }, { "content": "/// Create a custom `Filter`.\n\n///\n\n/// ```\n\n/// use modio::filter::{custom_filter, Operator};\n\n///\n\n/// let filter = custom_filter(\"foo\", Operator::Equals, \"bar\");\n\n/// ```\n\npub fn custom_filter<S, T, V>(name: 
S, op: Operator, value: V) -> Filter\n\nwhere\n\n S: Into<String>,\n\n T: fmt::Display,\n\n V: Into<OneOrMany<T>>,\n\n{\n\n Filter::new(name, op, value)\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 12, "score": 49291.45695642013 }, { "content": " pub trait Sealed {}\n\n\n\n impl Sealed for super::filter::Filter {}\n\n impl Sealed for super::files::EditFileOptions {}\n\n impl Sealed for super::games::AddTagsOptions {}\n\n impl Sealed for super::games::EditGameOptions {}\n\n impl Sealed for super::games::DeleteTagsOptions {}\n\n impl Sealed for super::mods::DeleteMediaOptions {}\n\n impl Sealed for super::mods::EditDependenciesOptions {}\n\n impl Sealed for super::mods::EditTagsOptions {}\n\n impl Sealed for super::mods::EditModOptions {}\n\n impl Sealed for super::mods::Rating {}\n\n impl Sealed for super::reports::Report {}\n\n impl Sealed for super::reports::Resource {}\n\n impl Sealed for super::teams::EditTeamMemberOptions {}\n\n impl Sealed for super::teams::InviteTeamMemberOptions {}\n\n impl Sealed for super::types::mods::MetadataMap {}\n\n impl Sealed for super::users::Resource {}\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 46068.414648446895 }, { "content": "fn current_timestamp() -> u64 {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs()\n\n}\n\n\n", "file_path": "examples/events.rs", "rank": 14, "score": 44842.76197812995 }, { "content": "fn main() -> Result<(), Error> {\n\n dotenv::dotenv().ok();\n\n env_logger::init();\n\n\n\n // Fetch the access token / api key from the environment of the current process.\n\n let creds = match (env::var(\"MODIO_TOKEN\"), env::var(\"MODIO_API_KEY\")) {\n\n (Ok(token), _) => Credentials::Token(token),\n\n (_, Ok(apikey)) => Credentials::ApiKey(apikey),\n\n _ => {\n\n eprintln!(\"missing MODIO_TOKEN or MODIO_API_KEY environment variable\");\n\n process::exit(1);\n\n }\n\n };\n\n let host = env::var(\"MODIO_HOST\").unwrap_or_else(|_| 
\"https://api.test.mod.io/v1\".to_string());\n\n\n\n // tokio runtime to execute the modio futures.\n\n let mut rt = Runtime::new().expect(\"new rt\");\n\n\n\n // Creates a `Modio` endpoint for the test environment.\n\n let modio = Modio::host(host, creds)?;\n", "file_path": "examples/events.rs", "rank": 15, "score": 42565.138349733315 }, { "content": "fn main() -> Result<(), Error> {\n\n dotenv::dotenv().ok();\n\n env_logger::init();\n\n\n\n // Fetch the access token / api key from the environment of the current process.\n\n let creds = match (env::var(\"MODIO_TOKEN\"), env::var(\"MODIO_API_KEY\")) {\n\n (Ok(token), _) => Credentials::Token(token),\n\n (_, Ok(apikey)) => Credentials::ApiKey(apikey),\n\n _ => {\n\n eprintln!(\"missing MODIO_TOKEN or MODIO_API_KEY environment variable\");\n\n process::exit(1);\n\n }\n\n };\n\n let host = env::var(\"MODIO_HOST\").unwrap_or_else(|_| \"https://api.test.mod.io/v1\".to_string());\n\n\n\n // tokio runtime to execute the modio futures.\n\n let mut rt = Runtime::new().expect(\"new rt\");\n\n\n\n // Creates a `Modio` endpoint for the test environment.\n\n let modio = Modio::host(host, creds)?;\n", "file_path": "examples/download.rs", "rank": 16, "score": 42565.138349733315 }, { "content": "fn main() -> Result<(), Error> {\n\n dotenv::dotenv().ok();\n\n env_logger::init();\n\n\n\n // Fetch the access token / api key from the environment of the current process.\n\n let creds = match (env::var(\"MODIO_TOKEN\"), env::var(\"MODIO_API_KEY\")) {\n\n (Ok(token), _) => Credentials::Token(token),\n\n (_, Ok(apikey)) => Credentials::ApiKey(apikey),\n\n _ => {\n\n eprintln!(\"missing MODIO_TOKEN or MODIO_API_KEY environment variable\");\n\n process::exit(1);\n\n }\n\n };\n\n let host = env::var(\"MODIO_HOST\").unwrap_or_else(|_| \"https://api.test.mod.io/v1\".to_string());\n\n\n\n // tokio runtime to execute the modio futures.\n\n let mut rt = Runtime::new().expect(\"new rt\");\n\n\n\n // Creates a `Modio` endpoint for the test 
environment.\n\n let modio = Modio::host(host, creds)?;\n", "file_path": "examples/mymods.rs", "rank": 17, "score": 42565.138349733315 }, { "content": "pub trait DeleteOptions: private::Sealed {}\n\n\n", "file_path": "src/lib.rs", "rank": 18, "score": 37854.415631486365 }, { "content": "pub trait AddOptions: private::Sealed {}\n", "file_path": "src/lib.rs", "rank": 19, "score": 37854.415631486365 }, { "content": "pub trait In: crate::private::Sealed {\n\n /// Creates [`In`](enum.Operator.html#variant.In) filter.\n\n fn _in<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 20, "score": 37756.297522616995 }, { "content": "pub trait NotIn: crate::private::Sealed {\n\n /// Creates [`NotIn`](enum.Operator.html#variant.NotIn) filter.\n\n fn not_in<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 21, "score": 37756.297522616995 }, { "content": "pub trait NotEq: crate::private::Sealed {\n\n /// Creates [`Not`](enum.Operator.html#variant.Not) filter.\n\n fn ne<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 22, "score": 36486.22343401995 }, { "content": "pub trait Eq: crate::private::Sealed {\n\n /// Creates [`Equals`](enum.Operator.html#variant.Equals) filter.\n\n fn eq<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 23, "score": 36486.22343401995 }, { "content": "pub trait OrderBy: crate::private::Sealed {\n\n /// Creates sorting filter in ascending order.\n\n fn asc() -> Filter;\n\n\n\n /// Creates sorting filter in descending order.\n\n fn desc() -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 24, "score": 36486.22343401995 }, { "content": "pub trait NotLike: crate::private::Sealed {\n\n /// Creates [`NotLike`](enum.Operator.html#variant.Like) filter.\n\n fn not_like<T: fmt::Display, V: 
Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 25, "score": 36486.22343401995 }, { "content": "pub trait BitwiseAnd: crate::private::Sealed {\n\n /// Creates [`BitwiseAnd`](enum.Operator.html#variant.BitwiseAnd) filter.\n\n fn bit_and<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 26, "score": 36486.22343401995 }, { "content": "pub trait Like: crate::private::Sealed {\n\n /// Creates [`Like`](enum.Operator.html#variant.Like) filter.\n\n fn like<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 27, "score": 36486.22343401995 }, { "content": "pub trait Cmp: crate::private::Sealed {\n\n /// Creates [`Max`](enum.Operator.html#variant.Max) filter.\n\n fn le<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n\n\n /// Creates [`SmallerThan`](enum.Operator.html#variant.SmallerThan) filter.\n\n fn lt<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n\n\n /// Creates [`Min`](enum.Operator.html#variant.Min) filter.\n\n fn ge<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n\n\n /// Creates [`GreaterThan`](enum.Operator.html#variant.GreaterThan) filter.\n\n fn gt<T: fmt::Display, V: Into<OneOrMany<T>>>(value: V) -> Filter;\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 28, "score": 36486.22343401995 }, { "content": "use std::env;\n\nuse std::io::{self, Write};\n\nuse tokio::runtime::Runtime;\n\n\n\nuse modio::error::Error;\n\nuse modio::{auth::Credentials, Modio};\n\n\n", "file_path": "examples/auth.rs", "rank": 35, "score": 32701.132923563327 }, { "content": "fn deserialize_message<'de, D>(deserializer: D) -> Result<(), D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n ModioMessage::deserialize(deserializer).map(|_| ())\n\n}\n\n\n\n/// See the [Multiple Item Response](https://docs.mod.io/#response-formats) docs for more\n\n/// 
information.\n\n#[derive(Debug, Deserialize)]\n\npub struct List<T> {\n\n pub data: Vec<T>,\n\n #[serde(rename = \"result_count\")]\n\n pub count: u32,\n\n #[serde(rename = \"result_total\")]\n\n pub total: u32,\n\n #[serde(rename = \"result_limit\")]\n\n pub limit: u32,\n\n #[serde(rename = \"result_offset\")]\n\n pub offset: u32,\n", "file_path": "src/types.rs", "rank": 38, "score": 29833.7738871135 }, { "content": "/// Deserialize empty objects for the `avatar` property of the User object as `None`.\n\n///\n\n/// The mod.io api returns `{\"avatar\": {}}` for users without avatars instead of returning\n\n/// `{\"avatar\": null}`.\n\nfn deserialize_avatar<'de, D>(deserializer: D) -> Result<Option<Avatar>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n match Avatar::deserialize(deserializer) {\n\n Ok(avatar) => Ok(Some(avatar)),\n\n Err(err) => {\n\n let err_s = err.to_string();\n\n if err_s.starts_with(\"missing field `filename`\")\n\n || err_s.starts_with(\"invalid type: null\")\n\n {\n\n Ok(None)\n\n } else {\n\n Err(err)\n\n }\n\n }\n\n }\n\n}\n\n\n\npub mod game {\n", "file_path": "src/types.rs", "rank": 39, "score": 27014.89866367209 }, { "content": "### v0.4.1 (not released)\n\n\n\n* New `Error::is_authentication` accessor\n\n\n\n* Fix typo `EditDependenciesOptions`\n\n\n\n* Replace `ModioResult` with deprecated type alias for `EntityResult`.\n\n\n\n* Replace `ModioListResponse` with deprecated type alias for `List`.\n\n\n\n### v0.4 (2019-04-01)\n\n\n\n#### Features\n\n\n\n* A `Builder` to create a `Modio` client with custom configuration. 
([45de8cc6][45de8cc6])\n\n\n\n```rust\n\nlet creds = Credentials::Token(\"<token>\".to_string());\n\nlet modio = Modio::builder(creds)\n\n .host(\"host\")\n\n .agent(\"user-agent\")\n\n .build()?;\n\n```\n\n\n\n* Proxy support ([2b12b40a][2b12b40a])\n\n\n\n```rust\n\nlet proxy = modio::client::Proxy::all(\"http://127.0.0.1:8888\")?;\n\nlet modio = Modio::builder(creds)\n\n .proxy(proxy)\n\n .build()?;\n\n```\n\n\n\n* Add optional `rustls-tls` feature to use rustls instead of native-tls. ([a12b4aa8][a12b4aa8])\n\n\n\n if compiled with `default-tls` and `rustls-tls` features then it's possible to choose the backend with `Builder::use_default_tls()` and `Builder::use_rustls_tls()`.\n\n\n\n* Add methods to provide streams over entities. ([39bd3287][39bd3287], [2a47d67c][2a47d67c])\n\n\n\n```rust\n\nuse modio::filter::prelude::*;\n\nlet filter = Fulltext::eq(\"foobar\");\n\n\n\nlet mods = game.mods().iter(&filter).for_each(|m| {\n\n // do stuff\n\n});\n\nlet stats = game.mods().statistics(&Default::default()).for_each(|stats| {\n\n // do stuff\n\n});\n\n```\n\n\n\n* Add type alias `List<T>` for `ModioListResponse<T>`.\n\n\n\n* Add Steam authentication `modio.auth().steam_auth(\"<auth-ticket>\")`. ([60072f86][60072f86])\n\n\n\n* Add GOG Galaxy authentication `modio.auth().gog_auth(\"<auth-ticket>\")`. ([6e1b1e67][6e1b1e67])\n\n\n\n* Link external accounts `modio.auth().link(\"email\", modio::auth::Service)`. 
([30b158ab][30b158ab])\n\n\n\n* `modio::me::Event` with new field `game_id`.\n\n\n\n* Validate credentials before sending requests.\n\n\n\n* debug & trace log for requests & responses.\n\n\n", "file_path": "CHANGELOG.md", "rank": 40, "score": 18429.114082169584 }, { "content": "### Authentication\n\n```rust\n\n// Request a security code be sent to the email address.\n\nrt.block_on(modio.auth().request_code(\"[email protected]\"))?;\n\n\n\n// Wait for the 5-digit security code\n\nlet token = rt.block_on(modio.auth().security_code(\"QWERT\"))?;\n\n\n\n// Create an endpoint with the new credentials\n\nlet modio = modio.with_credentials(Credentials::Token(token));\n\n```\n\nSee [full example](examples/auth.rs).\n\n\n\n### Games\n\n```rust\n\nuse modio::filter::prelude::*;\n\n\n\n// List games with filter `name_id = \"0ad\"`\n\nlet task = modio.games().list(&NameId::eq(\"0ad\"));\n\n\n\nlet games = rt.block_on(task)?;\n\n```\n\n\n\n### Mods\n\n```rust\n\n// List all mods for 0 A.D.\n\nlet mods = rt.block_on(modio.game(5).mods().list(&Default::default))?;\n\n\n\n// Get the details of the `balancing-mod` mod\n\nlet balancing_mod = rt.block_on(modio.mod_(5, 110).get())?;\n\n```\n\n\n\n### Download\n\n```rust\n\nuse modio::download::{ResolvePolicy, DownloadAction};\n\n\n\n// Download the primary file of a mod.\n\nlet action = DownloadAction::Primary {\n\n game_id: 5,\n\n mod_id: 19,\n\n};\n\nlet (len, out) = rt.block_on(modio.download(action, out))?;\n\n\n\n// Download the specific file of a mod.\n\nlet action = DownloadAction::File {\n\n game_id: 5,\n\n mod_id: 19,\n\n file_id: 101,\n\n};\n\nlet (len, out) = rt.block_on(modio.download(action, out))?;\n\n\n\n// Download the specific version of a mod.\n\n// if multiple files are found then the latest file is downloaded.\n\n// Set policy to `ResolvePolicy::Fail` to return with `ErrorKind::Download(DownloadError::MultipleFilesFound)`.\n\nlet action = DownloadAction::Version {\n\n game_id: 5,\n\n mod_id: 19,\n\n version: 
\"0.1\".to_string(),\n\n policy: ResolvePolicy::Latest,\n\n};\n\nlet (len, out) = rt.block_on(modio.download(action, out))?;\n\n```\n\n\n\n### Examples\n\n\n\nSee [examples directory](examples/) for some getting started examples.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you,\n\nas defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 41, "score": 18424.07469604401 }, { "content": "<a href=\"https://mod.io\"><img src=\"https://static.mod.io/v1/images/branding/modio-color-dark.svg\" alt=\"mod.io\" width=\"400\"/></a>\n\n\n\n# modio-rs\n\n[![Crates.io][crates-badge]][crates-url]\n\n![License][license-badge]\n\n[![Released API docs][docs-badge]][docs-url]\n\n[![Master API docs][master-docs-badge]][master-docs-url]\n\n[![Travis Build Status][travis-badge]][travis-url]\n\n\n\n[crates-badge]: https://img.shields.io/crates/v/modio.svg\n\n[crates-url]: https://crates.io/crates/modio\n\n[docs-badge]: https://docs.rs/modio/badge.svg\n\n[docs-url]: https://docs.rs/modio\n\n[license-badge]: https://img.shields.io/crates/l/modio.svg\n\n[master-docs-badge]: https://img.shields.io/badge/docs-master-green.svg\n\n[master-docs-url]: https://nickelc.github.io/modio-rs/master/\n\n[travis-badge]: https://travis-ci.org/nickelc/modio-rs.svg?branch=master\n\n[travis-url]: https://travis-ci.org/nickelc/modio-rs\n\n\n\n`modio` provides a set of building blocks for interacting with the [mod.io](https://mod.io) API.\n\n\n\nThe client uses asynchronous I/O, backed by the `futures` and `tokio` crates, and requires both to be used alongside.\n\n\n\n## 
mod.io\n\n[mod.io](https://mod.io) is a drop-in modding solution from the founders of [ModDB.com](https://www.moddb.com),\n\nthat facilitates the upload, search, browsing, downloading and trading of mods in-game.\n\n\n\n## Usage\n\n\n\nTo use `modio`, add this to your `Cargo.toml`\n\n```toml\n\n[dependencies]\n\nmodio = \"0.4\"\n\n```\n\n\n\n### Basic Setup\n\n```rust\n\nuse modio::{Credentials, Error, Modio};\n\nuse tokio::runtime::Runtime;\n\n\n\nfn main() -> Result<(), Error> {\n\n let mut rt = Runtime::new()?;\n\n let modio = Modio::new(\n\n Credentials::ApiKey(String::from(\"user-or-game-apikey\")),\n\n )?;\n\n\n\n // create some tasks and execute them\n\n // let result = rt.block_on(task)?;\n\n Ok(())\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 42, "score": 18423.349064616366 }, { "content": "#### Breaking Changes\n\n\n\n* Rewrite of filtering and sorting. ([e94c4dcd][e94c4dcd])\n\n\n\n ```rust\n\n // Before\n\n use modio::filter::{Operator, Order};\n\n\n\n let mut opts = ModsListOptions::new();\n\n opts.game_id(Operator::In, vec![1, 2]);\n\n opts.limit(10);\n\n opts.sort_by(ModsListOptions::POPULAR, Order::Desc);\n\n\n\n // After\n\n use modio::filter::prelude::*;\n\n use modio::mods::filters::{GameId, Popular};\n\n\n\n let filter = GameId::_in(vec![1, 2])\n\n .limit(10)\n\n .order_by(Popular::desc());\n\n ```\n\n\n\n* Removed builders of all \\*Options types and changed the options to be by-value instead of by-ref.\n\n ([7fe661b6][7fe661b6], [07c3ecb6][07c3ecb6])\n\n\n\n ```rust\n\n // Before\n\n let mut builder = EditModOptions::builder();\n\n if some_val {\n\n builder.name(\"foobar\");\n\n }\n\n let opts = builder.build();\n\n modio.mod_(34, 101).edit(&opts);\n\n\n\n // After\n\n let mut opts = EditModOptions::default();\n\n if some_val {\n\n opts = opts.name(\"foobar\");\n\n }\n\n modio.mod_(34, 101).edit(&opts);\n\n ```\n\n\n\n* `GameRef::edit`, `ModRef::edit` and `FileRef::edit` are now returning `Future<modio::ModioResult<T>>`.\n\n 
([6b31ac4a][6b31ac4a])\n\n\n\n* Switch from `hyper` to `reqwest`. Type parameter for `Modio` is no longer necessary.\n\n\n\n* Drop `failure` crate again and implement std error trait.\n\n\n\n* Restrict conversion to `Error` to internal use only. ([1ac2b471][1ac2b471])\n\n\n\n* `Modio::new` and `Modio::host` return `Result<Modio>`.\n\n\n\n* `Modio::custom` removed in flavor of `Builder`.\n\n\n\n* User-Agent parameter removed from `Modio::new` and `Modio::host`.\n\n\n\n* No longer expose `ModioMessage`.\n\n\n\n* New ErrorKind for validation errors. ([ca4fe09b][ca4fe09b])\n\n\n\n* Map status, visibility and other options as enums and bitfields as `bitflags`.\n\n ([97a86e8a][97a86e8a], [f2f1acec][f2f1acec])\n\n\n\n* Break up event & event types to `modio::me::{Event, EventType}` and `modio::mods::{Event, EventType}`.\n\n ([57fc4447][57fc4447])\n\n\n\n* Change `Me::{events, subscriptions, ratings}`, `Mods::{events, statistics}` and `Mod::events` to streams over entities.\n", "file_path": "CHANGELOG.md", "rank": 43, "score": 18423.109652049556 }, { "content": "### v0.3 (2018-10-04)\n\n* builtin method `Modio::download` for downloading files\n\n ([c4029f1b](https://github.com/nickelc/modio-rs/commit/c4029f1bd9ba099df582f2c5ce10420d7a85db9c))\n\n\n\n#### Breaking Changes\n\n* reworked errors with `failure` crate\n\n ([0acc1e80](https://github.com/nickelc/modio-rs/commit/0acc1e807ef5de36950604d3d15e7ef86ea88027))\n\n\n\n### v0.2.2 (2018-09-20)\n\n* add missing `Mod::stats` property\n\n ([0af0580b](https://github.com/nickelc/modio-rs/commit/0af0580b9a588024fa38ca60ad419fc499321574))\n\n\n\n* update dev dependencies to fix build issues with openssl\n\n ([41a143e5](https://github.com/nickelc/modio-rs/commit/41a143e54cca35c26517810a3ceecc9aa45a9968))\n\n\n\n* new method to add custom filters to list options\n\n ([a81771c4](https://github.com/nickelc/modio-rs/commit/a81771c4902448d45379eedc4a98aa5f24394827))\n\n\n\n### v0.2.1 (2018-09-10)\n\n* use the new endpoint `/me/ratings` 
to list the submitted mod ratings\n\n ([09117df5](https://github.com/nickelc/modio-rs/commit/09117df59e6f9a9de2fc104fc458b7f99d5740a8))\n\n\n\n* new property `total` for `ModioListResponse` added\n\n ([f2d84642](https://github.com/nickelc/modio-rs/commit/f2d84642a09159203d7e11ceb6c8cf0cf7414a37))\n\n\n\n* new read-only property `Mod::description_plaintext`\n\n ([743b5c5c](https://github.com/nickelc/modio-rs/commit/743b5c5cbfbfdc16038c76c161e6b8222688ab95))\n\n\n\n* fixed query string separator\n\n ([fa90195c](https://github.com/nickelc/modio-rs/commit/fa90195cab717e27a5a7912f781c2dd8cc350af8))\n\n\n\n### v0.2.0 (2018-08-09)\n\n\n\n#### Breaking Changes\n\n\n\n* `Mod::rating_summary` is gone.\n\n Replaced with the new statistics endpoints `Mods::statistics` and `ModRef::statistics`.\n\n\n\n ([33388dd3](https://github.com/nickelc/modio-rs/commit/33388dd3686ad8056f92444176ea7b0df6c497b2))\n", "file_path": "CHANGELOG.md", "rank": 44, "score": 18415.849621848403 }, { "content": " ([2a47d67c][2a47d67c])\n\n\n\n[45de8cc6]: https://github.com/nickelc/modio-rs/commit/45de8cc6f13c15abacbf55d43c956efd2f781950\n\n[2b12b40a]: https://github.com/nickelc/modio-rs/commit/2b12b40afdf87e42460e3a37a3fd69dfc2e8db6b\n\n[a12b4aa8]: https://github.com/nickelc/modio-rs/commit/a12b4aa89c1126dc83100646d8d84dd789bc7f61\n\n[39bd3287]: https://github.com/nickelc/modio-rs/commit/39bd3287b65066c9bfe410f16165b0383d4fa444\n\n[2a47d67c]: https://github.com/nickelc/modio-rs/commit/2a47d67c2a272af8c4e03593e801cb455b121e0e\n\n[60072f86]: https://github.com/nickelc/modio-rs/commit/60072f8672f06f2cea815aa6f4f659d44be974a0\n\n[30b158ab]: https://github.com/nickelc/modio-rs/commit/30b158abedae6b9e71cae66fcdc440f89eafa413\n\n[6e1b1e67]: https://github.com/nickelc/modio-rs/commit/6e1b1e675187c4df6d51972b2bc938353dac7071\n\n[e94c4dcd]: https://github.com/nickelc/modio-rs/commit/e94c4dcdd0a8ef23df338b1945bade4bdb2896a1\n\n[7fe661b6]: 
https://github.com/nickelc/modio-rs/commit/7fe661b68f50794b40db475993e3cab8acc19dd3\n\n[07c3ecb6]: https://github.com/nickelc/modio-rs/commit/07c3ecb6c9946c64565d8c28c28ccc3a040aed53\n\n[ca4fe09b]: https://github.com/nickelc/modio-rs/commit/ca4fe09b506d9fc393ccf4084879a8e97068eb37\n\n[97a86e8a]: https://github.com/nickelc/modio-rs/commit/97a86e8ad50f3251d1b561fe75e997627fd8e19a\n\n[f2f1acec]: https://github.com/nickelc/modio-rs/commit/f2f1acec4f4c011e60de613d3c86547bc60c019a\n\n[6b31ac4a]: https://github.com/nickelc/modio-rs/commit/6b31ac4abee97521376803f150e1f9f0ce5c8781\n\n[1ac2b471]: https://github.com/nickelc/modio-rs/commit/1ac2b4710373c598c87a9b78e293b68329266c38\n\n[57fc4447]: https://github.com/nickelc/modio-rs/commit/57fc444761499a21ef58ffa6bb81e4ff6f99be1f\n\n\n", "file_path": "CHANGELOG.md", "rank": 45, "score": 18414.023659538077 }, { "content": "## Examples of using modio\n\n\n\nRun examples with `cargo run --example example_name`\n\n\n\n### Available examples\n\n\n\n* [`auth`](auth.rs) - Request an access token and print the authenticated user. See [Email Authentication Flow](https://docs.mod.io/#email-authentication-flow).\n\n\n\n* [`download`](download.rs) - Download the latest modfile for a given mod of a game.\n\n\n\n* [`events`](events.rs) - Poll the user events from [`/me/events`](https://docs.mod.io/#get-user-events) every 10 seconds.\n\n\n\n* [`mymods`](mymods.rs) - List all mods the *authenticated user* added or is team member of. 
See [`/me/mods`](https://docs.mod.io/#get-user-mods).\n", "file_path": "examples/README.md", "rank": 46, "score": 17770.379250421447 }, { "content": "}\n\n\n\nimpl Games {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games{}\", more)\n\n }\n\n\n\n /// List all games.\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<Game>> {\n\n let mut uri = vec![self.path(\"\")];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n", "file_path": "src/games.rs", "rank": 47, "score": 23.136066204257723 }, { "content": "//! Mod comments interface\n\nuse crate::prelude::*;\n\npub use crate::types::mods::Comment;\n\n\n\npub struct Comments {\n\n modio: Modio,\n\n game: u32,\n\n mod_id: u32,\n\n}\n\n\n\nimpl Comments {\n\n pub fn new(modio: Modio, game: u32, mod_id: u32) -> Self {\n\n Self {\n\n modio,\n\n game,\n\n mod_id,\n\n }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n", "file_path": "src/comments.rs", "rank": 48, "score": 22.172245086073623 }, { "content": "//! Reports interface\n\nuse url::form_urlencoded;\n\n\n\nuse crate::prelude::*;\n\n\n\npub struct Reports {\n\n modio: Modio,\n\n}\n\n\n\nimpl Reports {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n\n\n\n /// Submit a report for any resource on mod.io. 
[required: token]\n\n pub fn submit(&self, report: &Report) -> Future<()> {\n\n token_required!(self.modio);\n\n Box::new(\n\n self.modio\n\n .post::<ModioMessage, _>(\"/report\", report.to_query_string())\n", "file_path": "src/reports.rs", "rank": 49, "score": 21.472191287531523 }, { "content": "pub use crate::types::Status;\n\n\n\n/// Interface for mods the authenticated user added or is team member of.\n\npub struct MyMods {\n\n modio: Modio,\n\n}\n\n\n\nimpl MyMods {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n\n\n\n /// List all mods the authenticated user added or is team member of. [required: token]\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<Mod>> {\n\n token_required!(self.modio);\n\n let mut uri = vec![\"/me/mods\".to_owned()];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n", "file_path": "src/mods.rs", "rank": 50, "score": 20.905088768381514 }, { "content": "\n\n/// Reference interface of a game.\n\npub struct GameRef {\n\n modio: Modio,\n\n id: u32,\n\n}\n\n\n\nimpl GameRef {\n\n pub(crate) fn new(modio: Modio, id: u32) -> Self {\n\n Self { modio, id }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games/{}{}\", self.id, more)\n\n }\n\n\n\n /// Get a reference to the Modio game object that this `GameRef` refers to.\n\n pub fn get(&self) -> Future<Game> {\n\n self.modio.get::<Game>(&format!(\"/games/{}\", self.id))\n\n }\n", "file_path": "src/games.rs", "rank": 51, "score": 20.550007856603948 }, { "content": "//! 
Mod metadata KVP interface\n\nuse futures::future;\n\nuse url::form_urlencoded;\n\n\n\nuse crate::error::Error;\n\nuse crate::prelude::*;\n\nuse crate::types::mods::MetadataMap;\n\n\n\npub struct Metadata {\n\n modio: Modio,\n\n game: u32,\n\n mod_id: u32,\n\n}\n\n\n\nimpl Metadata {\n\n pub(crate) fn new(modio: Modio, game: u32, mod_id: u32) -> Self {\n\n Self {\n\n modio,\n\n game,\n\n mod_id,\n", "file_path": "src/metadata.rs", "rank": 52, "score": 20.186831837465135 }, { "content": "use crate::reports::Reports;\n\nuse crate::types::ModioMessage;\n\nuse crate::users::Users;\n\n\n\npub use crate::auth::Credentials;\n\npub use crate::download::DownloadAction;\n\npub use crate::error::{Error, Result};\n\n#[doc(hidden)]\n\npub use crate::types::ModioErrorResponse;\n\n#[doc(hidden)]\n\n#[allow(deprecated)]\n\npub use crate::types::ModioResult;\n\npub use crate::types::{EntityResult, List};\n\n\n\nconst DEFAULT_HOST: &str = \"https://api.mod.io/v1\";\n\nconst TEST_HOST: &str = \"https://api.test.mod.io/v1\";\n\nconst DEFAULT_AGENT: &str = concat!(env!(\"CARGO_PKG_NAME\"), '/', env!(\"CARGO_PKG_VERSION\"));\n\n\n\npub type Future<T> = Box<dyn StdFuture<Item = T, Error = Error> + Send>;\n\npub type Stream<T> = Box<dyn StdStream<Item = T, Error = Error> + Send>;\n", "file_path": "src/lib.rs", "rank": 53, "score": 19.40113545287999 }, { "content": "}\n\n\n\nimpl ModRef {\n\n pub(crate) fn new(modio: Modio, game: u32, id: u32) -> Self {\n\n Self { modio, game, id }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games/{}/mods/{}{}\", self.game, self.id, more)\n\n }\n\n\n\n /// Get a reference to the Modio mod object that this `ModRef` refers to.\n\n pub fn get(&self) -> Future<Mod> {\n\n self.modio.get(&self.path(\"\"))\n\n }\n\n\n\n /// Return a reference to an interface that provides access to the files of a mod.\n\n pub fn files(&self) -> Files {\n\n Files::new(self.modio.clone(), self.game, self.id)\n\n }\n", "file_path": "src/mods.rs", "rank": 54, 
"score": 19.40012295080741 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Generic endpoint for sub-resources\n\npub struct Endpoint<Out>\n\nwhere\n\n Out: DeserializeOwned + 'static,\n\n{\n\n modio: Modio,\n\n path: String,\n\n phantom: PhantomData<Out>,\n\n}\n\n\n\nimpl<Out> Endpoint<Out>\n\nwhere\n\n Out: DeserializeOwned + 'static + Send,\n\n{\n\n pub(crate) fn new(modio: Modio, path: String) -> Endpoint<Out> {\n\n Self {\n", "file_path": "src/lib.rs", "rank": 55, "score": 19.291862450203798 }, { "content": "pub struct MyGames {\n\n modio: Modio,\n\n}\n\n\n\nimpl MyGames {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n\n\n\n /// List all games the authenticated user added or is team member of. [required: token]\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<Game>> {\n\n token_required!(self.modio);\n\n let mut uri = vec![\"/me/games\".to_owned()];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.get(&uri.join(\"?\"))\n", "file_path": "src/games.rs", "rank": 56, "score": 19.11905115132764 }, { "content": "use std::io::Error;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse bytes::{Bytes, BytesMut};\n\nuse futures::{task, Async, Future, Poll, Stream};\n\nuse mime::Mime;\n\nuse reqwest::r#async::multipart::Part;\n\nuse tokio_codec::{BytesCodec, FramedRead};\n\nuse tokio_fs::file::{File, OpenFuture};\n\nuse tokio_io::AsyncRead;\n\n\n\npub struct FileSource {\n\n pub inner: FileStream,\n\n pub filename: String,\n\n pub mime: Mime,\n\n}\n\n\n\nimpl From<FileSource> for Part {\n\n fn from(source: FileSource) -> Part {\n\n Part::stream(source.inner)\n\n .file_name(source.filename)\n\n .mime_str(&source.mime.to_string())\n\n .expect(\"FileSource::into::<Part>()\")\n\n }\n\n}\n\n\n", "file_path": "src/multipart.rs", "rank": 57, "score": 18.26343299841892 }, { "content": "/// Interface for mods of a game.\n\npub struct Mods 
{\n\n modio: Modio,\n\n game: u32,\n\n}\n\n\n\nimpl Mods where {\n\n pub(crate) fn new(modio: Modio, game: u32) -> Self {\n\n Self { modio, game }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games/{}/mods{}\", self.game, more)\n\n }\n\n\n\n /// Return a reference to a mod.\n\n pub fn get(&self, id: u32) -> ModRef {\n\n ModRef::new(self.modio.clone(), self.game, id)\n\n }\n\n\n", "file_path": "src/mods.rs", "rank": 58, "score": 18.140579603825422 }, { "content": "//! Modfile interface\n\nuse std::ffi::OsStr;\n\nuse std::path::Path;\n\n\n\nuse mime::APPLICATION_OCTET_STREAM;\n\nuse tokio_io::AsyncRead;\n\nuse url::form_urlencoded;\n\n\n\nuse crate::multipart::{FileSource, FileStream};\n\nuse crate::prelude::*;\n\n\n\npub use crate::types::mods::{Download, File, FileHash};\n\n\n\n/// Interface for the modfiles the authenticated user uploaded.\n\npub struct MyFiles {\n\n modio: Modio,\n\n}\n\n\n\nimpl MyFiles {\n\n pub(crate) fn new(modio: Modio) -> Self {\n", "file_path": "src/files.rs", "rank": 59, "score": 17.917002016666846 }, { "content": "//! 
Me interface\n\nuse crate::files::MyFiles;\n\nuse crate::games::MyGames;\n\nuse crate::mods::MyMods;\n\nuse crate::prelude::*;\n\nuse crate::types::mods::Mod;\n\nuse crate::types::User;\n\n\n\npub use crate::types::mods::Rating;\n\npub use crate::types::{Event, EventType};\n\n\n\n/// Interface for resources owned by the authenticated user or is team member of.\n\npub struct Me {\n\n modio: Modio,\n\n}\n\n\n\nimpl Me {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n", "file_path": "src/me.rs", "rank": 60, "score": 17.6248745434128 }, { "content": " token_required!(s self.modio);\n\n let mut uri = vec![\"/me/files\".to_owned()];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.stream(&uri.join(\"?\"))\n\n }\n\n}\n\n\n\n/// Interface for the modfiles of a mod.\n\npub struct Files {\n\n modio: Modio,\n\n game: u32,\n\n mod_id: u32,\n\n}\n\n\n\nimpl Files {\n\n pub(crate) fn new(modio: Modio, game: u32, mod_id: u32) -> Self {\n\n Self {\n", "file_path": "src/files.rs", "rank": 61, "score": 17.428898071258963 }, { "content": "//! 
Users interface\n\nuse url::form_urlencoded;\n\n\n\nuse crate::prelude::*;\n\n\n\npub use crate::types::{Avatar, User};\n\n\n\n/// Interface for users.\n\npub struct Users {\n\n modio: Modio,\n\n}\n\n\n\nimpl Users {\n\n pub(crate) fn new(modio: Modio) -> Self {\n\n Self { modio }\n\n }\n\n\n\n /// List all users registered on [mod.io](https:://mod.io).\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n", "file_path": "src/users.rs", "rank": 62, "score": 17.357956176744842 }, { "content": " client,\n\n })\n\n }\n\n\n\n /// Configure the underlying `reqwest` client using `reqwest::async::ClientBuilder`.\n\n pub fn client<F>(mut self, f: F) -> Builder\n\n where\n\n F: FnOnce(ClientBuilder) -> ClientBuilder,\n\n {\n\n self.config.builder = Some(f(Client::builder()));\n\n self\n\n }\n\n\n\n /// Set the mod.io api host.\n\n ///\n\n /// Defaults to `\"https://api.mod.io/v1\"`\n\n pub fn host<S: Into<String>>(mut self, host: S) -> Builder {\n\n self.config.host = Some(host.into());\n\n self\n\n }\n", "file_path": "src/lib.rs", "rank": 63, "score": 17.068287380596782 }, { "content": " modio,\n\n path,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n pub fn list(&self) -> Future<List<Out>> {\n\n self.modio.get(&self.path)\n\n }\n\n\n\n pub fn iter(&self) -> Stream<Out> {\n\n self.modio.stream(&self.path)\n\n }\n\n\n\n /// [required: token]\n\n pub fn add<T: AddOptions + QueryString>(&self, options: &T) -> Future<()> {\n\n token_required!(self.modio);\n\n let params = options.to_query_string();\n\n Box::new(\n\n self.modio\n", "file_path": "src/lib.rs", "rank": 64, "score": 17.010811026282134 }, { "content": "//! 
Team members interface\n\nuse url::form_urlencoded;\n\n\n\nuse crate::prelude::*;\n\n\n\npub use crate::types::mods::{TeamLevel, TeamMember};\n\n\n\n/// Interface for the team members of a mod.\n\npub struct Members {\n\n modio: Modio,\n\n game: u32,\n\n mod_id: u32,\n\n}\n\n\n\nimpl Members {\n\n pub(crate) fn new(modio: Modio, game: u32, mod_id: u32) -> Self {\n\n Self {\n\n modio,\n\n game,\n\n mod_id,\n", "file_path": "src/teams.rs", "rank": 65, "score": 16.88717585530713 }, { "content": "\n\n /// Use native TLS backend.\n\n #[cfg(feature = \"default-tls\")]\n\n pub fn use_default_tls(mut self) -> Builder {\n\n self.config.tls = TlsBackend::Default;\n\n self\n\n }\n\n\n\n /// Use rustls TLS backend.\n\n #[cfg(feature = \"rustls-tls\")]\n\n pub fn use_rustls_tls(mut self) -> Builder {\n\n self.config.tls = TlsBackend::Rustls;\n\n self\n\n }\n\n}\n\n\n\n/// Endpoint interface to interacting with the [mod.io](https://mod.io) API.\n\n#[derive(Clone, Debug)]\n\npub struct Modio {\n\n host: String,\n", "file_path": "src/lib.rs", "rank": 66, "score": 16.880231385438904 }, { "content": " filter!(Level, LEVEL, \"level\", Eq, NotEq, In, Cmp, OrderBy);\n\n filter!(Position, POSITION, \"position\", Eq, NotEq, In, Like, OrderBy);\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InviteTeamMemberOptions {\n\n params: std::collections::BTreeMap<&'static str, String>,\n\n}\n\n\n\nimpl InviteTeamMemberOptions {\n\n pub fn new<T>(email: T, level: TeamLevel) -> InviteTeamMemberOptions\n\n where\n\n T: Into<String>,\n\n {\n\n let mut params = std::collections::BTreeMap::new();\n\n params.insert(\"email\", email.into());\n\n params.insert(\"level\", level.to_string());\n\n InviteTeamMemberOptions { params }\n\n }\n\n\n", "file_path": "src/teams.rs", "rank": 67, "score": 16.80945269912642 }, { "content": "#[doc(hidden)]\n\n#[deprecated(since = \"0.4.1\", note = \"Use `List`\")]\n\npub type ModioListResponse<T> = List<T>;\n\n\n\nmod prelude {\n\n pub use futures::{Future as StdFuture, 
Stream as StdStream};\n\n pub use reqwest::r#async::multipart::{Form, Part};\n\n pub use reqwest::r#async::Body;\n\n pub use reqwest::StatusCode;\n\n\n\n pub use crate::filter::Filter;\n\n pub use crate::EntityResult;\n\n pub use crate::List;\n\n pub use crate::Modio;\n\n pub(crate) use crate::ModioMessage;\n\n #[allow(deprecated)]\n\n pub use crate::ModioResult;\n\n pub use crate::QueryString;\n\n pub(crate) use crate::RequestBody;\n\n pub use crate::{AddOptions, DeleteOptions, Endpoint};\n", "file_path": "src/lib.rs", "rank": 68, "score": 16.781566954904967 }, { "content": "}\n\n\n\nimpl<T> List<T> {\n\n pub fn first(&self) -> Option<&T> {\n\n self.data.get(0)\n\n }\n\n\n\n pub fn shift(&mut self) -> Option<T> {\n\n if self.data.is_empty() {\n\n None\n\n } else {\n\n Some(self.data.remove(0))\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Index<usize> for List<T> {\n\n type Output = T;\n\n\n\n fn index(&self, index: usize) -> &Self::Output {\n", "file_path": "src/types.rs", "rank": 69, "score": 16.463999662817848 }, { "content": "use std::env;\n\nuse std::process;\n\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\n\nuse tokio::prelude::*;\n\nuse tokio::runtime::Runtime;\n\nuse tokio::timer::Interval;\n\n\n\nuse modio::error::Error;\n\nuse modio::filter::prelude::*;\n\nuse modio::QueryString;\n\nuse modio::{auth::Credentials, Modio};\n\n\n", "file_path": "examples/events.rs", "rank": 70, "score": 16.439067277353814 }, { "content": " pub fn build(self) -> Result<Modio> {\n\n let config = self.config;\n\n let host = config.host.unwrap_or_else(|| DEFAULT_HOST.to_string());\n\n let credentials = config.credentials;\n\n\n\n let client = {\n\n let mut builder = {\n\n let builder = config.builder.unwrap_or_else(Client::builder);\n\n #[cfg(feature = \"tls\")]\n\n match config.tls {\n\n #[cfg(feature = \"default-tls\")]\n\n TlsBackend::Default => builder.use_default_tls(),\n\n #[cfg(feature = \"rustls-tls\")]\n\n TlsBackend::Rustls => builder.use_rustls_tls(),\n\n }\n\n\n\n 
#[cfg(not(feature = \"tls\"))]\n\n builder\n\n };\n\n\n", "file_path": "src/lib.rs", "rank": 71, "score": 16.402658394835147 }, { "content": " } else {\n\n uri.parse().map_err(error::from).into_future()\n\n };\n\n\n\n let instance = self.clone();\n\n\n\n let response = url.and_then(move |url| {\n\n debug!(\"request: {} {}\", method, url);\n\n let mut req = instance.client.request(method, url.as_str());\n\n\n\n if let Credentials::Token(ref token) = instance.credentials {\n\n req = req.header(AUTHORIZATION, &*format!(\"Bearer {}\", token));\n\n }\n\n\n\n match body.into() {\n\n RequestBody::Body(body, mime) => {\n\n trace!(\"body: {}\", body);\n\n if let Some(mime) = mime {\n\n req = req.header(CONTENT_TYPE, &*mime.to_string());\n\n }\n", "file_path": "src/lib.rs", "rank": 72, "score": 16.35957531330047 }, { "content": "//! For API requests using API key authentication are **unlimited** and for OAuth 2 authentication\n\n//! requests are limited to **120 requests per hour**.\n\n//!\n\n//! A special error [ErrorKind::RateLimit](error/enum.ErrorKind.html#variant.RateLimit) will\n\n//! be return from api operations when the rate limit associated with credentials has been\n\n//! exhausted.\n\n//!\n\n//! # Example: Basic setup\n\n//!\n\n//! ```no_run\n\n//! use modio::{Credentials, Error, Modio};\n\n//! use tokio::runtime::Runtime;\n\n//!\n\n//! fn main() -> Result<(), Error> {\n\n//! let mut rt = Runtime::new().expect(\"new rt\");\n\n//! let modio = Modio::new(\n\n//! Credentials::ApiKey(String::from(\"user-or-game-api-key\")),\n\n//! )?;\n\n//!\n\n//! // create some tasks and execute them\n", "file_path": "src/lib.rs", "rank": 73, "score": 16.03229879134973 }, { "content": "macro_rules! 
token_required {\n\n ($m:expr) => {\n\n if let crate::auth::Credentials::ApiKey(_) = $m.credentials {\n\n return future_err!(crate::error::token_required());\n\n }\n\n };\n\n (s $m:expr) => {\n\n if let crate::auth::Credentials::ApiKey(_) = $m.credentials {\n\n return stream_err!(crate::error::token_required());\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! option {\n\n ($(#[$outer:meta])* $name:ident) => {\n\n option!($(#[$outer])* $name: Into<String>);\n\n };\n\n ($(#[$outer:meta])* $name:ident: Into<$T:ty>) => {\n\n $(#[$outer])*\n\n pub fn $name<T: Into<$T>>(self, value: T) -> Self {\n", "file_path": "src/macros.rs", "rank": 74, "score": 15.808677074132694 }, { "content": "use std::io;\n\nuse std::io::prelude::*;\n\nuse std::marker::PhantomData;\n\n\n\nuse futures::{future, stream, Future as StdFuture, IntoFuture, Stream as StdStream};\n\nuse log::{debug, log_enabled, trace};\n\nuse mime::Mime;\n\nuse reqwest::header::{HeaderMap, HeaderValue};\n\nuse reqwest::header::{AUTHORIZATION, CONTENT_TYPE, USER_AGENT};\n\nuse reqwest::r#async::multipart::Form;\n\nuse reqwest::r#async::{Client, ClientBuilder};\n\nuse reqwest::{Method, Proxy, StatusCode};\n\nuse serde::de::DeserializeOwned;\n\nuse url::Url;\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\npub mod auth;\n\n#[macro_use]\n", "file_path": "src/lib.rs", "rank": 75, "score": 15.803432918380729 }, { "content": " ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn iter(&self, filter: &Filter) -> Stream<TeamMember> {\n\n let mut uri = vec![self.path(\"\")];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.stream(&uri.join(\"?\"))\n\n }\n\n\n\n /// Add a team member by email. 
[required: token]\n\n pub fn add(&self, options: &InviteTeamMemberOptions) -> Future<()> {\n\n token_required!(self.modio);\n\n let params = options.to_query_string();\n\n Box::new(\n\n self.modio\n\n .post::<ModioMessage, _>(&self.path(\"\"), params)\n\n .map(|_| ()),\n\n )\n", "file_path": "src/teams.rs", "rank": 76, "score": 15.702440027168935 }, { "content": " pub name: String,\n\n #[serde(rename = \"type\")]\n\n pub kind: TagType,\n\n pub hidden: bool,\n\n pub tags: Vec<String>,\n\n }\n\n\n\n #[derive(Debug, Deserialize)]\n\n #[serde(rename_all = \"lowercase\")]\n\n pub enum TagType {\n\n Checkboxes,\n\n Dropdown,\n\n }\n\n\n\n impl fmt::Display for TagType {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {\n\n match *self {\n\n TagType::Checkboxes => write!(fmt, \"checkboxes\"),\n\n TagType::Dropdown => write!(fmt, \"dropdown\"),\n\n }\n", "file_path": "src/types.rs", "rank": 77, "score": 15.694274696879948 }, { "content": "\n\n/// See the [Message Object](https://docs.mod.io/#message-object) docs for more information.\n\n#[derive(Debug, Deserialize)]\n\npub struct ModioMessage {\n\n pub code: u16,\n\n pub message: String,\n\n}\n\n\n\n#[doc(hidden)]\n\n#[deprecated(since = \"0.4.1\", note = \"Use `EntityResult`\")]\n\npub type ModioResult<T> = EntityResult<T>;\n\n\n\n/// Result type for editing games, mods and files.\n\n#[derive(Debug, Deserialize)]\n\n#[serde(untagged)]\n\npub enum EntityResult<T> {\n\n Entity(T),\n\n /// The request was successful however no new data was submitted.\n\n #[serde(deserialize_with = \"deserialize_message\")]\n\n NoChanges,\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 78, "score": 15.61681691957911 }, { "content": " pub use crate::{Future, Stream};\n\n}\n\n\n\n/// Re-exports of the used reqwest types.\n\npub mod client {\n\n pub use reqwest::header;\n\n pub use reqwest::r#async::ClientBuilder;\n\n pub use reqwest::RedirectPolicy;\n\n #[cfg(feature = \"tls\")]\n\n pub use reqwest::{Certificate, 
Identity};\n\n pub use reqwest::{Proxy, Url};\n\n}\n\n\n\n#[allow(dead_code)]\n\nconst X_RATELIMIT_LIMIT: &str = \"x-ratelimit-limit\";\n\nconst X_RATELIMIT_REMAINING: &str = \"x-ratelimit-remaining\";\n\nconst X_RATELIMIT_RETRY_AFTER: &str = \"x-ratelimit-retryafter\";\n\n\n\n/// A `Builder` can be used to create a `Modio` client with custom configuration.\n\npub struct Builder {\n\n config: Config,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 79, "score": 15.462966710006016 }, { "content": " client: Client,\n\n pub(crate) credentials: Credentials,\n\n}\n\n\n\nimpl Modio {\n\n /// Constructs a new `Builder` to configure a `Modio` client.\n\n ///\n\n /// This is the same as `Builder::new(credentials)`.\n\n pub fn builder<C: Into<Credentials>>(credentials: C) -> Builder {\n\n Builder::new(credentials)\n\n }\n\n\n\n /// Create an endpoint to [https://api.mod.io/v1](https://docs.mod.io/#mod-io-api-v1).\n\n pub fn new<C>(credentials: C) -> Result<Self>\n\n where\n\n C: Into<Credentials>,\n\n {\n\n Builder::new(credentials).build()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 80, "score": 15.439497074106217 }, { "content": " pub fn add(&self, options: AddFileOptions) -> Future<File> {\n\n token_required!(self.modio);\n\n self.modio.post_form(&self.path(\"\"), options)\n\n }\n\n}\n\n\n\n/// Reference interface of a modfile.\n\npub struct FileRef {\n\n modio: Modio,\n\n game: u32,\n\n mod_id: u32,\n\n id: u32,\n\n}\n\n\n\nimpl FileRef {\n\n pub(crate) fn new(modio: Modio, game: u32, mod_id: u32, id: u32) -> Self {\n\n Self {\n\n modio,\n\n game,\n\n mod_id,\n", "file_path": "src/files.rs", "rank": 81, "score": 15.438151026149018 }, { "content": "use std::env;\n\nuse std::process;\n\nuse tokio::runtime::Runtime;\n\n\n\nuse modio::error::Error;\n\nuse modio::filter::prelude::*;\n\nuse modio::{auth::Credentials, Modio};\n\n\n", "file_path": "examples/mymods.rs", "rank": 82, "score": 15.372803870842791 }, { "content": " ErrorKind::Auth(_) => true,\n\n _ => false,\n\n 
}\n\n }\n\n\n\n pub fn is_validation(&self) -> bool {\n\n match *self.inner {\n\n ErrorKind::Validation(_, _) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn kind(&self) -> &ErrorKind {\n\n &self.inner\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ErrorKind {\n\n Message(String),\n", "file_path": "src/error.rs", "rank": 83, "score": 15.31995212309224 }, { "content": "\n\n /// Use the mod.io api test host.\n\n pub fn use_test(mut self) -> Builder {\n\n self.config.host = Some(TEST_HOST.into());\n\n self\n\n }\n\n\n\n /// Set the user agent used for every request.\n\n ///\n\n /// Defaults to `\"modio/{version}\"`\n\n pub fn agent<S: Into<String>>(mut self, agent: S) -> Builder {\n\n self.config.agent = Some(agent.into());\n\n self\n\n }\n\n\n\n /// Add a `Proxy` to the list of proxies the client will use.\n\n pub fn proxy(mut self, proxy: Proxy) -> Builder {\n\n self.config.proxies.push(proxy);\n\n self\n\n }\n", "file_path": "src/lib.rs", "rank": 84, "score": 15.181719657242967 }, { "content": " modio,\n\n game,\n\n mod_id,\n\n }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games/{}/mods/{}/files{}\", self.game, self.mod_id, more)\n\n }\n\n\n\n /// Return all files that are published for a mod this `Files` refers to.\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<File>> {\n\n let mut uri = vec![self.path(\"\")];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.get(&uri.join(\"?\"))\n", "file_path": "src/files.rs", "rank": 85, "score": 15.114766128214322 }, { "content": " sketchfab: Option<Vec<String>>,\n\n}\n\n\n\nimpl DeleteMediaOptions {\n\n pub fn images(self, images: &[String]) -> Self {\n\n Self {\n\n images: Some(images.to_vec()),\n\n ..self\n\n }\n\n }\n\n\n\n pub fn youtube(self, urls: &[String]) -> Self {\n\n Self {\n\n youtube: Some(urls.to_vec()),\n\n ..self\n\n }\n\n }\n\n\n\n pub fn 
sketchfab(self, urls: &[String]) -> Self {\n\n Self {\n", "file_path": "src/mods.rs", "rank": 86, "score": 15.08255679352651 }, { "content": " pub fn header<P: AsRef<Path>>(self, header: P) -> Self {\n\n let header = header.as_ref();\n\n let filename = header\n\n .file_name()\n\n .and_then(OsStr::to_str)\n\n .map_or_else(String::new, ToString::to_string);\n\n\n\n Self {\n\n header: Some(FileSource {\n\n inner: FileStream::open(header),\n\n filename,\n\n mime: IMAGE_STAR,\n\n }),\n\n ..self\n\n }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl From<GameMediaOptions> for Form {\n", "file_path": "src/games.rs", "rank": 87, "score": 15.060040816761013 }, { "content": "use std::env;\n\nuse std::io::{self, Write};\n\nuse std::process;\n\n\n\nuse md5;\n\nuse tokio::runtime::Runtime;\n\n\n\nuse modio::error::Error;\n\nuse modio::{auth::Credentials, Modio};\n\n\n", "file_path": "examples/download.rs", "rank": 88, "score": 15.01066403387415 }, { "content": "//! // let result = rt.block_on(task)?;\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n//!\n\n//! For testing purposes use [`Modio::host`](struct.Modio.html#method.host) to create a client for the\n\n//! mod.io [test environment](https://docs.mod.io/#testing).\n\n//!\n\n//! # Example: Chaining api requests\n\n//!\n\n//! ```no_run\n\n//! use modio::{Credentials, Error, Modio};\n\n//! use tokio::prelude::*;\n\n//! use tokio::runtime::Runtime;\n\n//!\n\n//! fn main() -> Result<(), Error> {\n\n//! let mut rt = Runtime::new().expect(\"new rt\");\n\n//! let modio = Modio::new(\n\n//! Credentials::ApiKey(String::from(\"user-or-game-api-key\")),\n\n//! 
)?;\n", "file_path": "src/lib.rs", "rank": 89, "score": 14.974262979628666 }, { "content": "}\n\n\n\nimpl QueryString for EditModOptions {\n\n fn to_query_string(&self) -> String {\n\n form_urlencoded::Serializer::new(String::new())\n\n .extend_pairs(&self.params)\n\n .finish()\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[deprecated(since = \"0.4.1\", note = \"Use `EditDependenciesOptions`\")]\n\npub type EditDepencenciesOptions = EditDependenciesOptions;\n\n\n\npub struct EditDependenciesOptions {\n\n dependencies: Vec<u32>,\n\n}\n\n\n\nimpl EditDependenciesOptions {\n\n pub fn new(dependencies: &[u32]) -> Self {\n", "file_path": "src/mods.rs", "rank": 90, "score": 14.931277753951296 }, { "content": "\n\nimpl Builder {\n\n /// Constructs a new `Builder`.\n\n ///\n\n /// This is the same as `Modio::builder(credentials)`.\n\n pub fn new<C: Into<Credentials>>(credentials: C) -> Builder {\n\n Builder {\n\n config: Config {\n\n host: None,\n\n agent: None,\n\n credentials: credentials.into(),\n\n builder: None,\n\n proxies: Vec::new(),\n\n #[cfg(feature = \"tls\")]\n\n tls: TlsBackend::default(),\n\n },\n\n }\n\n }\n\n\n\n /// Returns a `Modio` client that uses this `Builder` configuration.\n", "file_path": "src/lib.rs", "rank": 91, "score": 14.863487937091433 }, { "content": "impl StdError for ClientError {}\n\n\n\nimpl fmt::Display for ClientError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut buf = String::new();\n\n buf.push_str(&self.message);\n\n if let Some(ref errors) = self.errors {\n\n for (k, v) in errors {\n\n buf.push('\\n');\n\n buf.push_str(\" \");\n\n buf.push_str(&k);\n\n buf.push_str(\": \");\n\n buf.push_str(&v);\n\n }\n\n }\n\n fmt::Display::fmt(&buf, f)\n\n }\n\n}\n\n\n\npub(crate) fn apikey_required() -> Error {\n", "file_path": "src/error.rs", "rank": 92, "score": 14.835124808021408 }, { "content": " User(u32),\n\n}\n\n\n\nimpl Report {\n\n pub fn new<S: Into<String>>(name: S, summary: S, kind: ReportType, resource: 
Resource) -> Self {\n\n Self {\n\n name: name.into(),\n\n summary: summary.into(),\n\n kind,\n\n resource,\n\n }\n\n }\n\n}\n\n\n\nimpl QueryString for Report {\n\n fn to_query_string(&self) -> String {\n\n let (resource, id) = match self.resource {\n\n Resource::Game(id) => (\"games\", id),\n\n Resource::Mod(id) => (\"mods\", id),\n\n Resource::User(id) => (\"users\", id),\n", "file_path": "src/reports.rs", "rank": 93, "score": 14.530857855449447 }, { "content": " }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/games/{}/mods/{}/team{}\", self.game, self.mod_id, more)\n\n }\n\n\n\n /// List all team members.\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<TeamMember>> {\n\n let mut uri = vec![self.path(\"\")];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.get(&uri.join(\"?\"))\n\n }\n\n\n\n /// Provids a stream over all team members.\n", "file_path": "src/teams.rs", "rank": 94, "score": 14.46377319884344 }, { "content": " .iter()\n\n .map(|d| (\"dependencies[]\", d.to_string())),\n\n )\n\n .finish()\n\n }\n\n}\n\n\n\npub struct EditTagsOptions {\n\n tags: Vec<String>,\n\n}\n\n\n\nimpl EditTagsOptions {\n\n pub fn new(tags: &[String]) -> Self {\n\n Self {\n\n tags: tags.to_vec(),\n\n }\n\n }\n\n}\n\n\n\nimpl AddOptions for EditTagsOptions {}\n", "file_path": "src/mods.rs", "rank": 95, "score": 14.389171651260545 }, { "content": "\n\n /// Performs a download into a writer.\n\n ///\n\n /// Fails with [`ErrorKind::Download`](error/enum.ErrorKind.html#variant.Download) if a primary file,\n\n /// a specific file or a specific version is not found.\n\n /// # Example\n\n /// ```no_run\n\n /// use std::fs::File;\n\n ///\n\n /// use modio::download::ResolvePolicy;\n\n /// use modio::{Credentials, DownloadAction, Error, Modio};\n\n /// use tokio::runtime::Runtime;\n\n ///\n\n /// fn main() -> Result<(), Error> {\n\n /// 
let mut rt = Runtime::new().expect(\"new rt\");\n\n /// let modio = Modio::new(\n\n /// Credentials::ApiKey(String::from(\"user-or-game-api-key\")),\n\n /// )?;\n\n /// let out = File::create(\"mod.zip\").expect(\"new file\");\n\n ///\n", "file_path": "src/lib.rs", "rank": 96, "score": 14.292432814216927 }, { "content": "pub enum AuthenticationError {\n\n ApiKeyRequired,\n\n TokenRequired,\n\n}\n\n\n\nimpl fmt::Display for AuthenticationError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n AuthenticationError::ApiKeyRequired => f.write_str(\"API key is required\"),\n\n AuthenticationError::TokenRequired => f.write_str(\"Authentication token is required\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum DownloadError {\n\n /// The mod has no primary file.\n\n NoPrimaryFile { game_id: u32, mod_id: u32 },\n\n /// The specific file of a mod was not found.\n\n FileNotFound {\n", "file_path": "src/error.rs", "rank": 97, "score": 14.191464979231666 }, { "content": " Self { modio }\n\n }\n\n\n\n /// Return all modfiles the authenticated user uploaded. [required: token]\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn list(&self, filter: &Filter) -> Future<List<File>> {\n\n token_required!(self.modio);\n\n let mut uri = vec![\"/me/files\".to_owned()];\n\n let query = filter.to_query_string();\n\n if !query.is_empty() {\n\n uri.push(query);\n\n }\n\n self.modio.get(&uri.join(\"?\"))\n\n }\n\n\n\n /// Provides a stream over all modfiles the authenticated user uploaded. 
[required: token]\n\n ///\n\n /// See [Filters and sorting](filters/index.html).\n\n pub fn iter(&self, filter: &Filter) -> Stream<File> {\n", "file_path": "src/files.rs", "rank": 98, "score": 14.167220267452942 }, { "content": " logo: Option<FileSource>,\n\n icon: Option<FileSource>,\n\n header: Option<FileSource>,\n\n}\n\n\n\nimpl GameMediaOptions {\n\n pub fn logo<P: AsRef<Path>>(self, logo: P) -> Self {\n\n let logo = logo.as_ref();\n\n let filename = logo\n\n .file_name()\n\n .and_then(OsStr::to_str)\n\n .map_or_else(String::new, ToString::to_string);\n\n\n\n Self {\n\n logo: Some(FileSource {\n\n inner: FileStream::open(logo),\n\n filename,\n\n mime: IMAGE_STAR,\n\n }),\n\n ..self\n", "file_path": "src/games.rs", "rank": 99, "score": 14.066494863806346 } ]
Rust
kg-syntax/src/lexer/nfa.rs
kodegenix/kg-lang
baa79ac7fb8babe82b6b8eed2af34d869523d8f4
use super::*; #[derive(Debug, Clone)] pub struct Nfa { states: Vec<State>, } impl Nfa { pub fn from_program(prog: &Program) -> Nfa { Builder::new().build(prog) } pub fn states(&self) -> &[State] { &self.states } } impl std::fmt::Display for Nfa { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for (i, s) in self.states.iter().enumerate() { write!(f, "({:4}): {}", i, s)?; } Ok(()) } } #[derive(Debug, Clone)] pub struct State { edges: Vec<Edge>, accept: Option<Accept>, } impl State { fn from(state: StateEx) -> State { debug_assert!(state.accepts.len() <= 1); debug_assert!(state.edges.len() > 0 || state.accepts.len() == 1); State { edges: state.edges, accept: state.accepts.get(0).cloned(), } } pub fn edges(&self) -> &[Edge] { &self.edges } pub fn accept(&self) -> Option<Accept> { self.accept } } impl std::fmt::Display for State { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(a) = self.accept { write!(f, "{}\n", a)?; } else { write!(f, "\n")?; } if self.edges.is_empty() { write!(f, " -\n")?; } else { for e in self.edges.iter() { write!(f, " {}\n", e)?; } } Ok(()) } } struct Builder { states: Vec<StateEx>, } impl Builder { fn new() -> Builder { Builder { states: Vec::new(), } } fn merge_epsilon_states(&mut self, prog: &Program) { fn resolve_edges(pc: u32, edges: &mut SparseSet<u32>, prog: &Program, level: usize) { debug_assert!(level < prog.code().len()); if let Opcode::Split(g1, g2) = *prog.opcode(pc) { resolve_edges(g1, edges, prog, level + 1); resolve_edges(g2, edges, prog, level + 1); } else { edges.insert(pc); } } fn add_code(state: &mut StateEx, pc: u32, edges: &HashMap<u32, SparseSet<u32>>, prog: &Program, nested: bool) { debug_assert!(!state.merged_states.contains(&pc)); match *prog.opcode(pc) { Opcode::Match(m) => { state.accepts.push(Accept::new(pc, m)); state.merged_states.insert(pc); } Opcode::Byte(g, b) => { state.edges.push(Edge::new(b, g)); state.merged_states.insert(pc); } Opcode::Range(g, a, b) => 
{ for i in a..=b { state.edges.push(Edge::new(i, g)); } state.merged_states.insert(pc); } Opcode::Mask(g, m) => { let m = prog.mask(m); for i in m.iter() { state.edges.push(Edge::new(i, g)); } state.merged_states.insert(pc); } Opcode::Split(..) => { debug_assert!(!nested); for s in edges.get(&pc).unwrap().iter().cloned() { add_code(state, s, edges, prog, true); } } } } let mut edge_map: HashMap<u32, SparseSet<u32>> = HashMap::with_capacity(prog.code().len()); for pc in 0..prog.code().len() as u32 { if let Opcode::Split(..) = prog.opcode(pc) { let mut e = SparseSet::with_capacity(prog.code().len()); resolve_edges(pc, &mut e, prog, 0); edge_map.insert(pc, e); } } let mut state_map = vec![EMPTY_GOTO; prog.code().len()]; let mut states: Vec<StateEx> = Vec::with_capacity(prog.code().len()); let mut pc = 0; let mut done = false; while !done { let mut state = StateEx::new(prog.code().len()); add_code(&mut state, pc, &edge_map, prog, false); let mut found = false; for (i, s) in states.iter().enumerate() { if *s == state { state_map[pc as usize] = i as u32; found = true; break; } } if !found { state_map[pc as usize] = states.len() as u32; states.push(state); } done = true; 'search_loop: for s in states.iter() { for e in s.edges.iter().cloned() { if state_map[e.state() as usize] == EMPTY_GOTO { pc = e.state(); done = false; break 'search_loop; } } } } for s in states.iter_mut() { for e in s.edges.iter_mut() { let t = state_map[e.state() as usize]; debug_assert_ne!(t, EMPTY_GOTO); *e = Edge::new(e.value(), t); } } self.states = states; } fn build(mut self, prog: &Program) -> Nfa { self.merge_epsilon_states(prog); let mut states = Vec::with_capacity(self.states.len()); for state in self.states { debug_assert!(state.accepts.len() <= 1); states.push(State::from(state)); } Nfa { states } } } #[cfg(test)] mod tests { use super::*; #[test] fn test1() { let re = Regex::parse("aa*bb+|bb").unwrap(); let p = Program::from_regex(&re, 1); println!("{}", p); println!("{}", 
Nfa::from_program(&p)); } }
use super::*; #[derive(Debug, Clone)] pub struct Nfa { states: Vec<State>, } impl Nfa { pub fn from_program(prog: &Program) -> Nfa { Builder::new().build(prog) } pub fn states(&self) -> &[State] { &self.states } } impl std::fmt::Display for Nfa { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for (i, s) in self.states.iter().enumerate() { write!(f, "({:4}): {}", i, s)?; } Ok(()) } } #[derive(Debug, Clone)] pub struct State { edges: Vec<Edge>, accept: Option<Accept>, } impl State { fn from(state: StateEx) -> State { debug_assert!(state.accepts.len() <= 1); debug_assert!(state.edges.len() > 0 || state.accepts.len() == 1); State { edges: state.edges, accept: state.accepts.get(0).cloned(), } } pub fn edges(&self) -> &[Edge] { &self.edges } pub fn accept(&self) -> Option<Accept> { self.accept } } impl std::fmt::Display for State { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(a) = self.accept { write!(f, "{}\n", a)?; } else { write!(f, "\n")?; } if self.edges.is_empty() { write!(f, " -\n")?; } else { for e in self.edges.iter() { write!(f, " {}\n", e)?; } } Ok(()) } } struct Builder { states: Vec<StateEx>, } impl Builder { fn new() -> Builder { Builder { states: Vec::new(), } } fn merge_epsilon_states(&mut self, prog: &Program) { fn resolve_edges(pc: u32, edges: &mut SparseSet<u32>, prog: &Program, level: usize) { debug_assert!(level < prog.code().len()); if let Opcode::Split(g1, g2) = *prog.opcode(pc) { resolve_edges(g1, edges, prog, level + 1); resolve_edges(g2, edges, prog, level + 1); } else { edges.insert(pc); } } fn add_code(state: &mut StateEx, pc: u32, edges: &HashMap<u32, SparseSet<u32>>, prog: &Program, nested:
ates.iter().enumerate() { if *s == state { state_map[pc as usize] = i as u32; found = true; break; } } if !found { state_map[pc as usize] = states.len() as u32; states.push(state); } done = true; 'search_loop: for s in states.iter() { for e in s.edges.iter().cloned() { if state_map[e.state() as usize] == EMPTY_GOTO { pc = e.state(); done = false; break 'search_loop; } } } } for s in states.iter_mut() { for e in s.edges.iter_mut() { let t = state_map[e.state() as usize]; debug_assert_ne!(t, EMPTY_GOTO); *e = Edge::new(e.value(), t); } } self.states = states; } fn build(mut self, prog: &Program) -> Nfa { self.merge_epsilon_states(prog); let mut states = Vec::with_capacity(self.states.len()); for state in self.states { debug_assert!(state.accepts.len() <= 1); states.push(State::from(state)); } Nfa { states } } } #[cfg(test)] mod tests { use super::*; #[test] fn test1() { let re = Regex::parse("aa*bb+|bb").unwrap(); let p = Program::from_regex(&re, 1); println!("{}", p); println!("{}", Nfa::from_program(&p)); } }
bool) { debug_assert!(!state.merged_states.contains(&pc)); match *prog.opcode(pc) { Opcode::Match(m) => { state.accepts.push(Accept::new(pc, m)); state.merged_states.insert(pc); } Opcode::Byte(g, b) => { state.edges.push(Edge::new(b, g)); state.merged_states.insert(pc); } Opcode::Range(g, a, b) => { for i in a..=b { state.edges.push(Edge::new(i, g)); } state.merged_states.insert(pc); } Opcode::Mask(g, m) => { let m = prog.mask(m); for i in m.iter() { state.edges.push(Edge::new(i, g)); } state.merged_states.insert(pc); } Opcode::Split(..) => { debug_assert!(!nested); for s in edges.get(&pc).unwrap().iter().cloned() { add_code(state, s, edges, prog, true); } } } } let mut edge_map: HashMap<u32, SparseSet<u32>> = HashMap::with_capacity(prog.code().len()); for pc in 0..prog.code().len() as u32 { if let Opcode::Split(..) = prog.opcode(pc) { let mut e = SparseSet::with_capacity(prog.code().len()); resolve_edges(pc, &mut e, prog, 0); edge_map.insert(pc, e); } } let mut state_map = vec![EMPTY_GOTO; prog.code().len()]; let mut states: Vec<StateEx> = Vec::with_capacity(prog.code().len()); let mut pc = 0; let mut done = false; while !done { let mut state = StateEx::new(prog.code().len()); add_code(&mut state, pc, &edge_map, prog, false); let mut found = false; for (i, s) in st
function_block-random_span
[ { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Proc(u32, u32);\n\n\n\nimpl Proc {\n\n fn is_empty(&self) -> bool {\n\n self.0 == EMPTY_GOTO\n\n }\n\n\n\n fn merge(&mut self, p: Proc) -> Proc {\n\n if self.is_empty() {\n\n *self = p;\n\n } else if !p.is_empty() {\n\n debug_assert!(self.1 == p.0);\n\n self.1 = p.1;\n\n }\n\n *self\n\n }\n\n}\n\n\n\nimpl Default for Proc {\n\n fn default() -> Proc {\n\n Proc(EMPTY_GOTO, EMPTY_GOTO)\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 0, "score": 143968.91269323404 }, { "content": "#[derive(Debug)]\n\nstruct Builder {\n\n states: Vec<StateEx>,\n\n}\n\n\n\nimpl Builder {\n\n fn new() -> Self {\n\n Builder {\n\n states: Vec::new(),\n\n }\n\n }\n\n\n\n fn build(mut self, nfa: &Nfa) -> Dfa {\n\n let mut state = StateEx::new(nfa.states().len());\n\n state.merged_states.insert(0);\n\n self.states.push(state);\n\n\n\n let mut stack_queue = VecDeque::new();\n\n stack_queue.push_back(0);\n\n\n\n let mut merged_states = SparseSet::with_capacity(nfa.states().len());\n", "file_path": "kg-syntax/src/lexer/dfa.rs", "rank": 2, "score": 110781.31116516018 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n index: usize,\n\n actions: OrdSet<ActionEdge>,\n\n gotos: OrdSet<GotoEdge>,\n\n}\n\n\n\nimpl State {\n\n fn new(index: usize) -> State {\n\n State {\n\n index,\n\n actions: OrdSet::new(),\n\n gotos: OrdSet::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ClrParser {\n\n grammar: GrammarRef,\n\n states: Vec<State>,\n", "file_path": "kg-syntax/src/parser/clr/mod.rs", "rank": 3, "score": 108564.63297583951 }, { "content": "pub fn build(mut parser: ClrParser) -> Result<ClrParser, Error> {\n\n {\n\n let g = parser.grammar.borrow();\n\n let grammar = GrammarExt::new(&g);\n\n\n\n let mut states: Vec<RefCell<State>> = Vec::with_capacity(1024);\n\n states.push(RefCell::new(State::new(0)));\n\n\n\n let mut itemsets: Vec<ItemSet> = Vec::with_capacity(1024);\n\n 
itemsets.push(ItemSet::first(grammar.production(0), &grammar));\n\n\n\n fn build_transitions(s1: &mut State, s2: &State, symbol: Symbol) {\n\n match symbol {\n\n Symbol::Production(p) => {\n\n s1.gotos.insert(GotoEdge::new(p, s2.index));\n\n }\n\n Symbol::Terminal(t) => {\n\n s1.actions.insert(ActionEdge::new(t, Action::Shift(s2.index)));\n\n }\n\n }\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 4, "score": 107444.07587517939 }, { "content": "pub fn parse(r: &mut dyn CharReader) -> Result<GrammarRef, Error> {\n\n let g = GrammarRef::new();\n\n\n\n let mut p = Parser::new();\n\n\n\n let mut pg = p.parse(r)?;\n\n\n\n let mut modes: Vec<Mode> = Vec::with_capacity(32);\n\n let mut channels: Vec<Channel> = Vec::with_capacity(32);\n\n let mut terminals: Vec<Lexeme> = Vec::with_capacity(128);\n\n let mut productions: Vec<Production> = Vec::with_capacity(128);\n\n let mut rules: Vec<Rule> = Vec::with_capacity(128);\n\n\n\n {\n\n let t0 = Lexeme::new(0, \"$\", Regex::Empty, &g);\n\n terminals.push(t0);\n\n\n\n let p0 = Production::new(0, \"@s\", &g);\n\n productions.push(p0);\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 5, "score": 107074.30215382492 }, { "content": "pub fn parse_lexer_def(reader: &mut dyn CharReader) -> Result<LexerDef, Error> {\n\n let mut lexemes = Vec::new();\n\n let mut default_action = None;\n\n while let Some(lexeme) = parse_lexeme_def_opt(reader)? 
{\n\n if lexeme.regex == Regex::Empty {\n\n if default_action.is_some() {\n\n // cannot have more then one catch-all rule\n\n return Err(Error::new(ErrorDetail::Undefined));\n\n } else {\n\n default_action = lexeme.action;\n\n }\n\n } else {\n\n lexemes.push(lexeme);\n\n }\n\n }\n\n Ok(LexerDef {\n\n name: String::new(),\n\n lexemes,\n\n default_action,\n\n })\n\n}\n\n\n", "file_path": "kg-syntax-gen/src/parse.rs", "rank": 6, "score": 101577.28214912835 }, { "content": "pub fn parse_lexeme_def_opt(reader: &mut dyn CharReader) -> Result<Option<LexemeDef>, Error> {\n\n let mut lexeme = LexemeDef {\n\n name: String::new(),\n\n label: None,\n\n regex: Regex::Empty,\n\n action: None,\n\n };\n\n reader.skip_whitespace()?;\n\n if let Some('.') = reader.peek_char(0)? {\n\n // parse default (catch all) rule\n\n reader.skip_chars(1)?;\n\n reader.skip_whitespace()?;\n\n if let Some(':') = reader.peek_char(0)? {\n\n reader.next_char()?;\n\n lexeme.action = parse_action(reader)?;\n\n if lexeme.action.is_none() {\n\n // catch-all rule must have an action\n\n return Err(Error::new(ErrorDetail::Undefined));\n\n }\n\n } else {\n", "file_path": "kg-syntax-gen/src/parse.rs", "rank": 7, "score": 96328.84194898434 }, { "content": "fn write_generation_report(w: &mut dyn Write, grammar: &GrammarExt, itemsets: &Vec<ItemSet>, states: &Vec<RefCell<State>>) -> fmt::Result {\n\n write!(w, \"grammar:\\n{}\", grammar)?;\n\n\n\n write!(w, \"\\nitemsets:\\n\")?;\n\n for (index, itemset) in itemsets.iter().enumerate() {\n\n write!(w, \"{}: {}\\n{}\",\n\n index,\n\n SymbolDisp::symbol(itemset.symbol, &grammar),\n\n ItemSetDisp::new(itemset, &grammar))?;\n\n }\n\n\n\n write!(w, \"\\nstates:\\n\")?;\n\n write!(w, \"{:8}\", \"#\")?;\n\n for t in grammar.terminals().iter() {\n\n write!(w, \"{:8}\", format!(\"{}\", SymbolDisp::terminal(t.index(), &grammar)))?;\n\n }\n\n for p in grammar.productions().iter().skip(1) {\n\n write!(w, \"{:8}\", format!(\"{}\", SymbolDisp::production(p.index(), 
&grammar)))?;\n\n }\n\n write!(w, \"\\n{0:-<1$}\\n\", \"\", (grammar.terminals().len() + grammar.productions().len()) * 8)?;\n\n for state in states.iter() {\n\n write!(w, \"{}\\n\", StateDisp::new(&state.borrow(), &grammar))?;\n\n }\n\n write!(w, \"\\n\")?;\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 8, "score": 90019.95632993072 }, { "content": "fn skip_ws(r: &mut dyn CharReader, allow_nested_comments: bool) -> Result<bool, Error> {\n\n let mut level = 0;\n\n let mut skipped = false;\n\n while let Some(c) = r.peek_char(0)? {\n\n if c == '/' {\n\n if let Some(c) = r.peek_char(1)? {\n\n if level == 0 && c == '/' {\n\n r.skip_until(&mut |c| c == '\\n')?;\n\n skipped = true;\n\n } else if c == '*' {\n\n r.skip_chars(2)?;\n\n if allow_nested_comments {\n\n level += 1;\n\n } else {\n\n level = 1;\n\n }\n\n skipped = true;\n\n } else if level > 0 {\n\n r.next_char()?;\n\n } else {\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 9, "score": 89277.60011654909 }, { "content": "#[derive(Debug)]\n\nstruct Compiler {\n\n matching: usize,\n\n prog: Program,\n\n}\n\n\n\nimpl Compiler {\n\n pub fn new(matching: usize) -> Compiler {\n\n Compiler {\n\n matching,\n\n prog: Program::new(),\n\n }\n\n }\n\n\n\n fn byte(&mut self, b: u8) -> Proc {\n\n let n = self.prog.opcode_count();\n\n self.prog.add_opcode(Opcode::Byte(n + 1, b));\n\n Proc(n, n + 1)\n\n }\n\n\n\n fn range(&mut self, a: u8, b: u8) -> Proc {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 10, "score": 86261.9023892498 }, { "content": "#[test]\n\nfn parse_nested_alternative() {\n\n let r = \"abc(a|b)abc\";\n\n assert_eq!(format!(\"{}\", Regex::parse(r).unwrap()), r);\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 11, "score": 83067.45678405295 }, { "content": "pub fn build_dfa(lexer: &LexerDef) -> Dfa {\n\n let mut progs = Vec::with_capacity(lexer.lexemes.len());\n\n for (i, lexeme) in lexer.lexemes.iter().enumerate() {\n\n let p = 
Program::from_regex(&lexeme.regex, i + 1);\n\n progs.push(p);\n\n }\n\n let p = Program::merge(&progs);\n\n let nfa = Nfa::from_program(&p);\n\n Dfa::from_nfa(&nfa)\n\n}\n\n\n", "file_path": "kg-syntax-gen/src/lex.rs", "rank": 12, "score": 83039.90251513541 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct GotoEdge {\n\n production: usize,\n\n state: usize,\n\n}\n\n\n\nimpl GotoEdge {\n\n fn new(production: usize, state: usize) -> GotoEdge {\n\n GotoEdge {\n\n production,\n\n state,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/parser/clr/mod.rs", "rank": 13, "score": 82574.77420045409 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct ActionEdge {\n\n terminal: usize,\n\n action: Action,\n\n}\n\n\n\nimpl ActionEdge {\n\n fn new(terminal: usize, action: Action) -> ActionEdge {\n\n ActionEdge {\n\n terminal,\n\n action,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/parser/clr/mod.rs", "rank": 14, "score": 82574.77420045409 }, { "content": "struct StateDisp<'a> {\n\n state: &'a State,\n\n grammar: &'a Grammar,\n\n}\n\n\n\nimpl<'a> StateDisp<'a> {\n\n fn new(state: &'a State, grammar: &'a Grammar) -> StateDisp<'a> {\n\n StateDisp {\n\n state,\n\n grammar,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for StateDisp<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:<8}\", self.state.index)?;\n\n for t in self.grammar.terminals().iter() {\n\n let actions: Vec<&ActionEdge> = self.state\n\n .actions\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 15, "score": 79923.033358988 }, { "content": "pub fn gen_lexer(input_path: &Path, output_path: &Path) -> Result<(), Error> {\n\n let f = FileBuffer::open(input_path)?;\n\n let mut r = f.char_reader();\n\n let mut l = self::parse::parse_lexer_def(&mut r)?;\n\n if l.name.is_empty() {\n\n l.name = input_path.file_stem().map(|s| s.to_str().unwrap_or(\"\")).unwrap_or(\"\").into();\n\n }\n\n 
l.name = l.name.to_class_case();\n\n let dfa = build_dfa(&l);\n\n let code = gen_dfa_lexer(&l, &dfa).map_err(|e| ParseDiag::new(IoErrorDetail::from(e.kind())))?;\n\n fs::write(output_path, code)?;\n\n Ok(())\n\n}\n", "file_path": "kg-syntax-gen/src/lib.rs", "rank": 16, "score": 71301.02550820018 }, { "content": "fn parse_action(reader: &mut dyn CharReader) -> Result<Option<String>, Error> {\n\n reader.skip_whitespace_nonl()?;\n\n\n\n let p1 = reader.position();\n\n if let Some('{') = reader.peek_char(0)? {\n\n //FIXME (jc) skip string literals (at least quoted curly braces)\n\n let mut level = 1;\n\n while let Some(c) = reader.next_char()? {\n\n if c == '{' {\n\n level += 1;\n\n } else if c == '}' {\n\n level -= 1;\n\n if level == 0 {\n\n break;\n\n }\n\n }\n\n }\n\n if level != 0 {\n\n return Err(Error::new(ErrorDetail::Undefined));\n\n }\n", "file_path": "kg-syntax-gen/src/parse.rs", "rank": 17, "score": 69888.7836277608 }, { "content": "fn parse_token(r: &mut dyn CharReader, ctx: ParseContext) -> Result<ParseToken, Error> {\n\n skip_ws(r, true)?;\n\n\n\n match r.peek_char(0)? {\n\n Some('<') => {\n\n if r.match_str(\"<%\")? {\n\n let p1 = r.position();\n\n r.skip_chars(2)?;\n\n let p2 = r.position();\n\n Ok(ParseToken::new(ParseTerminal::ActionStart, p1, p2))\n\n } else {\n\n Err(Error::Unspecified(line!())) //FIXME (jc) invalid character '<'\n\n }\n\n }\n\n Some('{') => {\n\n let p1 = r.position();\n\n r.next_char()?;\n\n let p2 = r.position();\n\n Ok(ParseToken::new(ParseTerminal::BraceLeft, p1, p2))\n\n }\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 18, "score": 66178.79364362896 }, { "content": "pub fn gen_dfa_lexer(lexer: &LexerDef, dfa: &Dfa) -> std::io::Result<String> {\n\n use regex::{Regex, Captures};\n\n use std::borrow::Cow;\n\n\n\n lazy_static! 
{\n\n static ref VAR_RE: Regex = Regex::new(r\"\\$\\{(.+?)\\}\").unwrap();\n\n }\n\n\n\n let num_states = dfa.states().len();\n\n\n\n let mut tab = String::with_capacity(10 * 1024);\n\n tab.push_str(\"[\\n\");\n\n for s in dfa.states() {\n\n tab.push_str(\"\\t[\");\n\n\n\n for (i, e) in s.edges().iter().cloned().enumerate() {\n\n if i % 16 == 0 {\n\n write!(tab, \"\\n\\t\").unwrap();\n\n }\n\n if e != EMPTY_GOTO {\n", "file_path": "kg-syntax-gen/src/lex.rs", "rank": 19, "score": 66082.97493307493 }, { "content": "fn main() {\n\n gen_lexer(\n\n \"src/num.lex\".as_ref(),\n\n Path::new(&std::env::var(\"OUT_DIR\").unwrap())\n\n .join(\"num.rs\")\n\n .as_ref(),\n\n )\n\n .unwrap();\n\n}\n", "file_path": "kg-syntax-test/build.rs", "rank": 20, "score": 55719.24592980687 }, { "content": "#[test]\n\nfn parse_grammar() {\n\n let f = FileBuffer::open(\"resources/java/grammar/java.grammar\").unwrap();\n\n let mut r = f.char_reader();\n\n\n\n let grammar = GrammarRef::parse(&mut r).unwrap();\n\n\n\n let mut runtime = JsRuntime::new(&grammar).unwrap();\n\n println!(\"\\n{}\", grammar.borrow());\n\n for lexeme in grammar.borrow().terminals().iter() {\n\n println!(\"{:?}\", lexeme);\n\n }\n\n\n\n let inp = FileBuffer::open(\"resources/java/src/main/java/org/example/geom/Point3.java\").unwrap();\n\n let mut ir = inp.byte_reader();\n\n\n\n let n = runtime.process(&mut ir).unwrap();\n\n println!(\"\\n{}\", n.to_yaml());\n\n\n\n}\n", "file_path": "kg-syntax/tests/grammar.rs", "rank": 21, "score": 54543.552098646534 }, { "content": "fn main() {\n\n let mut lexer = NumLexer::new();\n\n let s = \"12123231231\";\n\n let mut r = MemByteReader::new(s.as_bytes());\n\n loop {\n\n let t = lexer.next_token(&mut r).unwrap();\n\n println!(\"{:?}\", t);\n\n if t.term() == Term::End {\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "kg-syntax-test/src/main.rs", "rank": 22, "score": 54543.552098646534 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Char {\n\n value: char,\n\n}\n\n\n\nimpl Char 
{\n\n fn new(c: char) -> Char {\n\n Char {\n\n value: c\n\n }\n\n }\n\n\n\n fn prev(c: char) -> char {\n\n unsafe {\n\n std::char::from_u32_unchecked(c as u32 - 1)\n\n }\n\n }\n\n\n\n fn next(c: char) -> char {\n\n unsafe {\n\n std::char::from_u32_unchecked(c as u32 + 1)\n", "file_path": "kg-syntax/src/regex/mod.rs", "rank": 23, "score": 54047.178531907426 }, { "content": "#[derive(Debug, Default, Clone, Copy)]\n\nstruct Flags {\n\n icase: bool,\n\n}\n\n\n\nimpl Flags {\n\n fn new() -> Flags {\n\n Flags {\n\n icase: false\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/regex/parse.rs", "rank": 24, "score": 54047.090794905904 }, { "content": "#[derive(Debug)]\n\nstruct Options {\n\n case_sensitive: Option<bool>,\n\n greedy: Option<bool>,\n\n strict_mode: Option<bool>,\n\n}\n\n\n\nimpl Options {\n\n fn new() -> Options {\n\n Options {\n\n case_sensitive: None,\n\n greedy: None,\n\n strict_mode: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Options {\n\n fn default() -> Options {\n\n Options {\n\n case_sensitive: Some(true),\n\n greedy: Some(false),\n\n strict_mode: Some(false),\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 25, "score": 54043.24176865838 }, { "content": "#[derive(Debug)]\n\nstruct Fragment {\n\n elements: Vec<Element>,\n\n repeat: Repeat,\n\n}\n\n\n\nimpl Fragment {\n\n fn new() -> Fragment {\n\n Fragment {\n\n elements: Vec::new(),\n\n repeat: Repeat::One,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 26, "score": 54043.24176865838 }, { "content": "#[derive(Debug)]\n\nstruct Parser {\n\n pass: bool,\n\n tokens: Vec<ParseToken>,\n\n token_stack: Vec<ParseToken>,\n\n context_stack: Vec<ParseContext>,\n\n}\n\n\n\nimpl Parser {\n\n fn new() -> Parser {\n\n Parser {\n\n pass: false,\n\n tokens: Vec::with_capacity(1024),\n\n token_stack: Vec::with_capacity(16),\n\n context_stack: Vec::with_capacity(16),\n\n }\n\n }\n\n\n\n fn parse(&mut self, r: &mut dyn CharReader) -> 
Result<ParseGrammar, Error> {\n\n let mut pg = ParseGrammar::new();\n\n pg.modes.push(ParseMode::new(\"default\"));\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 27, "score": 54043.24176865838 }, { "content": "#[test]\n\nfn should_parse_dot_as_any() {\n\n assert_eq!(Regex::Any, Regex::parse(\".\").unwrap());\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 28, "score": 53447.26883371289 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct ParseToken {\n\n term: ParseTerminal,\n\n from: Position,\n\n to: Position,\n\n}\n\n\n\nimpl ParseToken {\n\n fn new(term: ParseTerminal, from: Position, to: Position) -> ParseToken {\n\n ParseToken {\n\n term,\n\n from,\n\n to,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 29, "score": 52953.047903288665 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct Item {\n\n rule: usize,\n\n position: usize,\n\n lookaheads: OrdSet<usize>,\n\n}\n\n\n\nimpl Item {\n\n fn new(rule: usize, position: usize, grammar: &GrammarExt) -> Item {\n\n let rule = grammar.grammar.rule(rule);\n\n if rule.symbols().len() == position {\n\n Item {\n\n rule: rule.index(),\n\n position,\n\n lookaheads: grammar.production(rule.production()).follow.borrow().clone(),\n\n }\n\n } else {\n\n Item {\n\n rule: rule.index(),\n\n position,\n\n lookaheads: OrdSet::new(),\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 30, "score": 52952.645124530216 }, { "content": "#[derive(Debug)]\n\nstruct LexerRegex {\n\n token_from: usize,\n\n token_to: usize,\n\n regex: Option<Regex>,\n\n mark: Cell<u32>,\n\n}\n\n\n\nimpl LexerRegex {\n\n fn new() -> LexerRegex {\n\n LexerRegex {\n\n token_from: 0,\n\n token_to: 0,\n\n regex: None,\n\n mark: Cell::new(0),\n\n }\n\n }\n\n\n\n fn is_resolved(&self) -> bool {\n\n self.regex.is_some()\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 31, "score": 
52949.11114003962 }, { "content": "#[derive(Debug)]\n\nstruct ParseGrammar {\n\n modes: Vec<ParseMode>,\n\n channels: Vec<ParseChannel>,\n\n options: Options,\n\n globals: Option<Code>,\n\n}\n\n\n\nimpl ParseGrammar {\n\n fn new() -> ParseGrammar {\n\n ParseGrammar {\n\n modes: Vec::with_capacity(32),\n\n channels: Vec::with_capacity(32),\n\n options: Options::default(),\n\n globals: None,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 32, "score": 52949.11114003962 }, { "content": "#[derive(Debug)]\n\nstruct ParseMode {\n\n name: String,\n\n parents: Vec<String>,\n\n options: Options,\n\n rules: Vec<LexerRule>,\n\n mode: Cell<usize>,\n\n}\n\n\n\nimpl ParseMode {\n\n fn new(name: &str) -> ParseMode {\n\n ParseMode {\n\n name: name.to_string(),\n\n parents: Vec::new(),\n\n options: Options::new(),\n\n rules: Vec::new(),\n\n mode: Cell::new(::std::usize::MAX),\n\n }\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.rules.is_empty()\n\n }\n\n\n\n fn is_active(&self) -> bool {\n\n self.mode.get() != ::std::usize::MAX\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 33, "score": 52949.11114003962 }, { "content": "#[derive(Debug)]\n\nstruct ParserRule {\n\n name: String,\n\n fragments: Vec<Fragment>,\n\n var: bool,\n\n production: Cell<usize>,\n\n}\n\n\n\nimpl ParserRule {\n\n fn new() -> ParserRule {\n\n ParserRule {\n\n name: String::new(),\n\n fragments: Vec::new(),\n\n var: false,\n\n production: Cell::new(0),\n\n }\n\n }\n\n\n\n fn is_active(&self) -> bool {\n\n self.production.get() > 0\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 34, "score": 52949.11114003962 }, { "content": "#[derive(Debug)]\n\nstruct ParseChannel {\n\n name: String,\n\n parents: Vec<String>,\n\n options: Options,\n\n rules: Vec<ParserRule>,\n\n channel: Cell<usize>,\n\n}\n\n\n\nimpl ParseChannel {\n\n fn new(name: &str) -> ParseChannel {\n\n ParseChannel {\n\n name: name.to_string(),\n\n 
parents: Vec::new(),\n\n options: Options::new(),\n\n rules: Vec::new(),\n\n channel: Cell::new(::std::usize::MAX),\n\n }\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.rules.is_empty()\n\n }\n\n\n\n fn is_active(&self) -> bool {\n\n self.channel.get() != ::std::usize::MAX\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 35, "score": 52949.11114003962 }, { "content": "#[derive(Debug)]\n\nstruct LexerRule {\n\n name: String,\n\n expr: LexerRegex,\n\n commands: Vec<LexerCommand>,\n\n var: bool,\n\n token_from: usize,\n\n token_to: usize,\n\n terminal: Cell<usize>,\n\n}\n\n\n\nimpl LexerRule {\n\n fn new() -> LexerRule {\n\n LexerRule {\n\n name: String::new(),\n\n expr: LexerRegex::new(),\n\n commands: Vec::new(),\n\n var: false,\n\n token_from: 0,\n\n token_to: 0,\n\n terminal: Cell::new(0),\n\n }\n\n }\n\n\n\n fn is_active(&self) -> bool {\n\n self.terminal.get() > 0\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/grammar/parse.rs", "rank": 36, "score": 52949.11114003962 }, { "content": "#[test]\n\nfn literal_display_quoted() {\n\n let r = Regex::Literal {\n\n chars: vec!['\\\\', '.', '+', '*', '?', '|', '[', ']', '(', ')', '{', '}', '-', '^'],\n\n icase: false,\n\n };\n\n assert_eq!(format!(\"{}\", r), r\"\\\\\\.\\+\\*\\?\\|\\[\\]\\(\\)\\{\\}-^\");\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 37, "score": 52422.613459396285 }, { "content": "#[test]\n\nfn parse_excluded_set() {\n\n let r = r\"[^\\n\\r]\";\n\n assert_eq!(format!(\"{}\", Regex::parse(r).unwrap()), r);\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 38, "score": 52422.613459396285 }, { "content": "#[test]\n\nfn char_set_should_contain() {\n\n let mut c = CharSet::new();\n\n c.add_range('0', '5');\n\n c.add_range('a', 'f');\n\n c.add_range('B', 'F');\n\n\n\n assert!(c.contains_range('1', '4'));\n\n assert!(c.contains_char('b'));\n\n assert!(!c.contains_range('A', 'F'));\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", 
"rank": 39, "score": 52422.613459396285 }, { "content": "#[test]\n\nfn char_set_should_overlap() {\n\n let mut c = CharSet::new();\n\n c.add_range('0', '5');\n\n c.add_range('a', 'f');\n\n c.add_range('B', 'F');\n\n\n\n assert!(c.overlaps_range('1', '8'));\n\n assert!(c.overlaps_range('A', 'a'));\n\n assert!(c.overlaps_range('E', 'Z'));\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 40, "score": 52422.613459396285 }, { "content": "#[test]\n\nfn char_set_remove() {\n\n let mut c = CharSet::new();\n\n c.add_range('1', '5');\n\n c.add_range('A', 'P');\n\n assert_eq!(format!(\"{}\", c), \"[1-5A-P]\");\n\n c.remove_range('F', 'Z');\n\n assert_eq!(format!(\"{}\", c), \"[1-5A-E]\");\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 41, "score": 52422.613459396285 }, { "content": "#[test]\n\nfn char_set_exclude() {\n\n let mut c = CharSet::new();\n\n c.remove_range('1', '5');\n\n assert_eq!(format!(\"{}\", c), \"[^1-5]\");\n\n assert_eq!(c.contains_char('0'), true);\n\n assert_eq!(c.contains_char('1'), false);\n\n assert_eq!(c.contains_char('2'), false);\n\n assert_eq!(c.contains_char('3'), false);\n\n assert_eq!(c.contains_char('4'), false);\n\n assert_eq!(c.contains_char('5'), false);\n\n assert_eq!(c.contains_char('6'), true);\n\n assert_eq!(c.contains_char('!'), true);\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 42, "score": 52422.613459396285 }, { "content": "pub trait Parser {\n\n fn reset(&mut self);\n\n\n\n fn parse(&mut self, token: &Token) -> Result<Step, ParserError>;\n\n\n\n fn channel(&self) -> usize;\n\n}\n", "file_path": "kg-syntax/src/parser/mod.rs", "rank": 43, "score": 52029.363641469114 }, { "content": "pub trait Lexer {\n\n fn reset(&mut self);\n\n\n\n fn lex(&mut self, reader: &mut dyn ByteReader) -> Result<Token, LexerError>;\n\n\n\n fn unmatched(&self) -> usize;\n\n\n\n fn mode(&self) -> usize;\n\n}\n", "file_path": "kg-syntax/src/lexer/mod.rs", "rank": 44, "score": 52029.363641469114 }, { 
"content": "pub trait Runtime {\n\n fn process(&mut self, reader: &mut dyn ByteReader) -> Result<NodeRef, Error>;\n\n\n\n fn reset(&mut self);\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/runtime/mod.rs", "rank": 45, "score": 52029.363641469114 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct ItemSet {\n\n symbol: Symbol,\n\n kernel: OrdSet<Item>,\n\n closure: OrdSet<Item>,\n\n}\n\n\n\nimpl ItemSet {\n\n fn new(symbol: Symbol) -> ItemSet {\n\n ItemSet {\n\n symbol,\n\n kernel: OrdSet::with_capacity(128),\n\n closure: OrdSet::with_capacity(128),\n\n }\n\n }\n\n\n\n fn first(production: &ProductionExt, grammar: &GrammarExt) -> ItemSet {\n\n let mut is = ItemSet::new(Symbol::Production(production.index()));\n\n for r in production.rules().iter() {\n\n is.add_item(Item::new(*r, 0, grammar), true);\n\n }\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 46, "score": 51930.232869883774 }, { "content": "#[derive(Debug)]\n\nstruct JsRuntimeContext {\n\n token_input_queue: VecDeque<Token>,\n\n token_merge_queue: VecDeque<Token>,\n\n mode: usize,\n\n mode_stack: Vec<usize>,\n\n channel: usize,\n\n channel_stack: Vec<usize>,\n\n}\n\n\n\nimpl JsRuntimeContext {\n\n fn new() -> JsRuntimeContext {\n\n JsRuntimeContext {\n\n token_input_queue: VecDeque::new(),\n\n token_merge_queue: VecDeque::new(),\n\n mode: 0,\n\n mode_stack: Vec::new(),\n\n channel: 0,\n\n channel_stack: Vec::new(),\n\n }\n\n }\n", "file_path": "kg-syntax/src/runtime/mod.rs", "rank": 47, "score": 51926.46775517824 }, { "content": "#[test]\n\nfn char_set_display_quoted() {\n\n let mut c = CharSet::new();\n\n c.add_char('-');\n\n c.add_char('\\\\');\n\n assert_eq!(format!(\"{}\", c), r\"[\\-\\\\]\");\n\n c.add_char('[');\n\n c.add_char(']');\n\n assert_eq!(format!(\"{}\", c), r\"[\\-\\[-\\]]\");\n\n c.add_char('^');\n\n assert_eq!(format!(\"{}\", c), r\"[\\-\\[-\\^]\");\n\n}\n\n\n", "file_path": "kg-syntax/src/regex/tests.rs", "rank": 48, "score": 51462.78812123531 }, { "content": 
"#[derive(Debug)]\n\nstruct ProductionExt<'a> {\n\n production: &'a Production,\n\n first: RefCell<OrdSet<usize>>,\n\n follow: RefCell<OrdSet<usize>>,\n\n nullable: Cell<bool>,\n\n}\n\n\n\nimpl<'a> ProductionExt<'a> {\n\n fn new(p: &'a Production) -> ProductionExt<'a> {\n\n ProductionExt {\n\n production: p,\n\n first: RefCell::new(OrdSet::new()),\n\n follow: RefCell::new(OrdSet::new()),\n\n nullable: Cell::new(false),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq for ProductionExt<'a> {\n\n fn eq(&self, other: &ProductionExt<'a>) -> bool {\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 49, "score": 49292.96136800593 }, { "content": "struct ItemDisp<'a> {\n\n item: &'a Item,\n\n grammar: &'a Grammar,\n\n kernel: bool,\n\n}\n\n\n\n\n\nimpl<'a> ItemDisp<'a> {\n\n fn new(item: &'a Item, grammar: &'a Grammar, kernel: bool) -> ItemDisp<'a> {\n\n ItemDisp {\n\n item,\n\n grammar,\n\n kernel,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for ItemDisp<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let rule = self.grammar.rule(self.item.rule);\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 50, "score": 49292.96136800593 }, { "content": "#[derive(Debug)]\n\nstruct GrammarExt<'a> {\n\n grammar: &'a Grammar,\n\n productions: Vec<ProductionExt<'a>>,\n\n}\n\n\n\nimpl<'a> GrammarExt<'a> {\n\n fn new(g: &'a Grammar) -> GrammarExt<'a> {\n\n let mut g = GrammarExt {\n\n grammar: g,\n\n productions: g.productions().iter().map(|p| ProductionExt::new(p)).collect(),\n\n };\n\n g.build_first_follow_nullable();\n\n g\n\n }\n\n\n\n fn production(&'a self, production: usize) -> &'a ProductionExt<'a> {\n\n &self.productions[production]\n\n }\n\n\n\n fn build_first_follow_nullable(&mut self) {\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 51, "score": 49292.96136800593 }, { "content": "struct ItemSetDisp<'a> {\n\n items: Vec<ItemDisp<'a>>,\n\n}\n\n\n\nimpl<'a> ItemSetDisp<'a> {\n\n fn new(itemset: &'a ItemSet, grammar: &'a 
Grammar) -> ItemSetDisp<'a> {\n\n let mut items: Vec<ItemDisp<'a>> = Vec::with_capacity(itemset.len());\n\n items.extend(itemset.kernel.iter().map(|i| ItemDisp::new(i, grammar, true)));\n\n items.extend(itemset.closure.iter().map(|i| ItemDisp::new(i, grammar, false)));\n\n items.sort_by(|a, b| a.item.cmp(&b.item));\n\n\n\n ItemSetDisp { items }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for ItemSetDisp<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for item in self.items.iter() {\n\n write!(f, \"{}\\n\", item)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "kg-syntax/src/parser/clr/build.rs", "rank": 52, "score": 48335.02072054941 }, { "content": "use super::*;\n\n\n\nuse std::collections::VecDeque;\n\n\n\n/// Regex engine implementation in form of virtual machine. This is essentially equivalent to\n\n/// epsilon-NFA (NFA with null moves), but it is convenient to implement as executable machine\n\n/// with small memory footprint\n\n#[derive(Debug, Clone)]\n\npub struct Program {\n\n code: Vec<Opcode>,\n\n masks: Vec<ByteMask>,\n\n}\n\n\n\nimpl Program {\n\n pub fn from_regex(regex: &Regex, m: usize) -> Program {\n\n Compiler::new(m).compile(regex)\n\n }\n\n\n\n pub fn merge<'a, I, E>(progs: I) -> Program\n\n where I: IntoIterator<IntoIter = E>, E: ExactSizeIterator<Item = &'a Program>\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 63, "score": 33411.03864756893 }, { "content": " *g1 = new_goto;\n\n } else {\n\n *g2 = new_goto;\n\n }\n\n },\n\n }\n\n }\n\n Proc(s, n.1)\n\n }\n\n Regex::Any => {\n\n //FIXME (jc) handle Unicode UTF8\n\n self.range(b'\\0', b'\\x7F')\n\n }\n\n Regex::Empty => {\n\n unreachable!();\n\n }\n\n }\n\n }\n\n\n\n pub fn compile(mut self, regex: &Regex) -> Program {\n\n self.regex(regex);\n\n self.prog.add_opcode(Opcode::Match(self.matching as u32));\n\n self.prog\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 64, "score": 33405.95104504632 }, { "content": "\n\n pub fn opcode(&self, pc: 
u32) -> &Opcode {\n\n &self.code[pc as usize]\n\n }\n\n\n\n fn opcode_mut(&mut self, pc: u32) -> &mut Opcode {\n\n &mut self.code[pc as usize]\n\n }\n\n\n\n #[inline]\n\n fn add_opcode(&mut self, op: Opcode) -> u32 {\n\n let n = self.code.len() as u32;\n\n self.code.push(op);\n\n n\n\n }\n\n\n\n #[inline]\n\n pub fn masks(&self) -> &[ByteMask] {\n\n &self.masks\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 65, "score": 33404.11142296332 }, { "content": "\n\n prog\n\n }\n\n\n\n fn new() -> Program {\n\n Program {\n\n code: Vec::new(),\n\n masks: Vec::new(),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn code(&self) -> &[Opcode] {\n\n &self.code\n\n }\n\n\n\n #[inline]\n\n fn opcode_count(&self) -> u32 {\n\n self.code.len() as u32\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 66, "score": 33402.85704683722 }, { "content": " {\n\n let mut progs_iter = progs.into_iter();\n\n let progs_count = progs_iter.len();\n\n\n\n if progs_count == 1 {\n\n return progs_iter.next().unwrap().clone();\n\n }\n\n\n\n let mut prog = Program::new();\n\n\n\n for _ in 0..progs_count as u32 {\n\n prog.code.push(Opcode::Split(0, 0));\n\n }\n\n\n\n let mut op_offset = prog.code.len();\n\n let mut mask_offset = 0;\n\n let mut mask_map = HashMap::new();\n\n\n\n for (i, p) in progs_iter.enumerate() {\n\n for (i, m) in p.masks.iter().enumerate() {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 67, "score": 33402.82756986961 }, { "content": " stack2: Vec<usize>,\n\n}\n\n\n\nimpl ProgMatcher {\n\n pub fn from_regexes<'a, I: Iterator<Item = &'a Regex>>(regexes: I) -> ProgMatcher {\n\n let mut machines = Vec::with_capacity(regexes.size_hint().0);\n\n\n\n for (i, r) in regexes.enumerate() {\n\n let p = Program::from_regex(r, i + 1);\n\n machines.push(Machine::new(p));\n\n }\n\n\n\n let len = machines.len();\n\n ProgMatcher {\n\n machines,\n\n stack1: Vec::with_capacity(len),\n\n stack2: Vec::with_capacity(len),\n\n }\n\n }\n\n\n", "file_path": 
"kg-syntax/src/lexer/prog.rs", "rank": 68, "score": 33401.50466775333 }, { "content": "#[derive(Debug)]\n\npub struct ProgLexer {\n\n matcher: ProgMatcher,\n\n grammar: GrammarRef,\n\n unmatched: usize,\n\n mode: usize,\n\n}\n\n\n\nimpl ProgLexer {\n\n pub fn new(grammar: &GrammarRef, unmatched: usize, mode: usize) -> ProgLexer {\n\n let g = grammar.borrow();\n\n\n\n let mut lexemes: Vec<&Lexeme> = g.terminals().iter()\n\n .filter(|lexeme| lexeme.has_mode(mode) && match *lexeme.regex() {\n\n Regex::Empty | Regex::Any => false,\n\n _ => true,\n\n })\n\n .collect();\n\n\n\n lexemes.sort_by(|a, b| {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 69, "score": 33401.20724414996 }, { "content": "\n\n #[inline]\n\n pub fn mask(&self, mask: u32) -> &ByteMask {\n\n &self.masks[mask as usize]\n\n }\n\n\n\n fn add_mask(&mut self, mask: ByteMask) -> u32 {\n\n for (i, m) in self.masks.iter().enumerate() {\n\n if *m == mask {\n\n return i as u32;\n\n }\n\n }\n\n let idx = self.masks.len() as u32;\n\n self.masks.push(mask);\n\n idx\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Program {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 70, "score": 33400.67888627302 }, { "content": " let mi = prog.add_mask(m.clone());\n\n mask_map.insert((mask_offset + i) as u32, mi);\n\n }\n\n for mut o in p.code.iter().cloned() {\n\n o.shift(op_offset as u32, mask_offset as u32);\n\n prog.code.push(o);\n\n }\n\n\n\n prog.code[i] = Opcode::Split(op_offset as u32, (i + 1) as u32);\n\n\n\n op_offset += p.code.len();\n\n mask_offset += p.masks.len();\n\n }\n\n\n\n // remapping masks, since we removed duplicated masks in merged program\n\n for p in prog.code.iter_mut() {\n\n if let &mut Opcode::Mask(_, ref mut m) = p {\n\n *m = *mask_map.get(m).unwrap();\n\n }\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 71, "score": 33400.62481083004 }, { "content": " pub fn from_progs<I: Iterator<Item = 
Program>>(progs: I) -> ProgMatcher {\n\n let mut machines = Vec::with_capacity(progs.size_hint().0);\n\n\n\n for p in progs {\n\n machines.push(Machine::new(p));\n\n }\n\n\n\n let len = machines.len();\n\n ProgMatcher {\n\n machines,\n\n stack1: Vec::with_capacity(len),\n\n stack2: Vec::with_capacity(len),\n\n }\n\n }\n\n\n\n fn swap(&mut self) {\n\n std::mem::swap(&mut self.stack1, &mut self.stack2);\n\n }\n\n\n\n fn reset(&mut self) {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 72, "score": 33399.891246804546 }, { "content": " }\n\n // the last arm will have correct goto(s), so we remove it from `procs`\n\n procs.pop_back();\n\n // in other arms the goto(s) must be corrected to jump after the alternative\n\n for p in procs {\n\n let pc = p.1 - 1;\n\n let old_goto = pc + 1;\n\n let new_goto = n.1;\n\n debug_assert_ne!(old_goto, new_goto);\n\n match self.prog.opcode_mut(pc) {\n\n Opcode::Match(..) => unreachable!(),\n\n Opcode::Mask(ref mut g, ..) |\n\n Opcode::Byte(ref mut g, ..) |\n\n Opcode::Range(ref mut g, ..) 
=> {\n\n debug_assert_eq!(*g, old_goto);\n\n *g = new_goto;\n\n },\n\n Opcode::Split(ref mut g1, ref mut g2) => {\n\n debug_assert!(*g1 == old_goto || *g2 == old_goto);\n\n if *g1 == old_goto {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 73, "score": 33398.93529662432 }, { "content": " }\n\n }\n\n\n\n fn swap(&mut self) {\n\n std::mem::swap(&mut self.pc1, &mut self.pc2);\n\n }\n\n\n\n fn restart(&mut self) {\n\n self.pc2.clear();\n\n self.push_pc(0);\n\n self.swap();\n\n }\n\n\n\n fn push_pc(&mut self, pc: u32) {\n\n if !self.pc2.contains(&pc) {\n\n match *self.program.opcode(pc) {\n\n Opcode::Split(goto1, goto2) => {\n\n self.push_pc(goto1);\n\n self.push_pc(goto2);\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 74, "score": 33398.80150128753 }, { "content": " Match(u32),\n\n Byte(u32, u8),\n\n Range(u32, u8, u8),\n\n Mask(u32, u32),\n\n Split(u32, u32),\n\n}\n\n\n\nimpl Opcode {\n\n fn shift(&mut self, goto_offset: u32, mask_offset: u32) {\n\n match *self {\n\n Opcode::Match(..) => { },\n\n Opcode::Byte(ref mut g, ..) => *g += goto_offset,\n\n Opcode::Range(ref mut g, ..) 
=> *g += goto_offset,\n\n Opcode::Mask(ref mut g, ref mut m) => {\n\n *g += goto_offset;\n\n *m += mask_offset;\n\n }\n\n Opcode::Split(ref mut g1, ref mut g2) => {\n\n *g1 += goto_offset;\n\n *g2 += goto_offset;\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug)]\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 75, "score": 33398.11649050684 }, { "content": " if reader.eof() {\n\n for i in self.stack1.iter().cloned() {\n\n if let Status::Matched(m) = self.machines[i].step(0xFFu8) {\n\n if matching < m {\n\n matching = m;\n\n }\n\n }\n\n }\n\n }\n\n\n\n if matching != EMPTY_MATCH {\n\n Ok(Some(Match::new(matching)))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 76, "score": 33397.66446126172 }, { "content": "\n\nimpl Lexer for ProgLexer {\n\n fn reset(&mut self) {\n\n self.matcher.reset();\n\n }\n\n\n\n fn lex(&mut self, reader: &mut dyn ByteReader) -> Result<Token, LexerError> {\n\n let s = reader.position();\n\n\n\n if reader.eof() {\n\n Ok(Token::with_id(0, \"$\".into(), \"\".into(), s, s))\n\n } else {\n\n match self.matcher.exec(reader)? 
{\n\n Some(m) => {\n\n let g = self.grammar.borrow();\n\n let t = g.terminal(m.matching() as usize);\n\n let e = reader.position();\n\n let mut token = Token::with_id(t.index(), t.id().into(), reader.slice(s.offset, e.offset)?.into(), s, e);\n\n token.set_mode(self.mode);\n\n Ok(token)\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 77, "score": 33397.366231188375 }, { "content": " println!(\"{:?}\", re1);\n\n println!(\"{:?}\", re2);\n\n println!(\"{:?}\", re3);\n\n println!(\"{:?}\", re4);\n\n\n\n let p1 = Program::from_regex(&re1, 1);\n\n let p2 = Program::from_regex(&re2, 2);\n\n let p3 = Program::from_regex(&re3, 3);\n\n let p4 = Program::from_regex(&re4, 4);\n\n\n\n // let p = Program::merge(&[p1, p2, p3, p4]);\n\n //eprintln!(\"\\n{}\", p);\n\n\n\n let mut m = ProgMatcher::from_progs(vec![p1, p2, p3, p4].into_iter());\n\n\n\n let mut r = MemByteReader::new(b\"/* */123a/****/*/ class classa \");\n\n println!(\"{}\", r.input().unwrap());\n\n\n\n while !r.eof() {\n\n let p1 = r.position();\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 78, "score": 33396.994676053844 }, { "content": " n.merge(self.regex(e));\n\n }\n\n n\n\n }\n\n Regex::Alternate(ref es) => {\n\n debug_assert!(es.len() > 1);\n\n let s = self.prog.opcode_count();\n\n for _ in 0..es.len() - 1 {\n\n self.prog.add_opcode(Opcode::Split(0, 0));\n\n }\n\n let mut procs = VecDeque::with_capacity(es.len());\n\n for e in es {\n\n let p = self.regex(e);\n\n procs.push_back(p);\n\n }\n\n let mut n = Proc::default();\n\n for (i, p) in procs.iter().skip(1).cloned().enumerate() {\n\n let i = i as u32;\n\n *self.prog.opcode_mut(s + i) = Opcode::Split(s + i + 1, p.0);\n\n n.merge(p);\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 79, "score": 33396.34529309913 }, { "content": " for m in self.machines.iter_mut() {\n\n m.restart();\n\n }\n\n self.stack1.clear();\n\n self.stack1.extend(0..self.machines.len());\n\n }\n\n\n\n pub fn exec(&mut self, reader: &mut dyn ByteReader) -> 
IoResult<Option<Match>> {\n\n if reader.eof() {\n\n Ok(None)\n\n } else {\n\n self.reset();\n\n\n\n let mut matching = EMPTY_MATCH;\n\n while let Some(c) = reader.peek_byte(0)? {\n\n self.stack2.clear();\n\n\n\n matching = EMPTY_MATCH;\n\n for i in self.stack1.iter().cloned() {\n\n match self.machines[i].step(c) {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 80, "score": 33395.188188091604 }, { "content": " use self::Opcode::*;\n\n\n\n for (i, op) in self.code.iter().enumerate() {\n\n let i = i as u32;\n\n write!(f, \"({:4}): \", i)?;\n\n match *op {\n\n Byte(goto, b) => write!(f, \"byte ({:4}) {:?}\", goto, b as char)?,\n\n Range(goto, b1, b2) => write!(f, \"range ({:4}) {:?}-{:?}\", goto, b1 as char, b2 as char)?,\n\n Mask(goto, m) => write!(f, \"mask ({:4}) {}:{}\", goto, m, self.mask(m))?,\n\n Split(goto1, goto2) => write!(f, \"split ({:4}, {:4})\", goto1, goto2)?,\n\n Match(m) => write!(f, \"match [{}]\", m)?,\n\n }\n\n write!(f, \"\\n\")?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Opcode {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 81, "score": 33395.0573384206 }, { "content": " _ => {\n\n self.pc2.insert(pc);\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn step(&mut self, input: u8) -> Status {\n\n let mut matched = 0;\n\n\n\n self.pc2.clear();\n\n\n\n // must be iterated by indexing, because `self` must be mutably borrowed within loop\n\n // and iterator over `self.pc1` would already borrow `self` immutably\n\n for i in 0..self.pc1.len() {\n\n let pc = self.pc1[i];\n\n let opcode = *self.program.opcode(pc);\n\n match opcode {\n\n Opcode::Match(m) => {\n\n matched = m;\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 82, "score": 33394.59931660199 }, { "content": " let pa = a.precedence(mode);\n\n let pb = b.precedence(mode);\n\n match pa.cmp(&pb) {\n\n Ordering::Equal => a.index().cmp(&b.index()),\n\n o => o\n\n }\n\n });\n\n\n\n let matcher = ProgMatcher::from_progs(lexemes.iter().map(|lexeme| {\n\n 
Program::from_regex(lexeme.regex(), lexeme.index())\n\n }));\n\n\n\n ProgLexer {\n\n matcher,\n\n grammar: grammar.clone(),\n\n unmatched,\n\n mode,\n\n }\n\n }\n\n}\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 83, "score": 33393.93916967418 }, { "content": " *self.prog.opcode_mut(s) = Opcode::Split(p.1 + 1, p.0);\n\n *self.prog.opcode_mut(f) = Opcode::Split(f + 1, p.0);\n\n }\n\n Proc(s, f + 1)\n\n }\n\n\n\n fn plus(&mut self, e: &Regex, greedy: bool) -> Proc {\n\n let p = self.regex(e);\n\n let f = self.prog.add_opcode(Opcode::Split(0, 0));\n\n if greedy {\n\n *self.prog.opcode_mut(f) = Opcode::Split(p.0, f + 1);\n\n } else {\n\n *self.prog.opcode_mut(f) = Opcode::Split(f + 1, p.0);\n\n }\n\n Proc(p.0, f + 1)\n\n }\n\n\n\n fn option(&mut self, e: &Regex, greedy: bool) -> Proc {\n\n let s = self.prog.add_opcode(Opcode::Split(0, 0));\n\n let p = self.regex(e);\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 84, "score": 33393.9307923279 }, { "content": "\n\n self.swap();\n\n\n\n if self.pc1.is_empty() {\n\n if matched > 0 {\n\n Status::Matched(matched)\n\n } else {\n\n Status::Failed\n\n }\n\n } else {\n\n Status::Processing\n\n }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub struct ProgMatcher {\n\n machines: Vec<Machine>,\n\n stack1: Vec<usize>,\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 85, "score": 33393.918154458974 }, { "content": " let n = self.prog.opcode_count();\n\n self.prog.add_opcode(Opcode::Range(n + 1, a, b));\n\n Proc(n, n + 1)\n\n }\n\n\n\n fn mask(&mut self, m: ByteMask) -> Proc {\n\n let m = self.prog.add_mask(m);\n\n let n = self.prog.opcode_count();\n\n self.prog.add_opcode(Opcode::Mask(n + 1, m));\n\n Proc(n, n + 1)\n\n }\n\n\n\n fn star(&mut self, e: &Regex, greedy: bool) -> Proc {\n\n let s = self.prog.add_opcode(Opcode::Split(0, 0));\n\n let p = self.regex(e);\n\n let f = self.prog.add_opcode(Opcode::Split(0, 0));\n\n if greedy {\n\n *self.prog.opcode_mut(s) = Opcode::Split(p.0, p.1 + 1);\n\n 
*self.prog.opcode_mut(f) = Opcode::Split(p.0, f + 1);\n\n } else {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 86, "score": 33393.65595015434 }, { "content": " }\n\n\n\n fn mode(&self) -> usize {\n\n self.mode\n\n }\n\n}\n\n\n\n\n\n//FIXME (jc)\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test1() {\n\n let re1 = Regex::parse(\"/\\\\*.*?\\\\*/\").unwrap();\n\n let re2 = Regex::parse(\"class\").unwrap();\n\n let re3 = Regex::parse(\"[0-9a-f]+\").unwrap();\n\n let re4 = Regex::parse(\"[a-zA-Z_][0-9a-zA-Z_]*\").unwrap();\n\n\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 87, "score": 33392.966422199876 }, { "content": " if greedy {\n\n *self.prog.opcode_mut(s) = Opcode::Split(p.0, p.1);\n\n } else {\n\n *self.prog.opcode_mut(s) = Opcode::Split(p.1, p.0);\n\n }\n\n Proc(s, p.1)\n\n }\n\n\n\n fn regex(&mut self, r: &Regex) -> Proc {\n\n match *r {\n\n Regex::Literal { ref chars, icase } => {\n\n let mut p = Proc::default();\n\n if icase {\n\n let mut up = String::with_capacity(chars.len());\n\n let mut lo = String::with_capacity(chars.len());\n\n for c in chars {\n\n up.clear();\n\n lo.clear();\n\n for c in c.to_uppercase() {\n\n up.push(c);\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 88, "score": 33392.69899023574 }, { "content": " }\n\n None => {\n\n if self.unmatched > 0 {\n\n reader.next_byte()?;\n\n let g = self.grammar.borrow();\n\n let t = g.terminal(self.unmatched);\n\n let e = reader.position();\n\n let mut token = Token::with_id(t.index(), t.id().into(), reader.slice(s.offset, e.offset)?.into(), s, e);\n\n token.set_mode(self.mode);\n\n Ok(token)\n\n } else {\n\n Err(LexerError::UnexpectedInput(s))\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn unmatched(&self) -> usize {\n\n self.unmatched\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 89, "score": 33392.2627078632 }, { "content": " break;\n\n }\n\n Opcode::Byte(goto, b) => {\n\n if input == b {\n\n self.push_pc(goto);\n\n }\n\n }\n\n 
Opcode::Range(goto, b1, b2) => {\n\n if input >= b1 && input <= b2 {\n\n self.push_pc(goto);\n\n }\n\n }\n\n Opcode::Mask(goto, m) => {\n\n if self.program.mask(m).matches(input) {\n\n self.push_pc(goto);\n\n }\n\n }\n\n _ => unreachable!()\n\n }\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 90, "score": 33392.102128047176 }, { "content": " if set.ranges() == 1 {\n\n let range = set.iter().next().unwrap();\n\n self.range(range.start() as u8, range.end() as u8)\n\n } else {\n\n let mut m = ByteMask::empty();\n\n for r in set.iter() {\n\n for c in r.chars() {\n\n m.include(c as u8);\n\n }\n\n }\n\n self.mask(m)\n\n }\n\n } else {\n\n //FIXME (jc) handle Unicode UTF8\n\n let mut m = ByteMask::empty();\n\n for r in set.iter() {\n\n for c in r.chars() {\n\n if c <= '\\x7F' {\n\n m.include(c as u8);\n\n }\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 91, "score": 33390.72181974917 }, { "content": " }\n\n for c in c.to_lowercase() {\n\n lo.push(c);\n\n }\n\n let mut up = up.bytes().fuse();\n\n let mut lo = lo.bytes().fuse();\n\n\n\n loop {\n\n match (up.next(), lo.next()) {\n\n (Some(b1), Some(b2)) if b1 == b2 => {\n\n p.merge(self.byte(b1));\n\n }\n\n (Some(b1), Some(b2)) => {\n\n let mut m = ByteMask::empty();\n\n m.include(b1);\n\n m.include(b2);\n\n p.merge(self.mask(m));\n\n }\n\n (Some(b1), None) => {\n\n p.merge(self.byte(b1));\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 92, "score": 33388.93154128958 }, { "content": " }\n\n (None, Some(b2)) => {\n\n p.merge(self.byte(b2));\n\n }\n\n (None, None) => break\n\n }\n\n }\n\n }\n\n } else {\n\n let mut buf = [0; 4];\n\n for c in chars {\n\n for b in c.encode_utf8(&mut buf).bytes() {\n\n p = p.merge(self.byte(b));\n\n }\n\n }\n\n }\n\n p\n\n }\n\n Regex::Set(ref set) => {\n\n if set.is_ascii_range() {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 93, "score": 33388.882110407554 }, { "content": " }\n\n let p = self.star(e, greedy);\n\n Proc(s.0, p.1)\n\n }\n\n (lo, up) => { 
// eg. `a{2}`, `a{2,2}`, `a{2,4}`\n\n debug_assert!(lo <= up);\n\n let mut s = Proc::default();\n\n for _ in 0..lo {\n\n s.merge(self.regex(e));\n\n }\n\n for _ in 0..up - lo {\n\n s.merge(self.option(e, greedy));\n\n }\n\n s\n\n }\n\n }\n\n }\n\n Regex::Concat(ref es) => {\n\n let mut n = Proc::default();\n\n for e in es {\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 94, "score": 33388.84768688364 }, { "content": " match m.exec(&mut r).unwrap() {\n\n Some(m) => {\n\n let s = r.slice_pos(p1, r.position()).unwrap();\n\n eprintln!(\"match {}: {:?}\", m.matching(), s);\n\n },\n\n None => {\n\n r.seek(p1).unwrap();\n\n r.next_byte().unwrap();\n\n eprintln!(\"?\");\n\n }\n\n }\n\n }\n\n }\n\n\n\n}", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 95, "score": 33388.210450697276 }, { "content": " }\n\n }\n\n self.mask(m)\n\n }\n\n }\n\n Regex::Repeat { ref e, min, max, greedy } => {\n\n match (min, max) {\n\n (0, 0) => { // eg. `a*`\n\n self.star(e, greedy)\n\n }\n\n (1, 0) => { // eg. `a+`\n\n self.plus(e, greedy)\n\n }\n\n (0, 1) => { // eg. `a?`\n\n self.option(e, greedy)\n\n }\n\n (lo, 0) => { // eg. 
`a{2,}`\n\n let mut s = Proc::default();\n\n for _ in 0..lo {\n\n s.merge(self.regex(e));\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 96, "score": 33387.93288329144 }, { "content": " Status::Processing => {\n\n self.stack2.push(i)\n\n },\n\n Status::Matched(m) => {\n\n if matching < m {\n\n matching = m;\n\n }\n\n },\n\n Status::Failed => {},\n\n }\n\n }\n\n\n\n if self.stack2.is_empty() {\n\n break;\n\n }\n\n\n\n self.swap();\n\n reader.next_byte()?;\n\n }\n\n\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 97, "score": 33385.55888711825 }, { "content": "pub(crate) const NFA_LEXER_TPL: &'static str = r#\"\n\npub struct ${name}Lexer {\n\n\n\n}\n\n\"#;", "file_path": "kg-syntax-gen/src/tpl_nfa.rs", "rank": 98, "score": 32641.021631652013 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum Status {\n\n Processing,\n\n Failed,\n\n Matched(u32),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Machine {\n\n program: Program,\n\n pc1: SparseSet<u32>,\n\n pc2: SparseSet<u32>,\n\n}\n\n\n\nimpl Machine {\n\n pub fn new(program: Program) -> Machine {\n\n let len = program.code.len();\n\n Machine {\n\n program,\n\n pc1: SparseSet::with_capacity(len),\n\n pc2: SparseSet::with_capacity(len),\n", "file_path": "kg-syntax/src/lexer/prog.rs", "rank": 99, "score": 32222.345404359225 } ]
Rust
src/oneshot/timer_fd.rs
krircc/async-timer
c2cb75ab1d4624e8af280a1cdf8c3aa9f41f27bd
#[cfg(feature = "no_std")] core::compile_error!("no_std is not supported for timerfd implementation"); use crate::std::io; use core::future::Future; use core::pin::Pin; use core::{mem, ptr, task, time}; use libc::c_int; #[cfg(target_os = "android")] mod sys { #[repr(C)] pub struct itimerspec { pub it_interval: libc::timespec, pub it_value: libc::timespec, } extern "C" { pub fn timerfd_create(clockid: libc::clockid_t, flags: libc::c_int) -> libc::c_int; pub fn timerfd_settime(timerid: libc::c_int, flags: libc::c_int, new_value: *const itimerspec, old_value: *mut itimerspec) -> libc::c_int; } pub const TFD_NONBLOCK: libc::c_int = libc::O_NONBLOCK; } #[cfg(not(target_os = "android"))] use libc as sys; struct RawTimer(c_int); impl RawTimer { fn new() -> Self { let fd = unsafe { sys::timerfd_create(libc::CLOCK_MONOTONIC, sys::TFD_NONBLOCK) }; os_assert!(fd != -1); Self(fd) } fn set(&self, timer: sys::itimerspec) { let ret = unsafe { sys::timerfd_settime(self.0, 0, &timer, ptr::null_mut()) }; os_assert!(ret != -1); } fn read(&self) -> usize { let mut read_num = 0u64; match unsafe { libc::read(self.0, &mut read_num as *mut u64 as *mut _, 8) } { -1 => { let error = io::Error::last_os_error(); match error.kind() { io::ErrorKind::WouldBlock => 0, _ => panic!("Unexpected read error: {}", error), } } _ => read_num as usize, } } } impl mio::Evented for RawTimer { fn register(&self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> { mio::unix::EventedFd(&self.0).register(poll, token, interest, opts) } fn reregister(&self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> { mio::unix::EventedFd(&self.0).reregister(poll, token, interest, opts) } fn deregister(&self, poll: &mio::Poll) -> io::Result<()> { mio::unix::EventedFd(&self.0).deregister(poll) } } impl Drop for RawTimer { fn drop(&mut self) { unsafe { libc::close(self.0) }; } } enum State { Init(time::Duration), Running(bool), } fn 
set_timer_value(fd: &RawTimer, timeout: time::Duration) { #[cfg(not(target_pointer_width = "64"))] use core::convert::TryFrom; let it_value = libc::timespec { tv_sec: timeout.as_secs() as libc::time_t, #[cfg(target_pointer_width = "64")] tv_nsec: libc::suseconds_t::from(timeout.subsec_nanos()), #[cfg(not(target_pointer_width = "64"))] tv_nsec: libc::suseconds_t::try_from(timeout.subsec_nanos()).unwrap_or(libc::suseconds_t::max_value()), }; let new_value = sys::itimerspec { it_interval: unsafe { mem::zeroed() }, it_value, }; fd.set(new_value); } pub struct TimerFd { fd: tokio::io::PollEvented<RawTimer>, state: State, } impl super::Oneshot for TimerFd { fn new(timeout: time::Duration) -> Self { debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), "Zero timeout makes no sense"); Self { fd: tokio::io::PollEvented::new(RawTimer::new()).expect("To create PollEvented"), state: State::Init(timeout), } } fn is_ticking(&self) -> bool { match &self.state { State::Init(_) => false, State::Running(is_finished) => !*is_finished, } } fn is_expired(&self) -> bool { match &self.state { State::Init(_) => false, State::Running(is_finished) => *is_finished, } } fn cancel(&mut self) { self.fd.get_mut().set(unsafe { mem::zeroed() }); } fn restart(&mut self, new_value: time::Duration, _: &task::Waker) { debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), "Zero timeout makes no sense"); match &mut self.state { State::Init(ref mut timeout) => { *timeout = new_value; } State::Running(ref mut is_finished) => { *is_finished = false; set_timer_value(&self.fd.get_ref(), new_value); } } } } impl Future for TimerFd { type Output = (); fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> { loop { self.state = match &self.state { State::Init(ref timeout) => { set_timer_value(self.fd.get_ref(), *timeout); State::Running(false) } State::Running(false) => { match Pin::new(&mut self.fd).poll_read_ready(ctx, 
mio::Ready::readable()) { task::Poll::Pending => return task::Poll::Pending, task::Poll::Ready(ready) => match ready.map(|ready| ready.is_readable()).expect("timerfd cannot be ready") { true => { let _ = Pin::new(&mut self.fd).clear_read_ready(ctx, mio::Ready::readable()); match self.fd.get_mut().read() { 0 => return task::Poll::Pending, _ => return task::Poll::Ready(()), } } false => return task::Poll::Pending, }, } } State::Running(true) => return task::Poll::Ready(()), } } } }
//! Linux `timerfd`-based one-shot timer.
//!
//! Wraps a `timerfd_create` file descriptor in tokio's `PollEvented` so timer
//! expiration is delivered through the reactor instead of signals.

#[cfg(feature = "no_std")]
core::compile_error!("no_std is not supported for timerfd implementation");

use crate::std::io;

use core::future::Future;
use core::pin::Pin;
use core::{mem, ptr, task, time};

use libc::c_int;

// Android's libc bindings historically lacked the timerfd API, so declare the
// minimal FFI surface by hand; on every other target `libc` already provides it.
#[cfg(target_os = "android")]
mod sys {
    #[repr(C)]
    pub struct itimerspec {
        pub it_interval: libc::timespec,
        pub it_value: libc::timespec,
    }

    extern "C" {
        pub fn timerfd_create(clockid: libc::clockid_t, flags: libc::c_int) -> libc::c_int;
        pub fn timerfd_settime(timerid: libc::c_int, flags: libc::c_int, new_value: *const itimerspec, old_value: *mut itimerspec) -> libc::c_int;
    }

    pub const TFD_NONBLOCK: libc::c_int = libc::O_NONBLOCK;
}

#[cfg(not(target_os = "android"))]
use libc as sys;

///Thin RAII owner of a raw `timerfd` file descriptor.
struct RawTimer(c_int);

impl RawTimer {
    ///Creates a new non-blocking monotonic timerfd.
    ///
    ///Panics (via `os_assert!`) if the OS refuses to hand out a descriptor,
    ///which most commonly means the fd limit was hit.
    fn new() -> Self {
        let fd = unsafe { sys::timerfd_create(libc::CLOCK_MONOTONIC, sys::TFD_NONBLOCK) };

        os_assert!(fd != -1);
        Self(fd)
    }

    ///Arms (or with a zeroed spec, disarms) the timer.
    fn set(&self, timer: sys::itimerspec) {
        let ret = unsafe { sys::timerfd_settime(self.0, 0, &timer, ptr::null_mut()) };
        os_assert!(ret != -1);
    }

    ///Reads the expiration counter.
    ///
    ///Returns the number of expirations since the last read, or `0` when the
    ///read would block (i.e. the timer has not fired yet). Any other error is
    ///considered a bug and panics.
    fn read(&self) -> usize {
        let mut read_num = 0u64;
        match unsafe { libc::read(self.0, &mut read_num as *mut u64 as *mut _, 8) } {
            -1 => {
                let error = io::Error::last_os_error();
                match error.kind() {
                    io::ErrorKind::WouldBlock => 0,
                    _ => panic!("Unexpected read error: {}", error),
                }
            }
            _ => read_num as usize,
        }
    }
}

impl mio::Evented for RawTimer {
    fn register(&self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> {
        mio::unix::EventedFd(&self.0).register(poll, token, interest, opts)
    }

    fn reregister(&self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> {
        mio::unix::EventedFd(&self.0).reregister(poll, token, interest, opts)
    }

    fn deregister(&self, poll: &mio::Poll) -> io::Result<()> {
        mio::unix::EventedFd(&self.0).deregister(poll)
    }
}

impl Drop for RawTimer {
    fn drop(&mut self) {
        unsafe { libc::close(self.0) };
    }
}

///Timer lifecycle: either not yet armed (holding the pending timeout) or
///armed, with the flag recording whether it has already fired.
enum State {
    Init(time::Duration),
    Running(bool),
}

///Arms `fd` as a one-shot timer expiring after `timeout`.
///
///`it_interval` is zeroed so the timer fires exactly once. On targets whose
///`tv_nsec` field is narrower than 64 bits the nanosecond part is clamped via
///`try_from` instead of silently truncating.
fn set_timer_value(fd: &RawTimer, timeout: time::Duration) {
    #[cfg(not(target_pointer_width = "64"))]
    use core::convert::TryFrom;

    let it_value = libc::timespec {
        tv_sec: timeout.as_secs() as libc::time_t,
        #[cfg(target_pointer_width = "64")]
        tv_nsec: libc::suseconds_t::from(timeout.subsec_nanos()),
        #[cfg(not(target_pointer_width = "64"))]
        tv_nsec: libc::suseconds_t::try_from(timeout.subsec_nanos()).unwrap_or(libc::suseconds_t::max_value()),
    };

    let new_value = sys::itimerspec {
        it_interval: unsafe { mem::zeroed() },
        it_value,
    };

    fd.set(new_value);
}

///One-shot timer backed by Linux `timerfd`, driven by the tokio reactor.
pub struct TimerFd {
    fd: tokio::io::PollEvented<RawTimer>,
    state: State,
}

impl super::Oneshot for TimerFd {
    fn new(timeout: time::Duration) -> Self {
        debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), "Zero timeout makes no sense");

        Self {
            fd: tokio::io::PollEvented::new(RawTimer::new()).expect("To create PollEvented"),
            state: State::Init(timeout),
        }
    }

    fn is_ticking(&self) -> bool {
        // Ticking means armed and not yet fired; an un-polled timer is not armed.
        match &self.state {
            State::Init(_) => false,
            State::Running(is_finished) => !*is_finished,
        }
    }

    fn is_expired(&self) -> bool {
        match &self.state {
            State::Init(_) => false,
            State::Running(is_finished) => *is_finished,
        }
    }

    fn cancel(&mut self) {
        // A zeroed itimerspec disarms a timerfd.
        self.fd.get_mut().set(unsafe { mem::zeroed() });
    }

    fn restart(&mut self, new_value: time::Duration, _: &task::Waker) {
        debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), "Zero timeout makes no sense");

        match &mut self.state {
            // Not armed yet: just remember the new timeout for the first poll.
            State::Init(ref mut timeout) => {
                *timeout = new_value;
            }
            // Already armed: clear the finished flag and re-arm the fd.
            // The waker is unused because readiness is tracked by the reactor.
            State::Running(ref mut is_finished) => {
                *is_finished = false;
                set_timer_value(self.fd.get_ref(), new_value);
            }
        }
    }
}

impl Future for TimerFd {
    type Output = ();

    fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {
        loop {
            self.state = match &self.state {
                // First poll: arm the timer and transition to Running.
                State::Init(ref timeout) => {
                    set_timer_value(self.fd.get_ref(), *timeout);
                    State::Running(false)
                }
                State::Running(false) => {
                    match Pin::new(&mut self.fd).poll_read_ready(ctx, mio::Ready::readable()) {
                        task::Poll::Pending => return task::Poll::Pending,
                        task::Poll::Ready(ready) => match ready.map(|ready| ready.is_readable()).expect("timerfd cannot be ready") {
                            true => {
                                // Reset reactor readiness before reading so a
                                // spurious wakeup re-registers interest.
                                let _ = Pin::new(&mut self.fd).clear_read_ready(ctx, mio::Ready::readable());
                                match self.fd.get_mut().read() {
                                    // Would-block: timer has not actually expired yet.
                                    0 => return task::Poll::Pending,
                                    _ => return task::Poll::Ready(()),
                                }
                            }
                            false => return task::Poll::Pending,
                        },
                    }
                }
                State::Running(true) => return task::Poll::Ready(()),
            }
        }
    }
}
function_block-function_prefix_line
[ { "content": "fn time_create(timeout: time::Duration, state: *const TimerState) -> TimerHandle {\n\n let timeout = timeout.as_millis() as u32;\n\n\n\n let cb = wasm_bindgen::closure::Closure::once(move || unsafe {\n\n (*state).wake();\n\n });\n\n let handle = setTimeout(&cb, timeout);\n\n\n\n TimerHandle(handle)\n\n}\n\n\n", "file_path": "src/oneshot/web.rs", "rank": 0, "score": 196959.35454567993 }, { "content": "fn time_create(state: *mut TimerState) -> ffi::timer_t {\n\n let mut event: libc::sigevent = unsafe { mem::zeroed() };\n\n\n\n event.sigev_value = libc::sigval {\n\n sival_ptr: state as *mut _,\n\n };\n\n event.sigev_signo = TIMER_SIG;\n\n //NOTE: Timer handler is invoked by signal handler\n\n // Therefore all limitations are applied to your waker.\n\n // To be safe we could use thread, but in this case\n\n // we cannot really hope for it to be optimal...\n\n event.sigev_notify = libc::SIGEV_SIGNAL;\n\n\n\n let mut res = mem::MaybeUninit::<ffi::timer_t>::uninit();\n\n\n\n unsafe {\n\n os_assert!(ffi::timer_create(libc::CLOCK_MONOTONIC, &mut event, res.as_mut_ptr()) == 0);\n\n res.assume_init()\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/posix.rs", "rank": 1, "score": 164378.498161408 }, { "content": "fn time_create(state: *mut TimerState) -> ffi::PTP_TIMER {\n\n let timer = unsafe {\n\n ffi::CreateThreadpoolTimer(Some(timer_callback), state as *mut ffi::c_void, ptr::null_mut())\n\n };\n\n os_assert!(!timer.is_null());\n\n\n\n timer\n\n}\n\n\n", "file_path": "src/oneshot/win.rs", "rank": 2, "score": 160519.19861965952 }, { "content": "fn set_timer_value(fd: ffi::timer_t, timeout: time::Duration) {\n\n let it_value = libc::timespec {\n\n tv_sec: timeout.as_secs() as libc::time_t,\n\n #[cfg(not(any(target_os = \"openbsd\", target_os = \"netbsd\")))]\n\n tv_nsec: timeout.subsec_nanos() as libc::suseconds_t,\n\n #[cfg(any(target_os = \"openbsd\", target_os = \"netbsd\"))]\n\n tv_nsec: timeout.subsec_nanos() as libc::c_long,\n\n };\n\n\n\n let new_value = 
ffi::itimerspec {\n\n it_interval: unsafe { mem::zeroed() },\n\n it_value,\n\n };\n\n\n\n unsafe {\n\n os_assert!(ffi::timer_settime(fd, 0, &new_value, ptr::null_mut()) == 0);\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/posix.rs", "rank": 4, "score": 144948.56629001067 }, { "content": "fn set_timer_value(fd: ffi::PTP_TIMER, timeout: time::Duration) {\n\n let mut ticks = i64::from(timeout.subsec_nanos() / 100);\n\n ticks += (timeout.as_secs() * 10_000_000) as i64;\n\n let ticks = -ticks;\n\n\n\n unsafe {\n\n let mut time: ffi::FILETIME = mem::transmute(ticks);\n\n ffi::SetThreadpoolTimerEx(fd, &mut time, 0, 0);\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/win.rs", "rank": 5, "score": 141944.53222242597 }, { "content": "///Run future in timed fashion using default Platform timer.\n\npub fn timed<F: future::Future>(job: F, timeout: time::Duration) -> impl future::Future<Output=Result<F::Output, Expired<F, oneshot::Timer>>> {\n\n unsafe {\n\n Timed::platform_new_unchecked(job, timeout)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 126681.42963724723 }, { "content": "///Creates interval with default Platform timer.\n\npub fn interval(interval: time::Duration) -> Interval<oneshot::Timer> {\n\n Interval::platform_new(interval)\n\n}\n", "file_path": "src/lib.rs", "rank": 8, "score": 122732.76088525809 }, { "content": "enum State {\n\n Init(time::Duration),\n\n Running(TimerHandle, *const TimerState),\n\n}\n\n\n\n///Web timer wrapper\n\npub struct WebTimer {\n\n state: State,\n\n}\n\n\n\nimpl super::Oneshot for WebTimer {\n\n fn new(timeout: time::Duration) -> Self {\n\n debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n Self {\n\n state: State::Init(timeout),\n\n }\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n", "file_path": "src/oneshot/web.rs", "rank": 10, "score": 84530.13115867849 }, { "content": "enum State {\n\n Init(time::Duration),\n\n Running(TimerHandle, 
Box<TimerState>),\n\n}\n\n\n\n///Timer based on Apple APIs\n\npub struct AppleTimer {\n\n state: State,\n\n}\n\n\n\nimpl super::Oneshot for AppleTimer {\n\n fn new(timeout: time::Duration) -> Self {\n\n debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n Self {\n\n state: State::Init(timeout),\n\n }\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n", "file_path": "src/oneshot/apple.rs", "rank": 11, "score": 84530.13115867849 }, { "content": "enum State {\n\n Init(time::Duration),\n\n Running(ffi::timer_t, Box<TimerState>),\n\n}\n\n\n\n///Posix Timer\n\n///\n\n///Currently implemented only for `Linux` and `Android` as BSD systems\n\n///proved to be a bit problematic\n\npub struct PosixTimer {\n\n state: State,\n\n}\n\n\n\nimpl super::Oneshot for PosixTimer {\n\n fn new(timeout: time::Duration) -> Self {\n\n use crate::std::sync::Once;\n\n static RUNTIME: Once = Once::new();\n\n\n\n debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n", "file_path": "src/oneshot/posix.rs", "rank": 12, "score": 84530.13115867849 }, { "content": "enum State {\n\n Init(time::Duration),\n\n Running(ffi::PTP_TIMER, Box<TimerState>),\n\n}\n\n\n\n///Windows Native timer\n\npub struct WinTimer {\n\n state: State,\n\n}\n\n\n\nimpl super::Oneshot for WinTimer {\n\n fn new(timeout: time::Duration) -> Self {\n\n debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n Self {\n\n state: State::Init(timeout),\n\n }\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n", "file_path": "src/oneshot/win.rs", "rank": 13, "score": 84530.13115867849 }, { "content": "enum State {\n\n Init(time::Duration),\n\n Running(bool),\n\n}\n\n\n\n///Timer based on `kqueue`\n\npub struct KqueueTimer {\n\n fd: tokio::io::PollEvented<RawTimer>,\n\n state: State,\n\n}\n\n\n\nimpl super::Oneshot for KqueueTimer {\n\n fn new(timeout: time::Duration) -> Self 
{\n\n debug_assert!(!(timeout.as_secs() == 0 && timeout.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n Self {\n\n fd: tokio::io::PollEvented::new(RawTimer::new()).expect(\"To create PollEvented\"),\n\n state: State::Init(timeout),\n\n }\n\n }\n", "file_path": "src/oneshot/kqueue.rs", "rank": 14, "score": 84530.13115867849 }, { "content": "// Based on futures-rs\n\nstruct AtomicWaker {\n\n state: AtomicU8,\n\n waker: UnsafeCell<Option<Waker>>,\n\n}\n\n\n\n/// Idle state\n\nconst WAITING: u8 = 0;\n\n\n\n/// A new waker value is being registered with the `AtomicWaker` cell.\n\nconst REGISTERING: u8 = 0b01;\n\n\n\n/// The waker currently registered with the `AtomicWaker` cell is being woken.\n\nconst WAKING: u8 = 0b10;\n\n\n\nimpl AtomicWaker {\n\n const fn new() -> Self {\n\n Self {\n\n state: AtomicU8::new(WAITING),\n\n waker: UnsafeCell::new(None),\n\n }\n", "file_path": "src/oneshot/state.rs", "rank": 15, "score": 65992.83733406977 }, { "content": "struct TimerHandle {\n\n inner: ffi::dispatch_source_t,\n\n //Suspension count. 
Incremented suspend, and decremented on each resume\n\n s_count: u8,\n\n}\n\n\n\nimpl Drop for TimerHandle {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ffi::dispatch_source_cancel(self.inner);\n\n\n\n //It is error to release while source is suspended\n\n //So we decrement it\n\n self.resume();\n\n\n\n ffi::dispatch_release(self.inner);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/apple.rs", "rank": 16, "score": 65803.14764441113 }, { "content": "struct TimerHandle(i32);\n\n\n\nimpl TimerHandle {\n\n #[inline]\n\n fn clear(&mut self) {\n\n clearTimeout(self.0)\n\n }\n\n}\n\n\n\nimpl Drop for TimerHandle {\n\n fn drop(&mut self) {\n\n self.clear();\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/web.rs", "rank": 17, "score": 62268.366172016875 }, { "content": "struct RawTimer(c_int);\n\n\n\nimpl RawTimer {\n\n fn new() -> Self {\n\n let fd = nix::sys::event::kqueue().unwrap_or(-1);\n\n\n\n //If you hit this, then most likely you run into OS imposed limit on file descriptor number\n\n os_assert!(fd != -1);\n\n Self(fd)\n\n }\n\n\n\n fn set(&self, time: time::Duration) {\n\n use nix::sys::event::*;\n\n\n\n let flags = EventFlag::EV_ADD | EventFlag::EV_ENABLE | EventFlag::EV_ONESHOT;\n\n let mut time = time.as_nanos();\n\n let mut unit = FilterFlag::NOTE_NSECONDS;\n\n\n\n if time > isize::max_value() as u128 {\n\n unit = FilterFlag::NOTE_USECONDS;\n", "file_path": "src/oneshot/kqueue.rs", "rank": 18, "score": 62268.366172016875 }, { "content": "///One-shot timer that expires once\n\n///\n\n///Trait itself describes `Future` that resolves after `timeout`\n\n///\n\n///Most common platforms are supplied via alias [Timer](type.Timer.html)\n\n///\n\n///## Common implementations:\n\n///\n\n///- Windows uses thread pooled timer\n\n///- Apple systems uses dispatch source API\n\n///- Posix compatible `timer_create`, available on major Posix-compliant systems. 
Depends on availability of `siginfo_t::si_value` method.\n\n///- Wasm uses Web API `SetTimeout`\n\n///- Dummy timer is used when no implementation is available. Panics when used.\n\n///\n\n///## Feature `tokio_on`\n\n///\n\n///- Linux uses `timerfd_create`, replaces Posix tiemr when enabled.\n\n///- Other unix systems uses `kqueue`, replaces Apple timer when enabled.\n\n///\n\n///```rust, no_run\n\n/// use async_timer::oneshot::{Oneshot, Timer};\n\n///\n\n/// use std::time;\n\n///\n\n/// async fn do_stuff() {\n\n/// let work = Timer::new(time::Duration::from_secs(2));\n\n///\n\n/// let before = time::SystemTime::now();\n\n/// work.await;\n\n/// let after = time::SystemTime::now();\n\n/// let diff = after.duration_since(before).unwrap();\n\n///\n\n/// assert_eq!(diff.as_secs(), 2);\n\n/// }\n\n///\n\n///```\n\npub trait Oneshot: Send + Sync + Unpin + Future<Output=()> {\n\n ///Creates new instance without actually starting timer.\n\n ///\n\n ///Timer should start only on first `Future::poll`\n\n fn new(timeout: time::Duration) -> Self;\n\n\n\n ///Returns whether timer is ongoing.\n\n ///\n\n ///Note that if it returns `false` it doesn't mean that `is_expired` will return `true`\n\n ///as initially timer is not armed.\n\n fn is_ticking(&self) -> bool;\n\n\n\n ///Returns whether timer has expired.\n\n fn is_expired(&self) -> bool;\n\n\n\n ///Cancels ongoing timer, if it is not expired yet.\n\n fn cancel(&mut self);\n\n\n\n ///Restarts timer with new timeout value.\n\n ///\n", "file_path": "src/oneshot/mod.rs", "rank": 19, "score": 52050.098503168476 }, { "content": "fn init() {\n\n let mut sa_mask = mem::MaybeUninit::<libc::sigset_t>::uninit();\n\n unsafe {\n\n libc::sigemptyset(sa_mask.as_mut_ptr());\n\n }\n\n\n\n let timer_sig = libc::sigaction {\n\n sa_flags: libc::SA_SIGINFO,\n\n sa_sigaction: ffi::timer_handler as usize,\n\n sa_mask: unsafe { sa_mask.assume_init() },\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n\n sa_restorer: None,\n\n 
};\n\n\n\n unsafe {\n\n os_assert!(libc::sigaction(TIMER_SIG, &timer_sig, ptr::null_mut()) != -1);\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/posix.rs", "rank": 28, "score": 41869.90339706038 }, { "content": " Timed::Ongoing(T::new(timeout), inner, timeout)\n\n }\n\n}\n\n\n\nimpl<F: Future, T: Oneshot> Timed<F, T> {\n\n ///Creates new instance with specified timeout\n\n ///\n\n ///Unsafe version of `new` that doesn't require `Unpin`.\n\n ///\n\n ///Requires to specify `Oneshot` type (e.g. `Timed::<oneshoot::Timer>::new()`)\n\n pub unsafe fn new_unchecked(inner: F, timeout: time::Duration) -> Self {\n\n Timed::Ongoing(T::new(timeout), inner, timeout)\n\n }\n\n}\n\n\n\nimpl<F: Future, T: Oneshot> Future for Timed<F, T> {\n\n type Output = Result<F::Output, Expired<F, T>>;\n\n\n\n fn poll(self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n let mut state = Timed::Stopped;\n", "file_path": "src/timed.rs", "rank": 29, "score": 27251.52374431646 }, { "content": " _ => unreach!(),\n\n }\n\n }\n\n}\n\n\n\nimpl<F: Future, T: Oneshot> Future for Expired<F, T> {\n\n type Output = Timed<F, T>;\n\n\n\n fn poll(self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n let mut state = Timed::Stopped;\n\n let this = unsafe { self.get_unchecked_mut() };\n\n mem::swap(&mut this.inner, &mut state);\n\n\n\n match state {\n\n Timed::Ongoing(mut timer, future, timeout) => {\n\n timer.restart(timeout, ctx.waker());\n\n\n\n task::Poll::Ready(Timed::Ongoing(timer, future, timeout))\n\n },\n\n _ => task::Poll::Pending,\n", "file_path": "src/timed.rs", "rank": 30, "score": 27250.214707076357 }, { "content": " let mut this = unsafe { self.get_unchecked_mut() };\n\n mem::swap(&mut state, &mut this);\n\n\n\n match state {\n\n Timed::Ongoing(mut timer, mut future, timeout) => {\n\n match Future::poll(unsafe { Pin::new_unchecked(&mut future) }, ctx) {\n\n task::Poll::Pending => (),\n\n task::Poll::Ready(result) => return 
task::Poll::Ready(Ok(result)),\n\n }\n\n\n\n match Future::poll(Pin::new(&mut timer), ctx) {\n\n task::Poll::Pending => (),\n\n task::Poll::Ready(_) => return task::Poll::Ready(Err(Expired {\n\n inner: Timed::Ongoing(timer, future, timeout),\n\n })),\n\n }\n\n\n\n *this = Timed::Ongoing(timer, future, timeout);\n\n task::Poll::Pending\n\n },\n", "file_path": "src/timed.rs", "rank": 31, "score": 27249.875258485255 }, { "content": " pub fn platform_new(inner: F, timeout: time::Duration) -> Self {\n\n Timed::<F, PlatformTimer>::new(inner, timeout)\n\n }\n\n}\n\n\n\nimpl<F: Future> Timed<F> {\n\n #[inline]\n\n ///Creates new instance using [Timer](../oneshot/type.Timer.html) alias.\n\n ///\n\n ///Unsafe version of `platform_new` that doesn't require `Unpin`.\n\n pub unsafe fn platform_new_unchecked(inner: F, timeout: time::Duration) -> Self {\n\n Timed::<F, PlatformTimer>::new_unchecked(inner, timeout)\n\n }\n\n}\n\n\n\nimpl<F: Future + Unpin, T: Oneshot> Timed<F, T> {\n\n ///Creates new instance with specified timeout\n\n ///\n\n ///Requires to specify `Oneshot` type (e.g. 
`Timed::<oneshoot::Timer>::new()`)\n\n pub fn new(inner: F, timeout: time::Duration) -> Self {\n", "file_path": "src/timed.rs", "rank": 32, "score": 27248.500689766253 }, { "content": "/// async_timer::Timed::platform_new_unchecked(job(), core::time::Duration::from_secs(1))\n\n/// };\n\n///\n\n/// match work.await {\n\n/// Ok(_) => println!(\"I'm done!\"),\n\n/// //You can retry by polling `expired`\n\n/// Err(expired) => println!(\"Job expired: {}\", expired),\n\n/// }\n\n///}\n\n///```\n\npub enum Timed<F, T=PlatformTimer> {\n\n #[doc(hidden)]\n\n Ongoing(T, F, time::Duration),\n\n #[doc(hidden)]\n\n Stopped,\n\n}\n\n\n\nimpl<F: Future + Unpin> Timed<F> {\n\n #[inline]\n\n ///Creates new instance using [Timer](../oneshot/type.Timer.html) alias.\n", "file_path": "src/timed.rs", "rank": 33, "score": 27242.14823138179 }, { "content": " Timed::Stopped => task::Poll::Pending,\n\n }\n\n }\n\n}\n\n\n\nimpl<F: Future + Unpin, T: Oneshot> Unpin for Timed<F, T> {}\n\n\n\n///Error when [Timed](struct.Timed.html) expires\n\n///\n\n///Implements `Future` that can be used to restart `Timed`\n\n///Note, that `Oneshot` starts execution immediately after resolving this Future\n\npub struct Expired<F, T> {\n\n inner: Timed<F, T>,\n\n}\n\n\n\nimpl<F: Future, T: Oneshot> Expired<F, T> {\n\n ///Returns underlying `Future`\n\n pub fn into_inner(self) -> F {\n\n match self.inner {\n\n Timed::Ongoing(_, fut, _) => fut,\n", "file_path": "src/timed.rs", "rank": 34, "score": 27242.110608602827 }, { "content": "//! 
Timed future\n\n\n\nuse core::future::Future;\n\nuse core::{fmt, task, time, mem};\n\nuse core::pin::Pin;\n\n\n\nuse crate::oneshot::Oneshot;\n\nuse crate::oneshot::Timer as PlatformTimer;\n\n\n\n#[must_use = \"Timed does nothing unless polled\"]\n\n///Limiter on time to wait for underlying `Future`\n\n///\n\n///# Usage\n\n///\n\n///```rust, no_run\n\n///async fn job() {\n\n///}\n\n///\n\n///async fn do_job() {\n\n/// let work = unsafe {\n", "file_path": "src/timed.rs", "rank": 35, "score": 27238.217262569633 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<F: Future + Unpin, T: Oneshot> Unpin for Expired<F, T> {}\n\n\n\n#[cfg(not(feature = \"no_std\"))]\n\nimpl<F, T: Oneshot> crate::std::error::Error for Expired<F, T> {}\n\nimpl<F, T: Oneshot> fmt::Debug for Expired<F, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self)\n\n }\n\n}\n\n\n\nimpl<F, T: Oneshot> fmt::Display for Expired<F, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.inner {\n\n Timed::Stopped => write!(f, \"Future is being re-tried.\"),\n\n Timed::Ongoing(_, _, timeout) => match timeout.as_secs() {\n\n 0 => write!(f, \"Future expired in {} ms\", timeout.as_millis()),\n\n secs => write!(f, \"Future expired in {} seconds and {} ms\", secs, timeout.subsec_millis()),\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/timed.rs", "rank": 36, "score": 27238.035563160207 }, { "content": "use async_timer::{Timed};\n\nuse async_timer::oneshot::{Oneshot, Timer};\n\n\n\nuse std::time;\n\n\n\n#[tokio::test]\n\nasync fn test_timed() {\n\n let future = Timer::new(time::Duration::from_secs(4));\n\n let work = Timed::platform_new(future, time::Duration::from_secs(3));\n\n\n\n let before = time::SystemTime::now();\n\n\n\n let expired = work.await.unwrap_err();\n\n let work = expired.await;\n\n\n\n assert!(work.await.is_ok());\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n 
assert!(diff.as_millis() >= 3_500 && diff.as_millis() <= 4_500);\n\n}\n", "file_path": "tests/timed.rs", "rank": 37, "score": 27235.192142524018 }, { "content": " ///If timer is already running, then over-write old value and replaces waker.\n\n fn restart(&mut self, timeout: time::Duration, waker: &task::Waker);\n\n}\n\n\n\nmod state;\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\npub mod web;\n\n#[cfg(windows)]\n\npub mod win;\n\n#[cfg(all(unix, not(any(target_os = \"macos\", target_os = \"ios\"))))]\n\npub mod posix;\n\n#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\npub mod apple;\n\n#[cfg(all(feature = \"tokio_on\", any(target_os = \"linux\", target_os = \"android\")))]\n\npub mod timer_fd;\n\n#[cfg(all(feature = \"tokio_on\", any(target_os = \"bitrig\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"ios\", target_os = \"macos\", target_os = \"netbsd\", target_os = \"openbsd\")))]\n\npub mod kqueue;\n\npub mod dummy;\n\nmod extra;\n", "file_path": "src/oneshot/mod.rs", "rank": 38, "score": 26591.033468776743 }, { "content": "\n\npub use extra::NeverTimer;\n\n\n\n#[cfg(all(feature = \"tokio_on\", any(target_os = \"linux\", target_os = \"android\")))]\n\npub use timer_fd::TimerFd;\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\n///Alias to Web based Timer.\n\npub type Timer = web::WebTimer;\n\n\n\n#[cfg(windows)]\n\n///Alias to Windows Timer\n\npub type Timer = win::WinTimer;\n\n\n\n#[cfg(all(not(feature = \"tokio_on\"), not(any(target_os = \"macos\", target_os = \"ios\")), unix))]\n\n///Alias to Posix Timer\n\npub type Timer = posix::PosixTimer;\n\n#[cfg(all(feature = \"tokio_on\", any(target_os = \"linux\", target_os = \"android\")))]\n\n///Alias to Linux `timerfd` Timer\n\npub type Timer = timer_fd::TimerFd;\n", "file_path": "src/oneshot/mod.rs", "rank": 39, "score": 26589.01781342741 }, { "content": "//! 
One-shot Timer\n\n\n\nuse core::{task, time};\n\nuse core::marker::Unpin;\n\nuse core::future::Future;\n\n\n\n///One-shot timer that expires once\n\n///\n\n///Trait itself describes `Future` that resolves after `timeout`\n\n///\n\n///Most common platforms are supplied via alias [Timer](type.Timer.html)\n\n///\n\n///## Common implementations:\n\n///\n\n///- Windows uses thread pooled timer\n\n///- Apple systems uses dispatch source API\n\n///- Posix compatible `timer_create`, available on major Posix-compliant systems. Depends on availability of `siginfo_t::si_value` method.\n\n///- Wasm uses Web API `SetTimeout`\n\n///- Dummy timer is used when no implementation is available. Panics when used.\n\n///\n", "file_path": "src/oneshot/mod.rs", "rank": 40, "score": 26589.00465582057 }, { "content": "///## Feature `tokio_on`\n\n///\n\n///- Linux uses `timerfd_create`, replaces Posix tiemr when enabled.\n\n///- Other unix systems uses `kqueue`, replaces Apple timer when enabled.\n\n///\n\n///```rust, no_run\n\n/// use async_timer::oneshot::{Oneshot, Timer};\n\n///\n\n/// use std::time;\n\n///\n\n/// async fn do_stuff() {\n\n/// let work = Timer::new(time::Duration::from_secs(2));\n\n///\n\n/// let before = time::SystemTime::now();\n\n/// work.await;\n\n/// let after = time::SystemTime::now();\n\n/// let diff = after.duration_since(before).unwrap();\n\n///\n\n/// assert_eq!(diff.as_secs(), 2);\n\n/// }\n\n///\n\n///```\n", "file_path": "src/oneshot/mod.rs", "rank": 41, "score": 26587.905646306473 }, { "content": "\n\n#[cfg(all(not(feature = \"tokio_on\"), any(target_os = \"macos\", target_os = \"ios\")))]\n\n///Alias to Apple Timer\n\npub type Timer = apple::AppleTimer;\n\n#[cfg(all(feature = \"tokio_on\", any(target_os = \"bitrig\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"ios\", target_os = \"macos\", target_os = \"netbsd\", target_os = \"openbsd\")))]\n\n///Alias to `kqueue` based Timer\n\npub type Timer = 
kqueue::KqueueTimer;\n\n\n\n#[cfg(not(any(\n\nwindows, target_arch = \"wasm32\", unix,\n\nall(feature = \"tokio_on\", any(target_os = \"bitrig\", target_os = \"ios\", target_os = \"macos\"))\n\n)))]\n\n///Dummy Timer\n\npub type Timer = dummy::DummyTimer;\n", "file_path": "src/oneshot/mod.rs", "rank": 42, "score": 26579.766395493203 }, { "content": " state == REGISTERING | WAKING ||\n\n state == WAKING);\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for AtomicWaker {}\n\nunsafe impl Sync for AtomicWaker {}\n\n\n\n///Timer's state\n\npub struct TimerState {\n\n ///Underlying waker\n\n inner: AtomicWaker,\n\n}\n\n\n\nimpl TimerState {\n\n ///Initializes state.\n\n pub const fn new() -> Self {\n", "file_path": "src/oneshot/state.rs", "rank": 43, "score": 25765.097272312483 }, { "content": " Self {\n\n inner: AtomicWaker::new(),\n\n }\n\n }\n\n\n\n #[inline]\n\n ///Returns whether notification has been fired.\n\n ///\n\n ///Namely it checks whether `Waker` is registered\n\n ///with `TimerState` or not. 
It is not intended for user\n\n ///to call `is_done` before `register`\n\n pub fn is_done(&self) -> bool {\n\n !self.inner.is_registered()\n\n }\n\n\n\n #[inline]\n\n ///Registers `Waker` with state\n\n pub fn register(&self, waker: &task::Waker) {\n\n self.inner.register(waker)\n\n }\n", "file_path": "src/oneshot/state.rs", "rank": 44, "score": 25761.289401748047 }, { "content": " }\n\n\n\n fn is_registered(&self) -> bool {\n\n match self.state.load(Acquire) {\n\n WAITING => unsafe { (*self.waker.get()).is_some() },\n\n //If we're WAKING then early false\n\n state => (state & WAKING) == 0,\n\n }\n\n }\n\n\n\n fn wake(&self) {\n\n if let Some(waker) = self.take() {\n\n waker.wake();\n\n }\n\n }\n\n\n\n fn take(&self) -> Option<Waker> {\n\n // AcqRel ordering is used in order to acquire the value of the `task`\n\n // cell as well as to establish a `release` ordering with whatever\n\n // memory the `AtomicWaker` is associated with.\n", "file_path": "src/oneshot/state.rs", "rank": 45, "score": 25758.92787727808 }, { "content": "//!State module\n\n\n\nuse core::cell::UnsafeCell;\n\nuse core::sync::atomic::{AtomicU8};\n\nuse core::sync::atomic::Ordering::{Acquire, Release, AcqRel};\n\nuse core::task::{self, Waker};\n\n\n\n// Based on futures-rs\n", "file_path": "src/oneshot/state.rs", "rank": 46, "score": 25756.490031116133 }, { "content": " match self.state.fetch_or(WAKING, AcqRel) {\n\n WAITING => {\n\n // The waking lock has been acquired.\n\n let waker = unsafe { (*self.waker.get()).take() };\n\n\n\n // Release the lock\n\n self.state.fetch_and(!WAKING, Release);\n\n\n\n waker\n\n }\n\n state => {\n\n // There is a concurrent thread currently updating the\n\n // associated task.\n\n //\n\n // Nothing more to do as the `WAKING` bit has been set. 
It\n\n // doesn't matter if there are concurrent registering threads or\n\n // not.\n\n //\n\n debug_assert!(\n\n state == REGISTERING ||\n", "file_path": "src/oneshot/state.rs", "rank": 47, "score": 25755.37866701082 }, { "content": " }\n\n WAKING => {\n\n // Currently in the process of waking the task, i.e.,\n\n // `wake` is currently being called on the old task handle.\n\n // So, we call wake on the new waker\n\n waker.wake_by_ref();\n\n }\n\n state => {\n\n // In this case, a concurrent thread is holding the\n\n // \"registering\" lock. This probably indicates a bug in the\n\n // caller's code as racing to call `register` doesn't make much\n\n // sense.\n\n //\n\n // We just want to maintain memory safety. It is ok to drop the\n\n // call to `register`.\n\n debug_assert!(\n\n state == REGISTERING ||\n\n state == REGISTERING | WAKING);\n\n }\n\n }\n", "file_path": "src/oneshot/state.rs", "rank": 48, "score": 25755.307634717738 }, { "content": "\n\n #[inline]\n\n ///Notifies underlying `Waker`\n\n ///\n\n ///After that `Waker` is no longer registered with `TimerState`\n\n pub fn wake(&self) {\n\n self.inner.wake();\n\n }\n\n}\n", "file_path": "src/oneshot/state.rs", "rank": 49, "score": 25754.11312638776 }, { "content": " }\n\n\n\n fn register(&self, waker: &Waker) {\n\n match self.state.compare_and_swap(WAITING, REGISTERING, Acquire) {\n\n WAITING => {\n\n unsafe {\n\n // Locked acquired, update the waker cell\n\n *self.waker.get() = Some(waker.clone());\n\n\n\n // Release the lock. 
If the state transitioned to include\n\n // the `WAKING` bit, this means that a wake has been\n\n // called concurrently, so we have to remove the waker and\n\n // wake it.`\n\n //\n\n // Start by assuming that the state is `REGISTERING` as this\n\n // is what we jut set it to.\n\n let res = self.state.compare_exchange(REGISTERING, WAITING, AcqRel, Acquire);\n\n\n\n match res {\n\n Ok(_) => {}\n", "file_path": "src/oneshot/state.rs", "rank": 50, "score": 25753.834936776344 }, { "content": " Err(actual) => {\n\n // This branch can only be reached if a\n\n // concurrent thread called `wake`. In this\n\n // case, `actual` **must** be `REGISTERING |\n\n // `WAKING`.\n\n debug_assert_eq!(actual, REGISTERING | WAKING);\n\n\n\n // Take the waker to wake once the atomic operation has\n\n // completed.\n\n let waker = (*self.waker.get()).take().unwrap();\n\n\n\n // Just swap, because no one could change state while state == `REGISTERING` | `WAKING`.\n\n self.state.swap(WAITING, AcqRel);\n\n\n\n // The atomic swap was complete, now\n\n // wake the task and return.\n\n waker.wake();\n\n }\n\n }\n\n }\n", "file_path": "src/oneshot/state.rs", "rank": 51, "score": 25751.20598349465 }, { "content": "# async-timer\n\n\n\n![](https://github.com/DoumanAsh/async-timer/workflows/Rust/badge.svg)\n\n[![Crates.io](https://img.shields.io/crates/v/async-timer.svg)](https://crates.io/crates/async-timer)\n\n[![Documentation](https://docs.rs/async-timer/badge.svg)](https://docs.rs/crate/async-timer/)\n\n[![dependency status](https://deps.rs/crate/async-timer/0.7.3/status.svg)](https://deps.rs/crate/async-timer)\n\n\n\nTimer facilities for Rust's async story\n\n\n\nMinimal Rust version: 1.36\n\n\n\n## Timed\n\n\n\n```rust\n\nasync fn job() {\n\n}\n\n\n\nasync fn do_job() {\n\n let work = unsafe {\n\n async_timer::Timed::platform_new_unchecked(job(), core::time::Duration::from_secs(1))\n\n };\n\n\n\n match work.await {\n\n Ok(_) => println!(\"I'm done!\"),\n\n //You can retry by polling 
`expired`\n\n Err(expired) => println!(\"Job expired: {}\", expired),\n\n }\n\n}\n\n```\n\n\n\n## Interval\n\n\n\n```rust\n\nasync fn job() {\n\n}\n\n\n\nasync fn do_a_while() {\n\n let mut times: u8 = 0;\n\n let mut interval = async_timer::Interval::platform_new(core::time::Duration::from_secs(1));\n\n\n\n while times < 5 {\n\n job().await;\n\n interval.as_mut().await;\n\n times += 1;\n\n }\n\n}\n\n```\n\n\n\n## Q&A\n\n\n\nQ: When it is going to be async/await?\n\n\n\nA: When async/await will become `no_std`\n", "file_path": "README.md", "rank": 52, "score": 17117.655551234137 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl Future for PosixTimer {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n self.state = match &self.state {\n\n State::Init(ref timeout) => {\n\n let state = Box::into_raw(Box::new(TimerState::new()));\n\n let fd = time_create(state);\n\n\n\n let state = unsafe { Box::from_raw(state) };\n\n state.register(ctx.waker());\n\n\n\n set_timer_value(fd, *timeout);\n\n\n\n State::Running(fd, state)\n", "file_path": "src/oneshot/posix.rs", "rank": 53, "score": 30.574604897735473 }, { "content": " fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n self.state = match &self.state {\n\n State::Init(ref timeout) => {\n\n let state = Box::into_raw(Box::new(TimerState::new()));\n\n let mut fd = TimerHandle::new(state);\n\n\n\n let state = unsafe { Box::from_raw(state) };\n\n state.register(ctx.waker());\n\n\n\n fd.set_delay(*timeout);\n\n\n\n State::Running(fd, state)\n\n },\n\n State::Running(_, ref state) => match state.is_done() {\n\n false => return task::Poll::Pending,\n\n true => return task::Poll::Ready(()),\n\n }\n\n };\n\n\n\n task::Poll::Pending\n\n }\n\n}\n\n\n\nunsafe impl Send for AppleTimer {}\n\nunsafe impl Sync for AppleTimer {}\n", "file_path": "src/oneshot/apple.rs", "rank": 54, "score": 29.186403994783632 }, { "content": "\n\n 
fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n self.state = match &self.state {\n\n State::Init(ref timeout) => {\n\n let state = Box::into_raw(Box::new(TimerState::new()));\n\n let fd = time_create(state);\n\n\n\n let state = unsafe { Box::from_raw(state) };\n\n state.register(ctx.waker());\n\n\n\n set_timer_value(fd, *timeout);\n\n\n\n State::Running(fd, state)\n\n },\n\n State::Running(_, ref state) => match state.is_done() {\n\n false => return task::Poll::Pending,\n\n true => return task::Poll::Ready(()),\n\n }\n\n };\n\n\n", "file_path": "src/oneshot/win.rs", "rank": 55, "score": 29.08923965471598 }, { "content": " fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n self.state = match &self.state {\n\n State::Init(ref timeout) => {\n\n let state = TimerState::new();\n\n state.register(ctx.waker());\n\n\n\n let state = Box::into_raw(Box::new(state));\n\n let fd = time_create(*timeout, state);\n\n\n\n State::Running(fd, state)\n\n },\n\n State::Running(_, ref state) => match unsafe { (**state).is_done() } {\n\n false => return task::Poll::Pending,\n\n true => return task::Poll::Ready(()),\n\n }\n\n };\n\n\n\n task::Poll::Pending\n\n }\n\n}\n", "file_path": "src/oneshot/web.rs", "rank": 56, "score": 28.918785125770185 }, { "content": " let value = (*info).si_value();\n\n\n\n value.sival_ptr as *const TimerState\n\n }\n\n pub unsafe extern \"C\" fn timer_handler(_sig: libc::c_int, si: *mut libc::siginfo_t, _uc: *mut libc::c_void) {\n\n let state = get_value(si);\n\n\n\n (*state).wake();\n\n }\n\n\n\n #[repr(C)]\n\n pub struct itimerspec {\n\n pub it_interval: libc::timespec,\n\n pub it_value: libc::timespec,\n\n }\n\n\n\n extern \"C\" {\n\n pub fn timer_create(clockid: libc::clockid_t, sevp: *mut libc::sigevent, timerid: *mut timer_t) -> libc::c_int;\n\n pub fn timer_settime(timerid: timer_t, flags: libc::c_int, new_value: *const itimerspec, old_value: *mut itimerspec) -> 
libc::c_int;\n\n pub fn timer_delete(timerid: timer_t);\n\n }\n\n}\n\n\n\nconst TIMER_SIG: libc::c_int = 40;\n\n\n", "file_path": "src/oneshot/posix.rs", "rank": 57, "score": 28.332477330168324 }, { "content": "//! Posix based timer\n\n\n\n#[cfg(feature = \"no_std\")]\n\ncore::compile_error!(\"no_std is not supported for posix implementation\");\n\n\n\nuse core::future::Future;\n\nuse core::pin::Pin;\n\nuse core::{mem, ptr, time, task};\n\n\n\nuse super::state::TimerState;\n\nuse crate::alloc::boxed::Box;\n\n\n\nmod ffi {\n\n use super::*;\n\n\n\n #[allow(non_camel_case_types)]\n\n pub type timer_t = usize;\n\n\n\n #[inline(always)]\n\n unsafe fn get_value(info: *mut libc::siginfo_t) -> *const TimerState {\n", "file_path": "src/oneshot/posix.rs", "rank": 58, "score": 27.718120439353427 }, { "content": " }\n\n\n\n fn restart(&mut self, new_value: time::Duration, waker: &task::Waker) {\n\n debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n match &mut self.state {\n\n State::Init(ref mut timeout) => {\n\n *timeout = new_value\n\n },\n\n State::Running(ref mut fd, ref state) => {\n\n unsafe { (**state).register(waker) };\n\n *fd = time_create(new_value, *state);\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Future for WebTimer {\n\n type Output = ();\n\n\n", "file_path": "src/oneshot/web.rs", "rank": 59, "score": 27.52936496853594 }, { "content": " }\n\n\n\n fn read(&self) -> usize {\n\n use nix::sys::event::*;\n\n\n\n let mut ev = [KEvent::new(0, EventFilter::EVFILT_TIMER, EventFlag::empty(), FilterFlag::empty(), 0, 0)];\n\n\n\n kevent(self.0, &[], &mut ev[..], 0).expect(\"To execute kevent\")\n\n }\n\n}\n\n\n\nimpl mio::Evented for RawTimer {\n\n fn register(&self, poll: &mio::Poll, token: mio::Token, mut interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> {\n\n interest.remove(mio::Ready::writable());\n\n mio::unix::EventedFd(&self.0).register(poll, token, interest, opts)\n\n }\n\n\n\n fn 
reregister(&self, poll: &mio::Poll, token: mio::Token, mut interest: mio::Ready, opts: mio::PollOpt) -> io::Result<()> {\n\n interest.remove(mio::Ready::writable());\n\n mio::unix::EventedFd(&self.0).reregister(poll, token, interest, opts)\n", "file_path": "src/oneshot/kqueue.rs", "rank": 60, "score": 27.105169623820597 }, { "content": " debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n match &mut self.state {\n\n State::Init(ref mut timeout) => {\n\n *timeout = new_value;\n\n },\n\n State::Running(ref mut is_finished) => {\n\n *is_finished = false;\n\n self.fd.get_ref().set(new_value);\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Future for KqueueTimer {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n loop {\n\n self.state = match &self.state {\n", "file_path": "src/oneshot/kqueue.rs", "rank": 61, "score": 26.82853713273247 }, { "content": "\n\n fn cancel(&mut self) {\n\n match self.state {\n\n State::Init(_) => (),\n\n State::Running(fd, _) => unsafe {\n\n ffi::timer_settime(fd, 0, &mut mem::zeroed(), ptr::null_mut());\n\n }\n\n }\n\n }\n\n\n\n fn restart(&mut self, new_value: time::Duration, waker: &task::Waker) {\n\n debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n match &mut self.state {\n\n State::Init(ref mut timeout) => {\n\n *timeout = new_value;\n\n },\n\n State::Running(fd, ref mut state) => {\n\n state.register(waker);\n\n set_timer_value(*fd, new_value);\n", "file_path": "src/oneshot/posix.rs", "rank": 62, "score": 26.640565699354426 }, { "content": " task::Poll::Pending\n\n }\n\n}\n\n\n\nimpl Drop for WinTimer {\n\n fn drop(&mut self) {\n\n match self.state {\n\n State::Init(_) => (),\n\n State::Running(fd, _) => unsafe {\n\n ffi::SetThreadpoolTimerEx(fd, ptr::null_mut(), 0, 0);\n\n ffi::WaitForThreadpoolTimerCallbacks(fd, 1);\n\n 
ffi::CloseThreadpoolTimer(fd);\n\n }\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for WinTimer {}\n\nunsafe impl Sync for WinTimer {}\n", "file_path": "src/oneshot/win.rs", "rank": 63, "score": 26.11949189139049 }, { "content": " },\n\n State::Running(_, ref state) => match state.is_done() {\n\n false => return task::Poll::Pending,\n\n true => return task::Poll::Ready(()),\n\n }\n\n };\n\n\n\n task::Poll::Pending\n\n }\n\n}\n\n\n\nimpl Drop for PosixTimer {\n\n fn drop(&mut self) {\n\n match self.state {\n\n State::Init(_) => (),\n\n State::Running(fd, _) => unsafe {\n\n ffi::timer_delete(fd);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/oneshot/posix.rs", "rank": 64, "score": 25.09505540839127 }, { "content": " }\n\n }\n\n\n\n fn restart(&mut self, new_value: time::Duration, waker: &task::Waker) {\n\n debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n match &mut self.state {\n\n State::Init(ref mut timeout) => {\n\n *timeout = new_value;\n\n },\n\n State::Running(fd, ref mut state) => {\n\n state.register(waker);\n\n set_timer_value(*fd, new_value);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Future for WinTimer {\n\n type Output = ();\n", "file_path": "src/oneshot/win.rs", "rank": 65, "score": 24.215434315542186 }, { "content": " }\n\n\n\n fn restart(&mut self, new_value: time::Duration, waker: &task::Waker) {\n\n debug_assert!(!(new_value.as_secs() == 0 && new_value.subsec_nanos() == 0), \"Zero timeout makes no sense\");\n\n\n\n match &mut self.state {\n\n State::Init(ref mut timeout) => {\n\n *timeout = new_value;\n\n },\n\n State::Running(ref mut fd, ref state) => {\n\n state.register(waker);\n\n fd.set_delay(new_value);\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Future for AppleTimer {\n\n type Output = ();\n\n\n", "file_path": "src/oneshot/apple.rs", "rank": 66, "score": 23.663195523446742 }, { "content": " State::Init(ref timeout) => {\n\n self.fd.get_ref().set(*timeout);\n\n State::Running(false)\n\n },\n\n 
State::Running(false) => match Pin::new(&mut self.fd).poll_read_ready(ctx, mio::Ready::readable()) {\n\n task::Poll::Pending => return task::Poll::Pending,\n\n task::Poll::Ready(ready) => match ready.map(|ready| ready.is_readable()).expect(\"kqueue cannot be ready\") {\n\n true => {\n\n let _ = Pin::new(&mut self.fd).clear_read_ready(ctx, mio::Ready::readable());\n\n match self.fd.get_mut().read() {\n\n 0 => return task::Poll::Pending,\n\n _ => return task::Poll::Ready(()),\n\n }\n\n },\n\n false => return task::Poll::Pending,\n\n }\n\n },\n\n State::Running(true) => return task::Poll::Ready(()),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/oneshot/kqueue.rs", "rank": 67, "score": 22.94401203204219 }, { "content": "\n\nimpl Drop for WebTimer {\n\n fn drop(&mut self) {\n\n match self.state {\n\n State::Running(ref mut fd, state) => {\n\n fd.clear();\n\n unsafe { Box::from_raw(state as *mut TimerState) };\n\n },\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for WebTimer {}\n\nunsafe impl Sync for WebTimer {}\n", "file_path": "src/oneshot/web.rs", "rank": 68, "score": 22.271172079564597 }, { "content": "use core::{task, time};\n\nuse core::future::Future;\n\nuse core::pin::Pin;\n\n\n\n///Timer that never expires.\n\npub struct NeverTimer;\n\n\n\nimpl super::Oneshot for NeverTimer {\n\n fn new(_: time::Duration) -> Self {\n\n Self\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n\n true\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n false\n\n }\n\n\n", "file_path": "src/oneshot/extra.rs", "rank": 69, "score": 21.42064602347583 }, { "content": "//! 
Windows API based timer\n\n\n\nuse super::state::TimerState;\n\nuse crate::alloc::boxed::Box;\n\n\n\nuse core::{mem, task, time, ptr};\n\nuse core::pin::Pin;\n\nuse core::future::Future;\n\n\n\nmod ffi {\n\n pub use winapi::shared::minwindef::{FILETIME};\n\n pub use winapi::um::threadpoolapiset::{\n\n CloseThreadpoolTimer,\n\n CreateThreadpoolTimer,\n\n SetThreadpoolTimerEx,\n\n WaitForThreadpoolTimerCallbacks,\n\n };\n\n\n\n pub use winapi::ctypes::{c_ulong, c_void};\n\n pub use winapi::um::winnt::{PTP_TIMER_CALLBACK, PTP_CALLBACK_INSTANCE, PTP_TIMER};\n\n}\n\n\n\nunsafe extern \"system\" fn timer_callback(_: ffi::PTP_CALLBACK_INSTANCE, data: *mut ffi::c_void, _: ffi::PTP_TIMER) {\n\n #[cfg_attr(feature = \"cargo-clippy\", allow(clippy::cast_ptr_alignment))]\n\n let state = data as *mut TimerState;\n\n\n\n (*state).wake();\n\n}\n\n\n", "file_path": "src/oneshot/win.rs", "rank": 70, "score": 21.365135411494418 }, { "content": " ///Creates new instance using platform timer\n\n pub fn platform_new(interval: time::Duration) -> Self {\n\n Interval::<PlatformTimer>::new(interval)\n\n }\n\n}\n\n\n\nimpl<T: Oneshot> Interval<T> {\n\n ///Creates new instance with specified timer type.\n\n pub fn new(interval: time::Duration) -> Self {\n\n Self {\n\n timer: T::new(interval),\n\n interval,\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n ///Stops interval\n\n pub fn cancel(&mut self) {\n\n self.timer.cancel()\n\n }\n", "file_path": "src/interval.rs", "rank": 71, "score": 21.291435600370043 }, { "content": "//! 
Web based timer\n\n\n\nuse core::{task, time};\n\nuse core::pin::Pin;\n\nuse core::future::Future;\n\n\n\nuse super::state::TimerState;\n\nuse crate::alloc::boxed::Box;\n\n\n\n#[wasm_bindgen::prelude::wasm_bindgen]\n\nextern \"C\" {\n\n fn setTimeout(closure: &wasm_bindgen::closure::Closure<dyn FnMut()>, time: u32) -> i32;\n\n fn clearTimeout(id: i32);\n\n}\n\n\n", "file_path": "src/oneshot/web.rs", "rank": 72, "score": 21.086380340691775 }, { "content": "\n\n pub const DISPATCH_TIME_FOREVER: dispatch_time_t = !0;\n\n //pub const DISPATCH_WALLTIME_NOW: dispatch_time_t = !1;\n\n pub const QOS_CLASS_DEFAULT: c_long = 0x15;\n\n\n\n extern \"C\" {\n\n pub static _dispatch_source_type_timer: c_long;\n\n\n\n pub fn dispatch_get_global_queue(identifier: c_long, flags: c_ulong) -> dispatch_queue_t;\n\n pub fn dispatch_source_create(type_: dispatch_source_type_t, handle: uintptr_t, mask: c_ulong, queue: dispatch_queue_t) -> dispatch_source_t;\n\n pub fn dispatch_source_set_timer(source: dispatch_source_t, start: dispatch_time_t, interval: u64, leeway: u64);\n\n pub fn dispatch_source_set_event_handler_f(source: dispatch_source_t, handler: unsafe extern \"C\" fn(*mut c_void));\n\n pub fn dispatch_set_context(object: dispatch_object_t, context: *mut c_void);\n\n pub fn dispatch_resume(object: dispatch_object_t);\n\n pub fn dispatch_suspend(object: dispatch_object_t);\n\n pub fn dispatch_release(object: dispatch_object_t);\n\n pub fn dispatch_source_cancel(object: dispatch_object_t);\n\n pub fn dispatch_walltime(when: *const c_void, delta: i64) -> dispatch_time_t;\n\n }\n\n}\n\n\n\n//TODO: Investigate why sometimes it is called multiple times\n\nunsafe extern \"C\" fn timer_handler(context: *mut c_void) {\n\n let state = context as *mut TimerState;\n\n\n\n (*state).wake();\n\n}\n\n\n", "file_path": "src/oneshot/apple.rs", "rank": 73, "score": 20.795565287921374 }, { "content": "//! 
Dummy Timer\n\n\n\nuse core::{task, time};\n\nuse core::future::Future;\n\nuse core::pin::Pin;\n\n\n\n///Dummy Timer\n\npub struct DummyTimer;\n\n\n\nimpl super::Oneshot for DummyTimer {\n\n fn new(_: time::Duration) -> Self {\n\n unimplemented!();\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n\n false\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n false\n", "file_path": "src/oneshot/dummy.rs", "rank": 74, "score": 20.71847171179702 }, { "content": "//! Dispatch Source based Timer\n\n\n\nuse core::{ptr, task, time};\n\nuse core::pin::Pin;\n\nuse core::future::Future;\n\n\n\nuse super::state::TimerState;\n\nuse crate::alloc::boxed::Box;\n\n\n\nuse libc::{c_long, c_ulong, c_void, uintptr_t};\n\n\n\n#[allow(non_camel_case_types)]\n\nmod ffi {\n\n use super::*;\n\n\n\n pub type dispatch_object_t = *const c_void;\n\n pub type dispatch_queue_t = *const c_void;\n\n pub type dispatch_source_t = *const c_void;\n\n pub type dispatch_source_type_t = *const c_void;\n\n pub type dispatch_time_t = u64;\n", "file_path": "src/oneshot/apple.rs", "rank": 75, "score": 20.557610532886414 }, { "content": "\n\n ///Restarts interval\n\n pub fn restart(&mut self, ctx: &task::Context) {\n\n let interval = self.interval;\n\n self.timer.restart(interval, ctx.waker());\n\n }\n\n\n\n\n\n #[inline(always)]\n\n ///Gets mutable reference\n\n pub fn as_mut(&mut self) -> &mut Self {\n\n self\n\n }\n\n}\n\n\n\nimpl<T: Oneshot> Future for &'_ mut Interval<T> {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Self::Output> {\n\n match Future::poll(Pin::new(&mut self.timer), ctx) {\n", "file_path": "src/interval.rs", "rank": 76, "score": 20.53951382105667 }, { "content": "\n\n fn is_ticking(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(is_finished) => !*is_finished,\n\n }\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(is_finished) => 
*is_finished,\n\n }\n\n }\n\n\n\n fn cancel(&mut self) {\n\n self.fd.get_mut().unset();\n\n }\n\n\n\n fn restart(&mut self, new_value: time::Duration, _: &task::Waker) {\n", "file_path": "src/oneshot/kqueue.rs", "rank": 77, "score": 18.930915506894646 }, { "content": " match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => !state.is_done(),\n\n }\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => state.is_done(),\n\n }\n\n }\n\n\n\n fn cancel(&mut self) {\n\n match self.state {\n\n State::Init(_) => (),\n\n State::Running(fd, _) => unsafe {\n\n ffi::SetThreadpoolTimerEx(fd, ptr::null_mut(), 0, 0);\n\n ffi::WaitForThreadpoolTimerCallbacks(fd, 1);\n\n }\n", "file_path": "src/oneshot/win.rs", "rank": 78, "score": 18.70816727328838 }, { "content": " fn cancel(&mut self) {\n\n }\n\n\n\n fn restart(&mut self, _: time::Duration, _: &task::Waker) {\n\n }\n\n}\n\n\n\nimpl Future for NeverTimer {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut task::Context) -> task::Poll<Self::Output> {\n\n task::Poll::Pending\n\n }\n\n}\n", "file_path": "src/oneshot/extra.rs", "rank": 79, "score": 18.09992253055003 }, { "content": "//! 
Timer based on `kqueue`\n\n\n\n#[cfg(feature = \"no_std\")]\n\ncore::compile_error!(\"no_std is not supported for kqueue implementation\");\n\n\n\nuse core::{task, time};\n\nuse core::pin::Pin;\n\nuse core::future::Future;\n\nuse crate::std::io;\n\n\n\nuse libc::{c_int};\n\n\n", "file_path": "src/oneshot/kqueue.rs", "rank": 80, "score": 17.787052663406513 }, { "content": " }\n\n\n\n fn cancel(&mut self) {\n\n unimplemented!();\n\n }\n\n\n\n fn restart(&mut self, _: time::Duration, _: &task::Waker) {\n\n unimplemented!();\n\n }\n\n}\n\n\n\nimpl Future for DummyTimer {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut task::Context) -> task::Poll<Self::Output> {\n\n unimplemented!();\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/dummy.rs", "rank": 81, "score": 17.609073681039032 }, { "content": "extern crate alloc;\n\n#[cfg(not(feature = \"no_std\"))]\n\nextern crate std;\n\n\n\nuse core::{time, future};\n\n\n\n#[macro_use]\n\nmod utils;\n\npub mod oneshot;\n\nmod timed;\n\nmod interval;\n\n\n\npub use oneshot::Oneshot;\n\npub use timed::{Timed, Expired};\n\npub use interval::Interval;\n\n\n\n///Run future in timed fashion using default Platform timer.\n", "file_path": "src/lib.rs", "rank": 82, "score": 16.97976498769541 }, { "content": "///async fn do_a_while() {\n\n/// let mut times: u8 = 0;\n\n/// let mut interval = async_timer::Interval::platform_new(core::time::Duration::from_secs(1));\n\n///\n\n/// while times < 5 {\n\n/// job().await;\n\n/// interval.as_mut().await;\n\n/// times += 1;\n\n/// }\n\n///}\n\n///```\n\n#[must_use = \"Interval does nothing unless polled\"]\n\npub struct Interval<T=PlatformTimer> {\n\n timer: T,\n\n ///Timer interval, change to this value will be reflected on next restart of timer.\n\n pub interval: time::Duration,\n\n}\n\n\n\nimpl Interval {\n\n #[inline(always)]\n", "file_path": "src/interval.rs", "rank": 83, "score": 16.9569494221675 }, { "content": " match &self.state {\n\n State::Init(_) => false,\n\n 
State::Running(_, ref state) => unsafe { !(**state).is_done() },\n\n }\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => unsafe { (**state).is_done() },\n\n }\n\n }\n\n\n\n fn cancel(&mut self) {\n\n match &mut self.state {\n\n State::Init(_) => (),\n\n State::Running(ref mut fd, _) => {\n\n fd.clear();\n\n }\n\n }\n", "file_path": "src/oneshot/web.rs", "rank": 84, "score": 16.274017195638095 }, { "content": "impl TimerHandle {\n\n fn new(state: *mut TimerState) -> Self {\n\n let inner = unsafe {\n\n let queue = ffi::dispatch_get_global_queue(ffi::QOS_CLASS_DEFAULT, 0);\n\n ffi::dispatch_source_create(&ffi::_dispatch_source_type_timer as *const _ as ffi::dispatch_source_type_t, 0, 0, queue)\n\n };\n\n\n\n os_assert!(!inner.is_null());\n\n\n\n unsafe {\n\n ffi::dispatch_source_set_event_handler_f(inner, timer_handler);\n\n ffi::dispatch_set_context(inner, state as *mut _);\n\n }\n\n\n\n Self {\n\n inner,\n\n //Starts as suspended\n\n s_count: 1,\n\n }\n\n }\n", "file_path": "src/oneshot/apple.rs", "rank": 85, "score": 16.174067673197584 }, { "content": " }\n\n\n\n fn deregister(&self, poll: &mio::Poll) -> io::Result<()> {\n\n mio::unix::EventedFd(&self.0).deregister(poll)\n\n }\n\n}\n\n\n\nimpl Drop for RawTimer {\n\n fn drop(&mut self) {\n\n let _ = nix::unistd::close(self.0);\n\n }\n\n}\n\n\n", "file_path": "src/oneshot/kqueue.rs", "rank": 86, "score": 15.119527535115765 }, { "content": " task::Poll::Ready(()) => {\n\n self.restart(ctx);\n\n task::Poll::Ready(())\n\n },\n\n task::Poll::Pending => task::Poll::Pending,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"stream\")]\n\nimpl<T: Oneshot> futures_core::stream::Stream for Interval<T> {\n\n type Item = ();\n\n\n\n #[inline]\n\n fn poll_next(self: Pin<&mut Self>, ctx: &mut task::Context) -> task::Poll<Option<Self::Item>> {\n\n let mut this = self.get_mut();\n\n Future::poll(Pin::new(&mut this), ctx).map(|res| Some(res))\n\n }\n\n}\n", 
"file_path": "src/interval.rs", "rank": 87, "score": 14.994834511313185 }, { "content": " time /= 1_000;\n\n }\n\n if time > isize::max_value() as u128 {\n\n unit = FilterFlag::empty(); // default is milliseconds\n\n time /= 1_000;\n\n }\n\n if time > isize::max_value() as u128 {\n\n unit = FilterFlag::NOTE_SECONDS;\n\n time /= 1_000;\n\n }\n\n\n\n let time = time as isize;\n\n kevent(self.0, &[KEvent::new(1, EventFilter::EVFILT_TIMER, flags, unit, time, 0)], &mut [], 0).expect(\"To arm timer\");\n\n }\n\n\n\n fn unset(&self) {\n\n use nix::sys::event::*;\n\n\n\n let flags = EventFlag::EV_DELETE;\n\n kevent(self.0, &[KEvent::new(1, EventFilter::EVFILT_TIMER, flags, FilterFlag::empty(), 0, 0)], &mut [], 0).expect(\"To disarm timer\");\n", "file_path": "src/oneshot/kqueue.rs", "rank": 88, "score": 14.334754449431145 }, { "content": " match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => !state.is_done(),\n\n }\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => state.is_done(),\n\n }\n\n }\n\n\n\n fn cancel(&mut self) {\n\n match &mut self.state {\n\n State::Init(_) => (),\n\n State::Running(ref mut fd, _) => {\n\n fd.suspend();\n\n }\n\n }\n", "file_path": "src/oneshot/apple.rs", "rank": 89, "score": 13.855172779982187 }, { "content": "\n\n fn set_delay(&mut self, timeout: time::Duration) {\n\n self.suspend();\n\n\n\n unsafe {\n\n let start = ffi::dispatch_walltime(ptr::null(), timeout.as_nanos() as i64);\n\n ffi::dispatch_source_set_timer(self.inner, start, ffi::DISPATCH_TIME_FOREVER, 0);\n\n }\n\n\n\n self.resume();\n\n }\n\n}\n\n\n\n\n", "file_path": "src/oneshot/apple.rs", "rank": 90, "score": 13.400814637452232 }, { "content": " RUNTIME.call_once(init);\n\n\n\n Self {\n\n state: State::Init(timeout),\n\n }\n\n }\n\n\n\n fn is_ticking(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => 
!state.is_done(),\n\n }\n\n }\n\n\n\n fn is_expired(&self) -> bool {\n\n match &self.state {\n\n State::Init(_) => false,\n\n State::Running(_, ref state) => state.is_done(),\n\n }\n\n }\n", "file_path": "src/oneshot/posix.rs", "rank": 91, "score": 13.206282949558272 }, { "content": "//!Interval module\n\n\n\nuse core::future::Future;\n\nuse core::{task, time};\n\nuse core::pin::Pin;\n\n\n\nuse crate::oneshot::Oneshot;\n\nuse crate::oneshot::Timer as PlatformTimer;\n\n\n\n///Periodic Timer\n\n///\n\n///On each completition, underlying timer is restarted and therefore `Future` can be polled once\n\n///more.\n\n///\n\n///## Usage\n\n///\n\n///```rust, no_run\n\n///async fn job() {\n\n///}\n\n///\n", "file_path": "src/interval.rs", "rank": 92, "score": 11.373921491206916 }, { "content": "use async_timer::oneshot::{Oneshot, Timer};\n\n\n\nuse std::time;\n\n\n\n#[tokio::test]\n\nasync fn test_oneshot() {\n\n let work = Timer::new(time::Duration::from_secs(2));\n\n assert!(!work.is_expired());\n\n\n\n let before = time::SystemTime::now();\n\n work.await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 1_500 && diff.as_millis() <= 2_500);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_tons_oneshot() {\n\n const NUM: usize = 1024;\n", "file_path": "tests/oneshot.rs", "rank": 93, "score": 11.166745081299755 }, { "content": "use async_timer::{Interval};\n\n\n\nuse std::time;\n\n\n\n#[tokio::test]\n\nasync fn test_interval() {\n\n let mut interval = Interval::platform_new(time::Duration::from_secs(1));\n\n\n\n let before = time::SystemTime::now();\n\n interval.as_mut().await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 750 && diff.as_millis() <= 1_250);\n\n\n\n let before = time::SystemTime::now();\n\n interval.as_mut().await;\n\n let after = time::SystemTime::now();\n\n let diff = 
after.duration_since(before).unwrap();\n\n\n", "file_path": "tests/interval.rs", "rank": 94, "score": 10.90671530703342 }, { "content": " let mut jobs = Vec::with_capacity(NUM);\n\n\n\n for _ in 0..NUM {\n\n jobs.push(Timer::new(time::Duration::from_secs(2)));\n\n }\n\n\n\n let before = time::SystemTime::now();\n\n futures_util::future::join_all(jobs).await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 1_500 && diff.as_millis() <= 2_500);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_smol_oneshot() {\n\n let work = Timer::new(time::Duration::from_millis(500));\n\n assert!(!work.is_expired());\n\n\n\n let before = time::SystemTime::now();\n", "file_path": "tests/oneshot.rs", "rank": 95, "score": 8.690019366851082 }, { "content": "//! Async timer lib\n\n//!\n\n//! ## Timers\n\n//!\n\n//! - [Oneshot](oneshot/trait.Oneshot.html) interface to one-shot [Timer](oneshot/type.Timer.html)\n\n//!\n\n//! ## Primitives\n\n//!\n\n//! - [Timed](enum.Timed.html) - A wrapper over future that allows to limit time for the future to resolve.\n\n//! - [Interval](struct.Interval.html) - Periodic timer, that on each completition returns itself to poll once again with the same interval.\n\n//!\n\n//! ## Features\n\n//!\n\n//! 
- `tokio_on` - Enables implementations that require platform's event loop\n\n#![warn(missing_docs)]\n\n\n\n#![no_std]\n\n#![cfg_attr(feature = \"cargo-clippy\", allow(clippy::style))]\n\n\n\n#[allow(unused_imports)]\n", "file_path": "src/lib.rs", "rank": 96, "score": 8.337117749119688 }, { "content": " assert!(diff.as_millis() >= 750 && diff.as_millis() <= 1_250);\n\n}\n\n\n\n#[cfg(feature = \"stream\")]\n\n#[tokio::test]\n\nasync fn test_stream_interval() {\n\n use futures_util::stream::StreamExt;\n\n\n\n let mut interval = Interval::platform_new(time::Duration::from_secs(1));\n\n\n\n let before = time::SystemTime::now();\n\n interval.next().await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 750 && diff.as_millis() <= 1_250);\n\n\n\n let before = time::SystemTime::now();\n\n interval.next().await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 750 && diff.as_millis() <= 1_250);\n\n}\n", "file_path": "tests/interval.rs", "rank": 97, "score": 7.327049700731471 }, { "content": " work.await;\n\n let after = time::SystemTime::now();\n\n let diff = after.duration_since(before).unwrap();\n\n\n\n assert!(diff.as_millis() >= 250 && diff.as_millis() <= 750);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_nano_oneshot() {\n\n let work = Timer::new(time::Duration::from_nanos(604000));\n\n assert!(!work.is_expired());\n\n work.await;\n\n}\n", "file_path": "tests/oneshot.rs", "rank": 98, "score": 6.351122057623885 }, { "content": "\n\n fn suspend(&mut self) {\n\n if self.s_count == 0 {\n\n unsafe {\n\n ffi::dispatch_suspend(self.inner);\n\n }\n\n\n\n self.s_count += 1;\n\n }\n\n }\n\n\n\n fn resume(&mut self) {\n\n while self.s_count > 0 {\n\n unsafe {\n\n ffi::dispatch_resume(self.inner)\n\n }\n\n\n\n self.s_count -= 1;\n\n }\n\n }\n", "file_path": "src/oneshot/apple.rs", "rank": 99, "score": 5.739277056575833 } 
]
Rust
src/ingester/ingester.rs
pathivu/pathivu
09ac381630dceb578b8abc39a67030c521e404ce
/* * Copyright 2019 Balaji Jinnah and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::config::config::Config; use crate::partition::segment_writer::SegmentWriter; use crate::store::store::Store; use crate::types::types::*; use futures::channel::mpsc::{Receiver, Sender}; use futures::executor::block_on; use futures::sink::SinkExt; use futures::stream::StreamExt; use log::{debug, info, warn}; use retain_mut::RetainMut; use rmp_serde::Deserializer; use serde::Deserialize; use std::collections::HashMap; use tonic::Status; pub struct Ingester<S: Store> { receiver: Receiver<IngesterRequest>, id: u8, segment_writers: HashMap<String, SegmentWriter<S>>, cfg: Config, store: S, tailers: HashMap<String, Vec<Sender<Result<api::QueryResponse, Status>>>>, } impl<S: Store + Clone> Ingester<S> { pub fn new(receiver: Receiver<IngesterRequest>, cfg: Config, store: S) -> Ingester<S> { Ingester { receiver: receiver, id: 0, segment_writers: HashMap::new(), cfg: cfg, store: store, tailers: HashMap::default(), } } pub fn start(&mut self) { info!("ingester {} started", self.id); loop { let ingester_request = block_on(async { self.receiver.next().await }); info!("received yo"); if !ingester_request.is_some() { continue; } let ingester_request = ingester_request.unwrap(); match ingester_request { IngesterRequest::Push(req) => { self.handle_tailers(&req); let result = self.push(req.push_request); info!(" result {:?}", result); match req.complete_signal.send(result) { Err(e) => { 
warn!( "unable to complete the signal for the ingester {}: {:?}", self.id, e ); } _ => {} } } IngesterRequest::Flush(hint) => { let result = self.flush_if_necessary(hint.app, hint.start_ts, hint.end_ts); match hint.complete_signal.send(result) { Err(e) => warn!( "unable to send complete signal for ingester necessary flush {:?}", e ), _ => debug!("ingester necessary flush signal sent successfully"), } } IngesterRequest::RegisterTailer(req) => { self.register_tailer(req); } } } } fn flush_if_necessary( &mut self, partition: String, start_ts: u64, end_ts: u64, ) -> Result<(), failure::Error> { if let Some(writer) = self.segment_writers.get_mut(&partition) { let (segment_start_ts, segment_end_ts) = writer.segment_ts(); if (segment_start_ts >= start_ts && segment_start_ts <= end_ts) || (segment_end_ts >= start_ts && segment_end_ts <= start_ts) || (start_ts == 0 && end_ts == 0) { let segment_writer = self.segment_writers.remove(&partition).unwrap(); segment_writer.close()?; debug!("flushing writer {} for hint", partition); } } Ok(()) } fn push(&mut self, req: api::PushRequest) -> Result<(), failure::Error> { if req.lines.len() == 0 { return Ok(()); } debug!( "ingesting partition {}, with {} lines", req.source, req.lines.len() ); let ref mut segment_writer: SegmentWriter<S>; if let Some(writer) = self.segment_writers.get_mut(&req.source) { segment_writer = writer; info!("writer is thre"); } else { info!("writer not there"); let writer = self.create_segment_writer(&req.source, req.lines[0].ts)?; info!("inserting yo"); self.segment_writers.insert(req.source.clone(), writer); segment_writer = self.segment_writers.get_mut(&req.source).unwrap(); } segment_writer.push(req.lines)?; if self.cfg.max_segment_size <= segment_writer.size() { let segment_writer = self.segment_writers.remove(&req.source).unwrap(); segment_writer.close()?; } info!("segment writers {:}", &self.segment_writers.len()); Ok(()) } fn create_segment_writer( &self, partition: &String, start_ts: u64, ) -> 
Result<SegmentWriter<S>, failure::Error> { let segment_id: u64; let partition_registry = self .store .get(format!("{}_{}", PARTITION_PREFIX, partition).as_bytes())?; match partition_registry { Some(registry) => { let mut buf = Deserializer::new(&registry[..]); let registry: PartitionRegistry = Deserialize::deserialize(&mut buf)?; segment_id = registry.last_assigned + 1; } None => segment_id = 1, } SegmentWriter::new( self.cfg.clone(), partition.clone(), segment_id, self.store.clone(), start_ts, ) } fn register_tailer(&mut self, mut req: TailerRequest) { let mut push_tailer = |key: String, tailer: Sender<Result<api::QueryResponse, Status>>| match self .tailers .get_mut(&key) { Some(tailers) => tailers.push(tailer), None => { self.tailers.insert(key, vec![tailer]); } }; if req.partitions.len() == 0 { push_tailer(String::from("*"), req.sender); return; } for partition in req.partitions.drain(..) { push_tailer(partition, req.sender.clone()); } } fn handle_tailers(&mut self, req: &IngesterPush) { let send_logs = |tailers: Option<&mut Vec<Sender<Result<api::QueryResponse, Status>>>>| { if let Some(tailers) = tailers { tailers.retain_mut(|tailer| { let mut lines = Vec::new(); for log_line in req.push_request.lines.iter() { lines.push(api::LogLine { app: req.push_request.source.clone(), inner: String::from_utf8(log_line.raw_data.clone()).unwrap(), ts: log_line.ts, structured: log_line.structured, }); } match block_on(async { tailer .send(Ok(api::QueryResponse { lines: lines, json: String::from(""), })) .await }) { Ok(_) => true, Err(_) => { println!("removing tailer"); return false; } } }); } }; let tailers = self.tailers.get_mut("*"); send_logs(tailers); let tailers = self.tailers.get_mut(&req.push_request.source); send_logs(tailers); } }
/* * Copyright 2019 Balaji Jinnah and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::config::config::Config; use crate::partition::segment_writer::SegmentWriter; use crate::store::store::Store; use crate::types::types::*; use futures::channel::mpsc::{Receiver, Sender}; use futures::executor::block_on; use futures::sink::SinkExt; use futures::stream::StreamExt; use log::{debug, info, warn}; use retain_mut::RetainMut; use rmp_serde::Deserializer; use serde::Deserialize; use std::collections::HashMap; use tonic::Status; pub struct Ingester<S: Store> { receiver: Receiver<IngesterRequest>, id: u8, segment_writers: HashMap<String, SegmentWriter<S>>, cfg: Config, store: S, tailers: HashMap<String, Vec<Sender<Result<api::QueryResponse, Status>>>>, } impl<S: Store + Clone> Ingester<S> {
pub fn start(&mut self) { info!("ingester {} started", self.id); loop { let ingester_request = block_on(async { self.receiver.next().await }); info!("received yo"); if !ingester_request.is_some() { continue; } let ingester_request = ingester_request.unwrap(); match ingester_request { IngesterRequest::Push(req) => { self.handle_tailers(&req); let result = self.push(req.push_request); info!(" result {:?}", result); match req.complete_signal.send(result) { Err(e) => { warn!( "unable to complete the signal for the ingester {}: {:?}", self.id, e ); } _ => {} } } IngesterRequest::Flush(hint) => { let result = self.flush_if_necessary(hint.app, hint.start_ts, hint.end_ts); match hint.complete_signal.send(result) { Err(e) => warn!( "unable to send complete signal for ingester necessary flush {:?}", e ), _ => debug!("ingester necessary flush signal sent successfully"), } } IngesterRequest::RegisterTailer(req) => { self.register_tailer(req); } } } } fn flush_if_necessary( &mut self, partition: String, start_ts: u64, end_ts: u64, ) -> Result<(), failure::Error> { if let Some(writer) = self.segment_writers.get_mut(&partition) { let (segment_start_ts, segment_end_ts) = writer.segment_ts(); if (segment_start_ts >= start_ts && segment_start_ts <= end_ts) || (segment_end_ts >= start_ts && segment_end_ts <= start_ts) || (start_ts == 0 && end_ts == 0) { let segment_writer = self.segment_writers.remove(&partition).unwrap(); segment_writer.close()?; debug!("flushing writer {} for hint", partition); } } Ok(()) } fn push(&mut self, req: api::PushRequest) -> Result<(), failure::Error> { if req.lines.len() == 0 { return Ok(()); } debug!( "ingesting partition {}, with {} lines", req.source, req.lines.len() ); let ref mut segment_writer: SegmentWriter<S>; if let Some(writer) = self.segment_writers.get_mut(&req.source) { segment_writer = writer; info!("writer is thre"); } else { info!("writer not there"); let writer = self.create_segment_writer(&req.source, req.lines[0].ts)?; info!("inserting 
yo"); self.segment_writers.insert(req.source.clone(), writer); segment_writer = self.segment_writers.get_mut(&req.source).unwrap(); } segment_writer.push(req.lines)?; if self.cfg.max_segment_size <= segment_writer.size() { let segment_writer = self.segment_writers.remove(&req.source).unwrap(); segment_writer.close()?; } info!("segment writers {:}", &self.segment_writers.len()); Ok(()) } fn create_segment_writer( &self, partition: &String, start_ts: u64, ) -> Result<SegmentWriter<S>, failure::Error> { let segment_id: u64; let partition_registry = self .store .get(format!("{}_{}", PARTITION_PREFIX, partition).as_bytes())?; match partition_registry { Some(registry) => { let mut buf = Deserializer::new(&registry[..]); let registry: PartitionRegistry = Deserialize::deserialize(&mut buf)?; segment_id = registry.last_assigned + 1; } None => segment_id = 1, } SegmentWriter::new( self.cfg.clone(), partition.clone(), segment_id, self.store.clone(), start_ts, ) } fn register_tailer(&mut self, mut req: TailerRequest) { let mut push_tailer = |key: String, tailer: Sender<Result<api::QueryResponse, Status>>| match self .tailers .get_mut(&key) { Some(tailers) => tailers.push(tailer), None => { self.tailers.insert(key, vec![tailer]); } }; if req.partitions.len() == 0 { push_tailer(String::from("*"), req.sender); return; } for partition in req.partitions.drain(..) 
{ push_tailer(partition, req.sender.clone()); } } fn handle_tailers(&mut self, req: &IngesterPush) { let send_logs = |tailers: Option<&mut Vec<Sender<Result<api::QueryResponse, Status>>>>| { if let Some(tailers) = tailers { tailers.retain_mut(|tailer| { let mut lines = Vec::new(); for log_line in req.push_request.lines.iter() { lines.push(api::LogLine { app: req.push_request.source.clone(), inner: String::from_utf8(log_line.raw_data.clone()).unwrap(), ts: log_line.ts, structured: log_line.structured, }); } match block_on(async { tailer .send(Ok(api::QueryResponse { lines: lines, json: String::from(""), })) .await }) { Ok(_) => true, Err(_) => { println!("removing tailer"); return false; } } }); } }; let tailers = self.tailers.get_mut("*"); send_logs(tailers); let tailers = self.tailers.get_mut(&req.push_request.source); send_logs(tailers); } }
pub fn new(receiver: Receiver<IngesterRequest>, cfg: Config, store: S) -> Ingester<S> { Ingester { receiver: receiver, id: 0, segment_writers: HashMap::new(), cfg: cfg, store: store, tailers: HashMap::default(), } }
function_block-full_function
[ { "content": "pub trait Store {\n\n fn merge(&mut self, key: &[u8], value: Vec<u8>);\n\n fn set(&mut self, key: &[u8], value: Vec<u8>);\n\n fn flush(&mut self) -> Result<usize, failure::Error>;\n\n fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>, failure::Error>;\n\n fn flush_batch(&self, wb: Batch) -> Result<(), failure::Error>;\n\n}\n", "file_path": "src/store/store.rs", "rank": 0, "score": 123372.22041237369 }, { "content": "/// decode_u64 is used to decode the buf.\n\npub fn decode_u64(buf: &[u8]) -> u64 {\n\n let mut reader = Cursor::new(buf);\n\n // let it panic, If any invalid data.\n\n reader.read_u64::<LittleEndian>().unwrap()\n\n}\n", "file_path": "src/util/mod.rs", "rank": 1, "score": 81993.25314103506 }, { "content": "pub fn decode_entry(line_buf: &[u8]) -> Entry {\n\n // first 8 bytes are timestamp.\n\n let ts = decode_u64(&line_buf[..8]);\n\n Entry {\n\n ts: ts,\n\n structured: line_buf[8],\n\n line: line_buf[9..].to_vec(),\n\n }\n\n}\n", "file_path": "src/partition/segment_iterator.rs", "rank": 2, "score": 77800.63687775849 }, { "content": "/// decode_posting_list is used to decode the given buf to the slices of u64.\n\n/// This can be further optimized for the future by groupvarint compressing.\n\npub fn decode_posting_list(list: &[u8]) -> Result<Vec<u64>, failure::Error> {\n\n // posting list is byte of u64.\n\n assert_eq!(list.len() % 8, 0);\n\n let mut reader = Cursor::new(list);\n\n let mut posting_list = Vec::new();\n\n loop {\n\n match reader.read_u64::<LittleEndian>() {\n\n Ok(val) => {\n\n posting_list.push(val);\n\n }\n\n _ => {\n\n // Should ideally throw an error.\n\n break;\n\n }\n\n }\n\n }\n\n Ok(posting_list)\n\n}\n", "file_path": "src/partition/posting_list.rs", "rank": 3, "score": 62786.19503220575 }, { "content": "#[derive(Clone)]\n\nstruct CorsHandler {}\n\n\n\nimpl Handler for CorsHandler {\n\n fn handle(self, state: State) -> Box<HandlerFuture> {\n\n let mut res = create_response(\n\n &state,\n\n StatusCode::OK,\n\n 
mime::APPLICATION_JSON,\n\n String::from(\"\"),\n\n );\n\n let header = res.headers_mut();\n\n header.insert(\"Access-Control-Allow-Origin\", \"*\".parse().unwrap());\n\n header.insert(\"Access-Control-Allow-Methods\", \"POST\".parse().unwrap());\n\n header.insert(\n\n \"Access-Control-Allow-Headers\",\n\n \"Content-Type\".parse().unwrap(),\n\n );\n\n return Box::new(oldfuture::future::ok((state, res)));\n\n }\n\n}\n", "file_path": "src/server/server.rs", "rank": 4, "score": 58356.57875275222 }, { "content": "#[derive(Clone)]\n\nstruct HelloHandler {}\n\nimpl Handler for HelloHandler {\n\n fn handle(self, state: State) -> Box<HandlerFuture> {\n\n let res = format!(\"Hi from chola\").into_response(&state);\n\n Box::new(oldfuture::future::ok((state, res)))\n\n }\n\n}\n\n\n\nimpl NewHandler for HelloHandler {\n\n type Instance = Self;\n\n\n\n fn new_handler(&self) -> GothamResult<Self::Instance> {\n\n Ok(self.clone())\n\n }\n\n}\n\n\n", "file_path": "src/server/server.rs", "rank": 5, "score": 58356.57875275222 }, { "content": "#[derive(Clone)]\n\nstruct PushHandler {\n\n manager: Manager,\n\n}\n\nimpl NewHandler for PushHandler {\n\n type Instance = Self;\n\n\n\n fn new_handler(&self) -> GothamResult<Self::Instance> {\n\n Ok(self.clone())\n\n }\n\n}\n\nimpl Handler for PushHandler {\n\n fn handle(self, mut state: State) -> Box<HandlerFuture> {\n\n let mut manager = self.manager.clone();\n\n let fut = Body::take_from(&mut state)\n\n .concat2()\n\n .then(move |body| match body {\n\n Ok(body) => {\n\n let result = serde_json::from_slice::<PushRequest>(&body.to_vec());\n\n match result {\n\n Ok(req) => {\n", "file_path": "src/server/server.rs", "rank": 6, "score": 58356.57875275222 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"parser/query.pest\"]\n\nstruct QueryParser;\n\n\n\n/// default log line limit.\n\nconst DEFAULT_LIMIT: u64 = 10000;\n\n\n\n/// default distance to fuzzy search.\n\nconst DEFAULT_DISTANCE: u32 = 2;\n\n\n\n/// Selection hold the selection 
statement.\n\n#[derive(Default, Debug, Clone)]\n\npub struct Selection {\n\n pub structured: bool,\n\n pub attr: Option<String>,\n\n pub value: String,\n\n}\n\n\n\n/// Count will tell which attribute that should be counted\n\n/// on\n\n#[derive(Default, Debug)]\n\npub struct Count {\n", "file_path": "src/parser/parser.rs", "rank": 7, "score": 58352.45048916231 }, { "content": "struct QueryHandler {\n\n executor: QueryExecutor<rocks_store::RocksStore>,\n\n}\n\nimpl QueryHandler {\n\n fn execute(&mut self, req: QueryRequest) -> Result<String, failure::Error> {\n\n self.executor\n\n .execute(req.query, req.start_ts, req.end_ts, req.forward, req.count)\n\n }\n\n}\n\n\n\nimpl Handler for QueryHandler {\n\n fn handle(self, mut state: State) -> Box<HandlerFuture> {\n\n let mut executor = self.clone();\n\n let fut = Body::take_from(&mut state)\n\n .concat2()\n\n .then(move |body| match body {\n\n Ok(body) => {\n\n let result = serde_json::from_slice::<QueryRequest>(&body.to_vec());\n\n match result {\n\n Ok(req) => match executor.execute(req) {\n", "file_path": "src/server/server.rs", "rank": 8, "score": 58352.45048916231 }, { "content": "/// get_value_from_json is used to get value of the given json from the flattened key.\n\npub fn get_value_from_json(key: String, json: &mut [u8]) -> Result<Option<Value>, Error> {\n\n //TODO: this function should split into two, where we give the simd_json object becacuse\n\n // no need to keep parsing for when we use operators like AND or NOT.\n\n // Now we have to parse the json in order to get the value from the flattend\n\n // json key.\n\n let mut json: simd_json::BorrowedValue = simd_json::to_borrowed_value(json)?;\n\n // Now recursively find value of the given flattened key.\n\n let mut path: VecDeque<&str> = key.split(\".\").collect();\n\n let mut key = path.pop_front().unwrap();\n\n 'outer: loop {\n\n match json {\n\n Value::Object(mut obj) => {\n\n // I'm using this variable to capture the reference of the combined key. 
If you have\n\n // better idea. Please do it.\n\n let mut holder = String::from(\"\");\n\n 'inner: loop {\n\n if let Some(inner) = obj.remove(key) {\n\n json = inner;\n\n if path.len() == 0 {\n\n // We're at the end of the traversal. So return here.\n", "file_path": "src/json_parser/parser.rs", "rank": 9, "score": 57631.965756887264 }, { "content": "struct PathivuGrpcServer {\n\n ingester_manager: Manager,\n\n query_executor: QueryExecutor<rocks_store::RocksStore>,\n\n partition_path: PathBuf,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl api::server::Pathivu for PathivuGrpcServer {\n\n type TailStream = mpsc::Receiver<Result<api::QueryResponse, Status>>;\n\n\n\n async fn tail(\n\n &self,\n\n req: Request<api::QueryRequest>,\n\n ) -> Result<TonicResponse<Self::TailStream>, Status> {\n\n let (tx, rx) = mpsc::channel(100);\n\n let mut req = req.into_inner();\n\n if req.partitions.len() == 0 {\n\n let partitions = get_partitions(&self.partition_path);\n\n match partitions {\n\n Ok(partitions) => req.partitions = partitions,\n", "file_path": "src/server/server.rs", "rank": 10, "score": 56824.5131420419 }, { "content": "#[derive(Serialize)]\n\nstruct Event<'a> {\n\n time: u64,\n\n user_id: &'a str,\n\n platform: &'a str,\n\n event_type: &'a str,\n\n}\n\n\n", "file_path": "src/telementry/telementry.rs", "rank": 11, "score": 55324.25348426562 }, { "content": "#[derive(Serialize)]\n\nstruct TelementryRequest<'a> {\n\n api_key: &'a str,\n\n events: Vec<Event<'a>>,\n\n}\n\n\n\n/// TelementryJob send telementry to the amplitude.\n\npub struct TelementryJob {}\n\n\n\nimpl CronJob for TelementryJob {\n\n fn execute(&mut self) {\n\n let username = whoami::username();\n\n let hostname = whoami::hostname();\n\n let platform = whoami::platform().to_string();\n\n let user_id = format!(\"{}_{}_{}\", username, hostname, platform);\n\n let time = SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .unwrap();\n\n let time = time.as_secs();\n\n let event_type = 
\"alive_tick\";\n\n let event = Event {\n", "file_path": "src/telementry/telementry.rs", "rank": 12, "score": 53644.308245699125 }, { "content": "pub trait Iterator {\n\n fn entry(&self) -> Option<Rc<Entry>>;\n\n fn next(&mut self) -> Option<()>;\n\n}\n", "file_path": "src/partition/iterator.rs", "rank": 13, "score": 53554.960437945556 }, { "content": "/// flatten_json flatten the given json into key value pair.\n\npub fn flatten_json(buf: &mut Vec<u8>) -> Result<HashMap<String, Vec<String>>, failure::Error> {\n\n let mut result: HashMap<String, Vec<String>> = HashMap::new();\n\n let json: simd_json::BorrowedValue = simd_json::to_borrowed_value(buf)?;\n\n match json {\n\n Value::Object(mut obj) => {\n\n for (key, value) in obj.drain() {\n\n deep_flaten_json(key.into_owned(), value, &mut result);\n\n }\n\n }\n\n _ => panic!(\"Invalid json object traversal.\"),\n\n }\n\n Ok(result)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use simd_json::value::borrowed::Value;\n\n use simd_json::value::tape::StaticNode;\n\n #[test]\n", "file_path": "src/json_parser/parser.rs", "rank": 14, "score": 53377.808191049706 }, { "content": "/// The CronJob trait allows to execute the cron job.\n\npub trait CronJob {\n\n fn execute(&mut self);\n\n}\n\n\n\n/// The CronScheduler allows to execute job for given interval of time.\n\npub struct CronScheduler {\n\n jobs: Vec<Box<dyn CronJob + Send>>,\n\n interval: Duration,\n\n}\n\n\n\nimpl CronScheduler {\n\n /// new gives CrobScheduler by taking jobs and duration as an input.\n\n pub fn new(jobs: Vec<Box<dyn CronJob + Send>>, interval: Duration) -> CronScheduler {\n\n CronScheduler {\n\n jobs: jobs,\n\n interval: interval,\n\n }\n\n }\n\n\n\n /// start will execute the given job for given interval.\n\n pub fn start(mut self) {\n\n thread::spawn(move || loop {\n\n for job in &mut self.jobs {\n\n job.execute();\n\n }\n\n thread::sleep(self.interval.clone());\n\n });\n\n }\n\n}\n", "file_path": 
"src/cronscheduler/cron_scheduler.rs", "rank": 15, "score": 50635.13103806575 }, { "content": " def write(chunk)\n\n if @@client == nil\n\n @@client = Api::Pathivu::Stub.new(url, :this_channel_is_insecure)\n\n end\n\n\n\n partitions = {}\n\n chunk.each do |time, record|\n\n unless record.is_a?(Hash)\n\n @log.warn 'Dropping log entries with malformed record: ' \\\n\n \"'#{record.inspect}' from tag '#{tag}' at '#{time}'. \" \\\n\n 'A log record should be in JSON format.'\n\n next\n\n end\n\n flattened_hash = self.flatten_hash(record)\n\n indexes = self.build_indexes(flattened_hash)\n\n line = PushLogLine::new(ts: Time.at(time.to_f).to_i, indexes: indexes, structured: true, json_keys: flattened_hash.keys, raw_data: Yajl.dump(record))\n\n if !partitions.key?(record[\"kubernetes\"][\"pod_name\"])\n\n partitions[record[\"kubernetes\"][\"pod_name\"]] = []\n\n end\n\n partitions[record[\"kubernetes\"][\"pod_name\"]].push(line)\n\n end\n\n #post each parition\n\n partitions.each do|partition, lines|\n\n req = PushRequest::new(source: partition, lines: lines)\n\n @@client.push(req)\n\n end\n\n end\n\n end\n\nend\n", "file_path": "fluentd-plugin/debian-chola/plugins/out_chola.rb", "rank": 16, "score": 49540.90966967266 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse std::path::PathBuf;\n\n/// Config for pathivu.\n\n#[derive(Clone, Debug)]\n\npub struct 
Config {\n\n pub dir: PathBuf,\n\n pub max_segment_size: u64,\n\n pub max_batch_size: u8,\n\n pub max_index_size: usize,\n\n pub retention_period: u64,\n\n}\n", "file_path": "src/config/config.rs", "rank": 17, "score": 47730.101560283525 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::store::batch::Batch;\n", "file_path": "src/store/store.rs", "rank": 18, "score": 47613.301889195536 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::config::config::Config;\n\nuse crate::store::batch::Batch as StoreBatch;\n\nuse crate::store::store::Store;\n\nuse failure;\n\nuse failure::format_err;\n", "file_path": "src/store/rocks_store.rs", "rank": 31, "score": 46074.43975898967 }, { "content": "/*\n\n * Copyright 2019 Balaji 
Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n// NEED to look into it. why it is slow.\n\n// use crate::config::config::Config;\n\n// use crate::store::batch::Batch as StoreBatch;\n\n// use crate::store::store::Store;\n\n// use failure::{bail, Error};\n", "file_path": "src/store/sled_store.rs", "rank": 32, "score": 46073.75277833446 }, { "content": "use rocksdb::{Writable, WriteBatch, DB};\n\nuse std::sync::Arc;\n\npub struct RocksStore {\n\n db: Arc<DB>,\n\n}\n\n\n\nimpl RocksStore {\n\n pub fn new(cfg: Config) -> Result<RocksStore, failure::Error> {\n\n let db = DB::open_default(&cfg.dir.join(\"store\").to_str().unwrap()).unwrap();\n\n Ok(RocksStore { db: Arc::new(db) })\n\n }\n\n}\n\n\n\nimpl Store for RocksStore {\n\n fn merge(&mut self, key: &[u8], value: Vec<u8>) {\n\n self.db.merge(key, &value).unwrap();\n\n }\n\n\n\n fn flush(&mut self) -> Result<usize, failure::Error> {\n\n match self.db.flush(false) {\n", "file_path": "src/store/rocks_store.rs", "rank": 33, "score": 46017.15351583413 }, { "content": "// }\n\n\n\n// impl SledStore {\n\n// pub fn new(cfg: Config) -> Result<SledStore, Error> {\n\n// let config = ConfigBuilder::default()\n\n// .path(cfg.dir)\n\n// .cache_capacity(10_000_000_000)\n\n// .flush_every_ms(Some(1000))\n\n// .snapshot_after_ops(100_000)\n\n// .build();\n\n// let db = Db::start(config)?;\n\n// db.set_merge_operator(concatenate_merge);\n\n// Ok(SledStore { db: db 
})\n\n// }\n\n// }\n\n\n\n// impl Store for SledStore {\n\n// fn merge(&mut self, key: &[u8], value: Vec<u8>) {\n\n// self.db.merge(key, value);\n\n// }\n", "file_path": "src/store/sled_store.rs", "rank": 34, "score": 46014.267592473785 }, { "content": "// use log::{debug, info, warn};\n\n// use sled::{Batch, ConfigBuilder, Db};\n\n\n\n// /// concatenate_merge will merge the previous value with the give value.\n\n// // copied from sled documentation.\n\n// fn concatenate_merge(\n\n// _key: &[u8], // the key being merged\n\n// old_value: Option<&[u8]>, // the previous value, if one existed\n\n// merged_bytes: &[u8], // the new bytes being merged in\n\n// ) -> Option<Vec<u8>> {\n\n// // set the new value, return None to delete\n\n// let mut ret = old_value.map(|ov| ov.to_vec()).unwrap_or_else(|| vec![]);\n\n\n\n// ret.extend_from_slice(merged_bytes);\n\n\n\n// Some(ret)\n\n// }\n\n\n\n// pub struct SledStore {\n\n// db: Db,\n", "file_path": "src/store/sled_store.rs", "rank": 35, "score": 46012.9277698554 }, { "content": " let rwb = WriteBatch::with_capacity(inner.len());\n\n for (key, value) in inner.drain(0..inner.len()) {\n\n rwb.put(&key[..], &value[..]).unwrap();\n\n }\n\n self.db.write(&rwb).unwrap();\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Clone for RocksStore {\n\n fn clone(&self) -> RocksStore {\n\n RocksStore {\n\n db: self.db.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/store/rocks_store.rs", "rank": 36, "score": 46008.3591125614 }, { "content": "// fn flush_batch(&self, wb: Batch) -> Result<(), failure::Error> {\n\n// panic!(\"yet to implement batch for sled\");\n\n// Ok(())\n\n// }\n\n// }\n\n\n\n// impl Clone for SledStore {\n\n// fn clone(&self) -> SledStore {\n\n// SledStore {\n\n// db: self.db.clone(),\n\n// }\n\n// }\n\n// }\n", "file_path": "src/store/sled_store.rs", "rank": 37, "score": 46005.24214080558 }, { "content": "\n\n// fn flush(&mut self) -> Result<usize, Error> {\n\n// match self.db.flush() {\n\n// Ok(size) => Ok(size),\n\n// Err(e) => 
bail!(\"{}\", e),\n\n// }\n\n// }\n\n\n\n// fn set(&mut self, key: &[u8], value: Vec<u8>) {\n\n// self.db.insert(key, value);\n\n// }\n\n\n\n// fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>, failure::Error> {\n\n// let res = self.db.get(key)?;\n\n// info!(\"{:?}\", res);\n\n// match res {\n\n// Some(value) => Ok(Some(value.to_vec())),\n\n// None => Ok(None),\n\n// }\n\n// }\n", "file_path": "src/store/sled_store.rs", "rank": 38, "score": 46000.25951848468 }, { "content": " Ok(_) => Ok(0),\n\n Err(e) => Err(format_err!(\"{}\", e)),\n\n }\n\n }\n\n\n\n fn set(&mut self, key: &[u8], value: Vec<u8>) {\n\n self.db.put(key, &value).unwrap();\n\n }\n\n\n\n fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>, failure::Error> {\n\n match self.db.get(key) {\n\n Ok(res) => match res {\n\n Some(val) => Ok(Some(val.to_vec())),\n\n None => Ok(None),\n\n },\n\n Err(e) => Err(format_err!(\"{}\", e)),\n\n }\n\n }\n\n fn flush_batch(&self, wb: StoreBatch) -> Result<(), failure::Error> {\n\n let mut inner = wb.inner();\n", "file_path": "src/store/rocks_store.rs", "rank": 39, "score": 45999.81458352882 }, { "content": "/// convert_string_to_f32 is used to convert string into f32.\n\npub fn convert_string_to_f32(val: String) -> Option<f32> {\n\n if let Ok(val) = val.parse::<f32>() {\n\n return Some(val);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/util/util.rs", "rank": 40, "score": 37491.39581643797 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the 
specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod config;\n", "file_path": "src/config/mod.rs", "rank": 41, "score": 37444.37722135233 }, { "content": "/// parse will parse the given query string into internal\n\n/// Pathivu query structure.\n\npub fn parse(query: String) -> Result<Query, Error> {\n\n let mut query_inner = Query::default();\n\n // Don't parse empty string\n\n if query == \"\" {\n\n return Ok(query_inner);\n\n }\n\n let mut result = QueryParser::parse(Rule::query, &query)?;\n\n let tokens = result.next().unwrap();\n\n parse_query(tokens, &mut query_inner)?;\n\n if query_inner.is_aggregation_exist() && query_inner.limit != 0 {\n\n return Err(format_err!(\n\n \"Limit is not supported with aggregation. At least for now\"\n\n ));\n\n }\n\n\n\n // If there is no limit, update the default limit.\n\n if query_inner.limit == 0 {\n\n query_inner.limit = DEFAULT_LIMIT;\n\n }\n\n\n\n if query_inner.distance == 0 {\n\n // set the default distance if there is no distance specified.\n\n query_inner.distance = DEFAULT_DISTANCE;\n\n }\n\n Ok(query_inner)\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 42, "score": 37395.3874584319 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod batch;\n\npub mod rocks_store;\n\npub mod sled_store;\n\npub mod store;\n", "file_path": 
"src/store/mod.rs", "rank": 43, "score": 37343.69363314967 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse failure;\n\n/// Batch is the used to batch all the entries and flush it to the store.\n\n// pub trait Batch<T> {\n\n// fn set(&mut self, key: Vec<u8>, value: Vec<u8>) -> Result<(), failure::Error>;\n\n// fn inner(self) -> T;\n", "file_path": "src/store/batch.rs", "rank": 44, "score": 37338.96857013384 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod ingester;\n\npub mod manager;\n", "file_path": "src/ingester/mod.rs", "rank": 45, "score": 37306.05423908481 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may 
not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::config::config::Config;\n\nuse crate::ingester::ingester::Ingester;\n\nuse crate::store::rocks_store::RocksStore;\n\nuse crate::types::types::api::QueryResponse;\n", "file_path": "src/ingester/manager.rs", "rank": 46, "score": 37304.32869281161 }, { "content": "// }\n\n\n\npub struct Batch {\n\n inner: Vec<(Vec<u8>, Vec<u8>)>,\n\n}\n\n\n\nimpl Batch {\n\n pub fn new() -> Batch {\n\n Batch { inner: Vec::new() }\n\n }\n\n pub fn set(&mut self, key: Vec<u8>, value: Vec<u8>) -> Result<(), failure::Error> {\n\n self.inner.push((key, value));\n\n Ok(())\n\n }\n\n pub fn inner(self) -> Vec<(Vec<u8>, Vec<u8>)> {\n\n self.inner\n\n }\n\n}\n", "file_path": "src/store/batch.rs", "rank": 47, "score": 37272.12312217508 }, { "content": "impl Manager {\n\n pub fn new(cfg: Config, store: RocksStore) -> Manager {\n\n let mut manager = Manager {\n\n transport: Vec::default(),\n\n no_of_shard: num_cpus::get(),\n\n };\n\n // Spin one ingester for each cpu.\n\n for _ in 0..manager.no_of_shard {\n\n let (sender, receiver) = mpsc::channel(1000);\n\n let mut ingester = Ingester::new(receiver, cfg.clone(), store.clone());\n\n manager.transport.push(sender);\n\n // Start the ingester in a new thread.\n\n thread::spawn(move || {\n\n ingester.start();\n\n });\n\n }\n\n manager\n\n }\n\n\n\n pub async fn ingest(&mut self, req: IngesterPush) -> Result<(), failure::Error> {\n", "file_path": "src/ingester/manager.rs", "rank": 48, "score": 37248.75359619825 }, { "content": "use 
crate::types::types::{IngesterFlushHintReq, IngesterPush, IngesterRequest, TailerRequest};\n\nuse failure;\n\nuse futures::channel::mpsc;\n\nuse futures::channel::mpsc::Sender;\n\nuse futures::executor::block_on;\n\nuse futures::sink::SinkExt;\n\nuse num_cpus;\n\nuse std::collections::hash_map::DefaultHasher;\n\nuse std::collections::HashMap;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::thread;\n\nuse tonic::Status;\n\n/// Manager is responsible for managing multiple ingester and route the loglines to the correct\n\n/// ingester.\n\n#[derive(Clone)]\n\npub struct Manager {\n\n pub transport: Vec<Sender<IngesterRequest>>,\n\n pub no_of_shard: usize,\n\n}\n\n\n", "file_path": "src/ingester/manager.rs", "rank": 49, "score": 37239.34035417483 }, { "content": "\n\n pub fn register_tailer(\n\n &mut self,\n\n sources: Vec<String>,\n\n tailer_sender: Sender<Result<QueryResponse, Status>>,\n\n ) -> Result<(), failure::Error> {\n\n // We'll batch the partitions based on the shard.\n\n let mut buckets: HashMap<usize, TailerRequest> = HashMap::new();\n\n for partition in sources {\n\n let shard = self.get_ingester_shard(&partition);\n\n if let Some(req) = buckets.get_mut(&shard) {\n\n req.partitions.push(partition);\n\n continue;\n\n }\n\n buckets.insert(\n\n shard,\n\n TailerRequest {\n\n partitions: vec![partition],\n\n sender: tailer_sender.clone(),\n\n },\n", "file_path": "src/ingester/manager.rs", "rank": 50, "score": 37235.2657685465 }, { "content": " );\n\n }\n\n\n\n // Send the batched request to the right ingester.\n\n for (shard, batched_req) in buckets {\n\n let sender = self.transport.get_mut(shard).unwrap();\n\n\n\n block_on(async {\n\n sender\n\n .send(IngesterRequest::RegisterTailer(batched_req))\n\n .await\n\n })?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn send_flush_hint(&mut self, req: IngesterFlushHintReq) -> Result<(), failure::Error> {\n\n // Get the right sender for the given request.\n\n let shard = self.get_ingester_shard(&req.app);\n\n let sender = 
self.transport.get_mut(shard).unwrap();\n\n\n\n block_on(async { sender.send(IngesterRequest::Flush(req)).await })?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/ingester/manager.rs", "rank": 51, "score": 37232.88808667869 }, { "content": " // Send the request to the right shard.\n\n let shard = self.get_ingester_shard(&req.push_request.source);\n\n if shard >= self.no_of_shard {\n\n panic!(shard);\n\n }\n\n let sender = self.transport.get_mut(shard).unwrap();\n\n sender.send(IngesterRequest::Push(req)).await?;\n\n Ok(())\n\n }\n\n\n\n fn get_ingester_shard(&mut self, source: &String) -> usize {\n\n // Calculate the shard to send the request to the right partition.\n\n // IDEAS: check io_uring how it scales well. Otherwise, instead of hashing based routing use\n\n // some logic to find the right shard. Example worker stealing.\n\n let mut s = DefaultHasher::new();\n\n source.hash(&mut s);\n\n let hash_value = s.finish();\n\n // Get the right ingester for the given source.\n\n (hash_value as usize) % self.no_of_shard\n\n }\n", "file_path": "src/ingester/manager.rs", "rank": 52, "score": 37228.59779544592 }, { "content": "/// convert_static_node_to_f32 is used to convert static node to f32\n\npub fn convert_static_node_to_f32(val: StaticNode) -> Option<f32> {\n\n match val {\n\n StaticNode::I64(num) => Some(num as f32),\n\n StaticNode::U64(num) => Some(num as f32),\n\n StaticNode::F64(num) => Some(num as f32),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/util/util.rs", "rank": 53, "score": 35686.54160138956 }, { "content": "/// convert_static_node_to_string is used to convert static node to string\n\npub fn convert_static_node_to_string(val: StaticNode) -> Option<String> {\n\n match val {\n\n StaticNode::I64(num) => Some(format!(\"{}\", num)),\n\n StaticNode::U64(num) => Some(format!(\"{}\", num)),\n\n StaticNode::F64(num) => Some(format!(\"{}\", num)),\n\n StaticNode::Bool(val) => Some(format!(\"{}\", val)),\n\n StaticNode::Null => Some(format!(\"null\")),\n\n 
}\n\n}\n", "file_path": "src/util/util.rs", "rank": 54, "score": 35686.54160138956 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::partition::segment_iterator::{decode_entry, Entry};\n\nuse crate::util::decode_u64;\n\nuse failure;\n\nuse std::fs::File;\n\nuse std::io::Read;\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 55, "score": 33840.87943260412 }, { "content": " return None;\n\n }\n\n self.current_index = self.current_index + 1;\n\n Some(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::partition::segment_file_iterator::FileIterator;\n\n use crate::partition::segment_writer::tests::{get_test_cfg, get_test_store};\n\n use crate::partition::segment_writer::SegmentWriter;\n\n use crate::types::types::api::PushLogLine;\n\n #[test]\n\n fn test_file_iterator() {\n\n let cfg = get_test_cfg();\n\n let store = get_test_store(cfg.clone());\n\n let mut segment_writer = SegmentWriter::new(\n\n cfg.clone(),\n\n String::from(\"tmppartition\"),\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 56, "score": 33784.084725793604 }, { "content": "use std::path::Path;\n\nuse std::rc::Rc;\n\n///FileIterator is used to iterate over all the log lines till the given offset.\n\npub struct FileIterator {\n\n entries: Vec<Rc<Entry>>,\n\n current_index: usize,\n\n}\n\n\n\nimpl FileIterator {\n\n pub fn 
new(\n\n id: u64,\n\n partition: String,\n\n dir: String,\n\n end_offset: u64,\n\n start_ts: u64,\n\n end_ts: u64,\n\n ) -> Result<FileIterator, failure::Error> {\n\n let path = Path::new(&dir)\n\n .join(\"partition\")\n\n .join(&partition)\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 57, "score": 33781.22516057018 }, { "content": " }\n\n Ok(FileIterator {\n\n entries: entries,\n\n current_index: 0,\n\n })\n\n }\n\n\n\n pub fn entry(&self) -> Option<Rc<Entry>> {\n\n let entry = self.entries.get(self.current_index);\n\n match entry {\n\n Some(ent) => Some(ent.clone()),\n\n None => None,\n\n }\n\n }\n\n\n\n /// next will advance the iterator. throws error if we reach end.\n\n pub fn next(&mut self) -> Option<()> {\n\n if self.current_index >= self.entries.len() - 1 {\n\n // just incrementing one so that entry will give none.\n\n self.current_index = self.current_index + 1;\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 58, "score": 33773.77369355438 }, { "content": " 1,\n\n store.clone(),\n\n 2,\n\n )\n\n .unwrap();\n\n let mut lines = Vec::new();\n\n lines.push(PushLogLine {\n\n raw_data: String::from(\"liala transfered money to raja\").into_bytes(),\n\n indexes: vec![\n\n \"liala\".to_string(),\n\n \"transfered\".to_string(),\n\n \"money\".to_string(),\n\n \"raja\".to_string(),\n\n ],\n\n ts: 2,\n\n structured: false,\n\n json_keys: Vec::default(),\n\n });\n\n lines.push(PushLogLine {\n\n raw_data: String::from(\"roja transfered money to navin\").into_bytes(),\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 59, "score": 33770.20340396937 }, { "content": " indexes: vec![\n\n \"roja\".to_string(),\n\n \"transfered\".to_string(),\n\n \"money\".to_string(),\n\n \"navin\".to_string(),\n\n ],\n\n ts: 4,\n\n structured: false,\n\n json_keys: Vec::default(),\n\n });\n\n segment_writer.push(lines).unwrap();\n\n segment_writer.flush().unwrap();\n\n let mut iterator = FileIterator::new(\n\n 1,\n\n 
String::from(\"tmppartition\"),\n\n cfg.dir,\n\n segment_writer.size(),\n\n 1,\n\n 5,\n\n )\n\n .unwrap();\n\n let ent = iterator.entry().unwrap();\n\n assert_eq!(ent.ts, 2);\n\n iterator.next().unwrap();\n\n let ent = iterator.entry().unwrap();\n\n assert_eq!(ent.ts, 4);\n\n }\n\n}\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 60, "score": 33769.99884398193 }, { "content": " .join(format!(\"{}.segment\", id));\n\n // allocate buffer.\n\n let mut buffer = vec![0; end_offset as usize];\n\n let mut file = File::open(path)?;\n\n // read the exact bytes.\n\n file.read_exact(&mut buffer[..])?;\n\n assert_eq!(buffer.len(), end_offset as usize);\n\n let mut entries = Vec::new();\n\n let mut read_index = 14 as usize;\n\n loop {\n\n if read_index >= end_offset as usize {\n\n break;\n\n }\n\n let entry_len = decode_u64(&buffer[read_index..read_index + 8]) as usize;\n\n read_index = read_index + 8;\n\n let entry = decode_entry(&buffer[read_index..read_index + entry_len]);\n\n if start_ts <= entry.ts && entry.ts <= end_ts {\n\n entries.push(Rc::new(entry));\n\n }\n\n read_index = read_index + entry_len;\n", "file_path": "src/partition/segment_file_iterator.rs", "rank": 61, "score": 33769.19008140495 }, { "content": "/// get_partitions returns partitions list that has been ingesterd into\n\n/// pathivu.\n\npub fn get_partitions(path: &PathBuf) -> Result<Vec<String>, failure::Error> {\n\n let path = path.join(\"partition\");\n\n create_dir_all(&path)?;\n\n let mut partitions = Vec::new();\n\n let dir = fs::read_dir(&path)?;\n\n for entry in dir {\n\n match entry {\n\n Ok(entry) => {\n\n partitions.push(entry.file_name().into_string().unwrap());\n\n }\n\n Err(e) => bail!(\"{}\", e),\n\n }\n\n }\n\n return Ok(partitions);\n\n}\n\n\n\nimpl PartitionHandler {\n\n pub fn partitions(&self) -> Result<PartitionRes, failure::Error> {\n\n let partitions = get_partitions(&self.partition_path)?;\n\n Ok(PartitionRes {\n", "file_path": "src/server/server.rs", "rank": 62, 
"score": 32114.738101797226 }, { "content": "/// parse_query is used to parse the Pathivu query into internal\n\n/// query structure.\n\npub fn parse_query(pair: Pair<'_, Rule>, mut query: &mut Query) -> Result<(), Error> {\n\n for inner in pair.into_inner() {\n\n match inner.as_rule() {\n\n Rule::count => {\n\n if query.is_aggregation_exist() {\n\n return Err(format_err!(\n\n \"{}\",\n\n \"Only one aggregation is supported. At least for now\"\n\n ));\n\n }\n\n parse_count(inner, &mut query);\n\n }\n\n Rule::structured => {\n\n if query.selection.is_some() {\n\n return Err(format_err!(\"{}\", \"Only one selection statement exist\"));\n\n }\n\n parse_structured(inner, &mut query)\n\n }\n\n Rule::query => parse_query(inner, &mut query)?,\n\n Rule::query_block => parse_query(inner, &mut query)?,\n", "file_path": "src/parser/parser.rs", "rank": 63, "score": 29688.855390974593 }, { "content": "var fileDescriptor_00212fb1f9d3bf1c = []byte{\n\n\t// 471 bytes of a gzipped FileDescriptorProto\n\n\t0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcd, 0x6e, 0x13, 0x3f,\n\n\t0x14, 0xc5, 0xe3, 0xcc, 0x57, 0xe6, 0x36, 0xff, 0xbf, 0xda, 0x2b, 0x88, 0x4c, 0x91, 0x50, 0xe4,\n\n\t0x45, 0x95, 0x55, 0x55, 0xd2, 0x57, 0x00, 0xb1, 0xa0, 0x48, 0xc1, 0xca, 0x3e, 0x32, 0x19, 0x87,\n\n\t0x1a, 0x8a, 0x3d, 0xb5, 0x3d, 0x84, 0x3c, 0x05, 0x6f, 0xc4, 0x03, 0xf0, 0x54, 0xc8, 0x9e, 0x99,\n\n\t0x74, 0x94, 0x0d, 0xec, 0xe6, 0x5c, 0x7f, 0xfd, 0xce, 0xb9, 0x77, 0xa0, 0x14, 0xb5, 0xba, 0xae,\n\n\t0xad, 0xf1, 0x06, 0x13, 0x51, 0x2b, 0xf6, 0x8b, 0xc0, 0xf4, 0x63, 0x23, 0xed, 0x81, 0xcb, 0xc7,\n\n\t0x46, 0x3a, 0x8f, 0xaf, 0x00, 0x6a, 0x61, 0xbd, 0xf2, 0xca, 0x68, 0x47, 0xc9, 0x3c, 0x59, 0x94,\n\n\t0x7c, 0x50, 0xc1, 0x17, 0x30, 0x71, 0x5e, 0x58, 0xbf, 0xf1, 0x8e, 0x8e, 0xe7, 0x64, 0x91, 0xf2,\n\n\t0x22, 0xea, 0xb5, 0xc3, 0xe7, 0x90, 0x4b, 0x5d, 0x85, 0x85, 0x24, 0x2e, 0x64, 0x52, 0x57, 0x6b,\n\n\t0x87, 0xcf, 0x20, 0xdb, 0x9a, 0x46, 0x7b, 0x9a, 0xb6, 0xd5, 0x28, 0x70, 0x06, 0xb9, 
0xd9, 0xed,\n\n\t0x9c, 0xf4, 0x34, 0x8b, 0xe5, 0x4e, 0x21, 0x85, 0x62, 0x67, 0xec, 0x5e, 0xd8, 0x8a, 0xe6, 0x73,\n\n\t0xb2, 0x98, 0xf0, 0x5e, 0x86, 0x7b, 0x1e, 0x03, 0x29, 0x2d, 0xe6, 0x64, 0x51, 0xf2, 0x56, 0xb0,\n\n\t0x77, 0xf0, 0x5f, 0xc7, 0xef, 0x6a, 0xa3, 0x9d, 0x44, 0x06, 0xd9, 0x83, 0xd2, 0xb2, 0x65, 0x3f,\n\n\t0x5b, 0x4e, 0xaf, 0x83, 0xe3, 0x3b, 0xf3, 0xf9, 0x4e, 0x69, 0xc9, 0xdb, 0x25, 0x44, 0x48, 0xbf,\n\n\t0x38, 0xa3, 0xa3, 0x81, 0x92, 0xc7, 0x6f, 0x26, 0xa0, 0xe8, 0x76, 0x85, 0x97, 0x94, 0xd6, 0xd2,\n\n\t0x52, 0xd2, 0xbe, 0x14, 0x05, 0xfe, 0x0f, 0xe3, 0xa3, 0xe7, 0xb1, 0x77, 0x78, 0x0e, 0x89, 0xa8,\n\n\t0xeb, 0xe8, 0xb5, 0xe4, 0xe1, 0x33, 0x64, 0xe7, 0xbc, 0x6d, 0xb6, 0xbe, 0xb1, 0xb2, 0x8a, 0x76,\n\n\t0x27, 0x7c, 0x50, 0x61, 0xb7, 0x70, 0xb1, 0xea, 0x93, 0x3c, 0xf2, 0xfe, 0x25, 0x70, 0x56, 0x40,\n\n\t0xf6, 0xf6, 0x5b, 0xed, 0x0f, 0xec, 0x27, 0x81, 0xb3, 0x55, 0xe3, 0xee, 0x7b, 0xca, 0x96, 0x87,\n\n\t0x1c, 0x79, 0x28, 0x14, 0x4a, 0x57, 0xf2, 0x87, 0x0c, 0x90, 0xe1, 0x96, 0x5e, 0x9e, 0x70, 0x25,\n\n\t0xa7, 0x5c, 0xa1, 0xa7, 0x56, 0xec, 0x37, 0x95, 0xf0, 0x22, 0x52, 0x4f, 0x79, 0x61, 0xc5, 0xfe,\n\n\t0x8d, 0xf0, 0x02, 0x5f, 0x42, 0x19, 0xd2, 0xd9, 0x7c, 0x95, 0x07, 0x47, 0xb3, 0x78, 0xed, 0x24,\n\n\t0x14, 0xde, 0xcb, 0x83, 0x63, 0x1f, 0x5a, 0xa0, 0x7e, 0x74, 0x66, 0x90, 0x3b, 0xd3, 0xd8, 0xad,\n\n\t0xec, 0x72, 0xeb, 0x14, 0x5e, 0xf5, 0x1d, 0x19, 0xc7, 0x8e, 0x9c, 0xc7, 0x8e, 0x0c, 0x9c, 0x74,\n\n\t0x5d, 0x59, 0xfe, 0x26, 0x50, 0xac, 0x84, 0xbf, 0x57, 0xdf, 0x1b, 0x7c, 0x0d, 0xe9, 0x5a, 0xa8,\n\n\t0x07, 0xbc, 0x88, 0x9b, 0x87, 0x13, 0x7a, 0x89, 0xc3, 0x52, 0x1b, 0x22, 0x1b, 0xdd, 0x10, 0xbc,\n\n\t0x81, 0x2c, 0x16, 0xff, 0xf9, 0x0c, 0x2e, 0x01, 0x56, 0x4f, 0x93, 0x0d, 0x71, 0x4f, 0xcc, 0xfa,\n\n\t0x72, 0xd6, 0x32, 0x9e, 0x36, 0x8b, 0x8d, 0xf0, 0x0a, 0xd2, 0x80, 0x8e, 0x4f, 0x2e, 0xfa, 0x37,\n\n\t0x06, 0xe7, 0xd9, 0xe8, 0x53, 0x1e, 0x7f, 0xb2, 0xdb, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6c,\n\n\t0xc8, 0xff, 0x5b, 0x71, 0x03, 0x00, 0x00,\n", "file_path": 
"katchi/api/api.pb.go", "rank": 64, "score": 27297.72826541032 }, { "content": "var fileDescriptor_00212fb1f9d3bf1c = []byte{\n\n\t// 471 bytes of a gzipped FileDescriptorProto\n\n\t0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcd, 0x6e, 0x13, 0x3f,\n\n\t0x14, 0xc5, 0xe3, 0xcc, 0x57, 0xe6, 0x36, 0xff, 0xbf, 0xda, 0x2b, 0x88, 0x4c, 0x91, 0x50, 0xe4,\n\n\t0x45, 0x95, 0x55, 0x55, 0xd2, 0x57, 0x00, 0xb1, 0xa0, 0x48, 0xc1, 0xca, 0x3e, 0x32, 0x19, 0x87,\n\n\t0x1a, 0x8a, 0x3d, 0xb5, 0x3d, 0x84, 0x3c, 0x05, 0x6f, 0xc4, 0x03, 0xf0, 0x54, 0xc8, 0x9e, 0x99,\n\n\t0x74, 0x94, 0x0d, 0xec, 0xe6, 0x5c, 0x7f, 0xfd, 0xce, 0xb9, 0x77, 0xa0, 0x14, 0xb5, 0xba, 0xae,\n\n\t0xad, 0xf1, 0x06, 0x13, 0x51, 0x2b, 0xf6, 0x8b, 0xc0, 0xf4, 0x63, 0x23, 0xed, 0x81, 0xcb, 0xc7,\n\n\t0x46, 0x3a, 0x8f, 0xaf, 0x00, 0x6a, 0x61, 0xbd, 0xf2, 0xca, 0x68, 0x47, 0xc9, 0x3c, 0x59, 0x94,\n\n\t0x7c, 0x50, 0xc1, 0x17, 0x30, 0x71, 0x5e, 0x58, 0xbf, 0xf1, 0x8e, 0x8e, 0xe7, 0x64, 0x91, 0xf2,\n\n\t0x22, 0xea, 0xb5, 0xc3, 0xe7, 0x90, 0x4b, 0x5d, 0x85, 0x85, 0x24, 0x2e, 0x64, 0x52, 0x57, 0x6b,\n\n\t0x87, 0xcf, 0x20, 0xdb, 0x9a, 0x46, 0x7b, 0x9a, 0xb6, 0xd5, 0x28, 0x70, 0x06, 0xb9, 0xd9, 0xed,\n\n\t0x9c, 0xf4, 0x34, 0x8b, 0xe5, 0x4e, 0x21, 0x85, 0x62, 0x67, 0xec, 0x5e, 0xd8, 0x8a, 0xe6, 0x73,\n\n\t0xb2, 0x98, 0xf0, 0x5e, 0x86, 0x7b, 0x1e, 0x03, 0x29, 0x2d, 0xe6, 0x64, 0x51, 0xf2, 0x56, 0xb0,\n\n\t0x77, 0xf0, 0x5f, 0xc7, 0xef, 0x6a, 0xa3, 0x9d, 0x44, 0x06, 0xd9, 0x83, 0xd2, 0xb2, 0x65, 0x3f,\n\n\t0x5b, 0x4e, 0xaf, 0x83, 0xe3, 0x3b, 0xf3, 0xf9, 0x4e, 0x69, 0xc9, 0xdb, 0x25, 0x44, 0x48, 0xbf,\n\n\t0x38, 0xa3, 0xa3, 0x81, 0x92, 0xc7, 0x6f, 0x26, 0xa0, 0xe8, 0x76, 0x85, 0x97, 0x94, 0xd6, 0xd2,\n\n\t0x52, 0xd2, 0xbe, 0x14, 0x05, 0xfe, 0x0f, 0xe3, 0xa3, 0xe7, 0xb1, 0x77, 0x78, 0x0e, 0x89, 0xa8,\n\n\t0xeb, 0xe8, 0xb5, 0xe4, 0xe1, 0x33, 0x64, 0xe7, 0xbc, 0x6d, 0xb6, 0xbe, 0xb1, 0xb2, 0x8a, 0x76,\n\n\t0x27, 0x7c, 0x50, 0x61, 0xb7, 0x70, 0xb1, 0xea, 0x93, 0x3c, 0xf2, 0xfe, 0x25, 0x70, 0x56, 
0x40,\n\n\t0xf6, 0xf6, 0x5b, 0xed, 0x0f, 0xec, 0x27, 0x81, 0xb3, 0x55, 0xe3, 0xee, 0x7b, 0xca, 0x96, 0x87,\n\n\t0x1c, 0x79, 0x28, 0x14, 0x4a, 0x57, 0xf2, 0x87, 0x0c, 0x90, 0xe1, 0x96, 0x5e, 0x9e, 0x70, 0x25,\n\n\t0xa7, 0x5c, 0xa1, 0xa7, 0x56, 0xec, 0x37, 0x95, 0xf0, 0x22, 0x52, 0x4f, 0x79, 0x61, 0xc5, 0xfe,\n\n\t0x8d, 0xf0, 0x02, 0x5f, 0x42, 0x19, 0xd2, 0xd9, 0x7c, 0x95, 0x07, 0x47, 0xb3, 0x78, 0xed, 0x24,\n\n\t0x14, 0xde, 0xcb, 0x83, 0x63, 0x1f, 0x5a, 0xa0, 0x7e, 0x74, 0x66, 0x90, 0x3b, 0xd3, 0xd8, 0xad,\n\n\t0xec, 0x72, 0xeb, 0x14, 0x5e, 0xf5, 0x1d, 0x19, 0xc7, 0x8e, 0x9c, 0xc7, 0x8e, 0x0c, 0x9c, 0x74,\n\n\t0x5d, 0x59, 0xfe, 0x26, 0x50, 0xac, 0x84, 0xbf, 0x57, 0xdf, 0x1b, 0x7c, 0x0d, 0xe9, 0x5a, 0xa8,\n\n\t0x07, 0xbc, 0x88, 0x9b, 0x87, 0x13, 0x7a, 0x89, 0xc3, 0x52, 0x1b, 0x22, 0x1b, 0xdd, 0x10, 0xbc,\n\n\t0x81, 0x2c, 0x16, 0xff, 0xf9, 0x0c, 0x2e, 0x01, 0x56, 0x4f, 0x93, 0x0d, 0x71, 0x4f, 0xcc, 0xfa,\n\n\t0x72, 0xd6, 0x32, 0x9e, 0x36, 0x8b, 0x8d, 0xf0, 0x0a, 0xd2, 0x80, 0x8e, 0x4f, 0x2e, 0xfa, 0x37,\n\n\t0x06, 0xe7, 0xd9, 0xe8, 0x53, 0x1e, 0x7f, 0xb2, 0xdb, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6c,\n\n\t0xc8, 0xff, 0x5b, 0x71, 0x03, 0x00, 0x00,\n", "file_path": "misc/api/api.pb.go", "rank": 65, "score": 27297.72826541032 }, { "content": "/// parse_limit will parse the limit which is to limit the number of\n\n/// log lines that needs to be responded.\n\nfn parse_limit(pair: Pair<'_, Rule>, query: &mut Query) {\n\n let limit = pair.into_inner().next().unwrap().as_str();\n\n query.limit = limit.parse().unwrap();\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n #[test]\n\n fn test_average() {\n\n // basic assertion\n\n let query = parse(String::from(\"avg(weight) as avg_weight\")).unwrap();\n\n let avg = query.average.unwrap();\n\n assert_eq!(avg.attr, \"weight\".to_string());\n\n assert_eq!(avg.alias, \"avg_weight\".to_string());\n\n\n\n // by assertion\n\n let query = parse(String::from(\"avg(weight) as avg_weight by hello\")).unwrap();\n\n let avg 
= query.average.unwrap();\n\n assert_eq!(avg.attr, \"weight\".to_string());\n", "file_path": "src/parser/parser.rs", "rank": 66, "score": 26341.643606007863 }, { "content": "var xxx_messageInfo_Empty proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 67, "score": 26262.249302251195 }, { "content": "var xxx_messageInfo_Empty proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 68, "score": 26262.249302251195 }, { "content": "var xxx_messageInfo_QueryResponse proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 69, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_PartitionResponse proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 70, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_PushRequest proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 71, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_QueryRequest proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 72, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_PushRequest proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 73, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_QueryRequest proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 74, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_PartitionResponse proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 75, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_LogLine proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 76, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_QueryResponse proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 77, "score": 25327.762588294856 }, { "content": "var xxx_messageInfo_LogLine proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 78, "score": 
25327.762588294856 }, { "content": "var xxx_messageInfo_PushLogLine proto.InternalMessageInfo\n", "file_path": "katchi/api/api.pb.go", "rank": 79, "score": 24457.494273049342 }, { "content": "var xxx_messageInfo_PushLogLine proto.InternalMessageInfo\n", "file_path": "misc/api/api.pb.go", "rank": 80, "score": 24457.494273049342 }, { "content": "func printLogs(lines []*api.LogLine) {\n\n\tfor _, line := range lines {\n\n\t\tfmt.Printf(\"APP: %s, ts: %s, line: %s \\n\", line.App, time.Unix(int64(line.Ts), 0).String(),\n\n\t\t\tline.Inner)\n\n\t}\n", "file_path": "katchi/main.go", "rank": 81, "score": 86.97851188793007 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod types;\n", "file_path": "src/types/mod.rs", "rank": 82, "score": 85.55034775112026 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language 
governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod retention;\n", "file_path": "src/retention/mod.rs", "rank": 83, "score": 84.87204735702574 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod executor;\n", "file_path": "src/queryexecutor/mod.rs", "rank": 84, "score": 84.87204735702575 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod server;\n", "file_path": "src/server/mod.rs", "rank": 85, "score": 84.87204735702572 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * 
http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod parser;\n", "file_path": "src/parser/mod.rs", "rank": 86, "score": 84.87204735702575 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod telementry;\n", "file_path": "src/telementry/mod.rs", "rank": 87, "score": 84.87204735702574 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub mod parser;\n", "file_path": 
"src/json_parser/mod.rs", "rank": 88, "score": 84.87204735702574 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n", "file_path": "src/util/decode.rs", "rank": 89, "score": 84.4952759456675 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod cron_scheduler;\n", "file_path": "src/cronscheduler/mod.rs", "rank": 90, "score": 84.20452506343463 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, 
software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\npub mod merge_iterator;\n", "file_path": "src/iterator/mod.rs", "rank": 91, "score": 84.2045250634346 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nmod iterator;\n\npub mod replayer;\n", "file_path": "src/replayer/mod.rs", "rank": 92, "score": 83.54752414248591 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::config::config::Config;\n\nuse crate::store::batch::Batch;\n\nuse crate::store::store::Store;\n\nuse crate::types::types;\n\nuse 
crate::types::types::{\n", "file_path": "src/partition/segment_writer.rs", "rank": 93, "score": 82.98143435450245 }, { "content": "#\n\n# Fluentd\n\n#\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n\n# you may not use this file except in compliance with the License.\n\n# You may obtain a copy of the License at\n\n#\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n#\n\n# Unless required by applicable law or agreed to in writing, software\n\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n# See the License for the specific language governing permissions and\n\n# limitations under the License.\n\n#\n\n# \n\n# Copyright 2019 Balaji Jinnah and Contributors\n\n# \n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n\n# you may not use this file except in compliance with the License.\n", "file_path": "fluentd-plugin/debian-chola/plugins/out_chola.rb", "rank": 94, "score": 81.92100527159236 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::config::config::Config;\n\nuse crate::queryexecutor::executor::QueryExecutor;\n\nuse crate::replayer::replayer::Replayer;\n\nuse crate::store::rocks_store;\n\nuse crate::types::types::*;\n", "file_path": "src/server/server.rs", "rank": 95, "score": 80.00393131421633 
}, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::partition::segment_iterator::Entry;\n\nuse std::rc::Rc;\n", "file_path": "src/partition/iterator.rs", "rank": 96, "score": 79.60865913589856 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::config::config::Config;\n\nuse crate::json_parser::parser::flatten_json;\n\nuse crate::replayer::iterator::Iterator;\n\nuse crate::store::batch::Batch;\n\nuse crate::store::store::Store;\n", "file_path": "src/replayer/replayer.rs", "rank": 97, "score": 78.9016301403271 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the 
License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n#![feature(async_closure)]\n\n#![feature(type_ascription)]\n\n#![feature(result_map_or_else)]\n\nmod config;\n\nmod cronscheduler;\n", "file_path": "src/main.rs", "rank": 98, "score": 78.68590412981187 }, { "content": "/*\n\n * Copyright 2019 Balaji Jinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\nuse crate::config::config::Config;\n\nuse crate::cronscheduler::cron_scheduler::CronJob;\n\nuse crate::server::server::get_partitions;\n\nuse crate::store::store::Store;\n\nuse crate::types::types::{PartitionRegistry, PARTITION_PREFIX};\n", "file_path": "src/retention/retention.rs", "rank": 99, "score": 77.59092215490598 } ]
Rust
src/bin/mv_files.rs
lukaspustina/clams-bin
14cf8732a4809b2003fbe255edffab448047b19a
use clams::prelude::*; use clams_bin::mv_files; use failure::{format_err, Error}; use std::path::{Path, PathBuf}; use structopt::StructOpt; use walkdir::WalkDir; #[derive(StructOpt, Debug)] #[structopt( name = "mv_files", about = "Move video files from a nested directory structure into another, flat directory", raw(setting = "structopt::clap::AppSettings::ColoredHelp") )] struct Args { #[structopt(short = "e", long = "extension", default_value = "avi,mkv,mp4")] extensions: String, #[structopt(short = "s", long = "size", default_value = "100M")] size: String, #[structopt(raw(required = "true", index = "1"))] sources: Vec<String>, #[structopt(raw(index = "2"))] destination: String, #[structopt(short = "d", long = "dry")] dry: bool, #[structopt(long = "no-color")] no_color: bool, #[structopt(short = "p", long = "progress-bar")] progress_bar: bool, #[structopt(long = "silent")] silent: bool, #[structopt(short = "v", long = "verbose", parse(from_occurrences))] verbosity: u64, } fn run(args: Args) -> Result<(), Error> { if args.dry { warn!( "{}", "Running in dry mode. 
No moves will be performed.".yellow() ); } let size = mv_files::human_size_to_bytes(&args.size)?; if !PathBuf::from(&args.destination).is_dir() { return Err(format_err!( "Destination directory '{}' does not exist.", args.destination )); } let extensions = mv_files::parse_extensions(&args.extensions)?; let source_directories: Vec<&str> = args.sources.iter().map(|s| s.as_ref()).collect(); let dir_entries: Vec<_> = source_directories .into_iter() .map(|d| WalkDir::new(d).into_iter()) .flat_map(|e| e) .collect::<Result<Vec<_>, _>>()?; let moves: Vec<(_, _)> = dir_entries .iter() .map(|e| e.path()) .filter(|p| !p.is_dir()) .filter(|p| { p.extension() .map_or(false, |x| extensions.contains(&x.to_str().unwrap())) }) .filter(|p| p.metadata().map(|m| m.len() >= size).unwrap_or(false)) .map(|p| { let dest_path = mv_files::destination_path(&args.destination, p).unwrap(); (p, dest_path) }) .collect(); debug!( "moving with progess bar = {} and dry mode = {} and moves = ({}) {:#?}", args.progress_bar, args.dry, moves.len(), moves ); if args.progress_bar { move_files_with_progress_bar(moves.as_slice(), args.dry) } else { move_files(moves.as_slice(), args.dry) } } fn move_files_with_progress_bar(moves: &[(&Path, PathBuf)], dry: bool) -> Result<(), Error> { let pb = ProgressBar::new(moves.len() as u64); let style = ProgressStyle::default_clams_bar(); pb.set_style(style); for &(from, ref to) in moves { pb.set_message(&format!( "Moving {} to {} ...", from.to_str().unwrap().yellow(), to.to_str().unwrap().yellow() )); if !dry { match std::fs::rename(from, to) { Ok(_) => {} Err(e) => eprintln!( "Failed to move {} because {}", from.to_str().unwrap().red(), e ), } } pb.inc(1); } pb.finish_with_message("done."); Ok(()) } fn move_files(moves: &[(&Path, PathBuf)], dry: bool) -> Result<(), Error> { for &(from, ref to) in moves { print!( "Moving {} to {} ...", from.to_str().unwrap().yellow(), to.to_str().unwrap().yellow() ); if dry { println!(" {}", "simulated.".blue()); } else { match 
std::fs::rename(from, to) { Ok(_) => println!(" {}.", "done".green()), Err(e) => eprintln!( "Failed to move {} because {}", from.to_str().unwrap().red(), e ), } } } Ok(()) } fn main() { let args = Args::from_args(); clams::console::set_color(!args.no_color); let name = Args::clap().get_name().to_owned(); let level: Level = args.verbosity.into(); if !args.silent { eprintln!( "{} version={}, log level={:?}", name, env!("CARGO_PKG_VERSION"), &level ); } let log_config = LogConfig::new( std::io::stderr(), !args.no_color, Level(log::LevelFilter::Error), vec![ModLevel { module: name.to_owned(), level, }], None, ); init_logging(log_config).expect("Failed to initialize logging"); match run(args) { Ok(_) => {} Err(e) => { println!("Failed:"); for c in e.iter_chain() { println!("{}", c); } } } }
use clams::prelude::*; use clams_bin::mv_files; use failure::{format_err, Error}; use std::path::{Path, PathBuf}; use structopt::StructOpt; use walkdir::WalkDir; #[derive(StructOpt, Debug)] #[structopt( name = "mv_files", about = "Move video files from a nested directory structure into another, flat directory", raw(setting = "structopt::clap::AppSettings::ColoredHelp") )] struct Args { #[structopt(short = "e", long = "extension", default_value = "avi,mkv,mp4")] extensions: String, #[structopt(short = "s", long = "size", default_value = "100M")] size: String, #[structopt(raw(required = "true", index = "1"))] sources: Vec<String>, #[structopt(raw(index = "2"))] destination: String, #[structopt(short = "d", long = "dry")] dry: bool, #[structopt(long = "no-color")] no_color: bool, #[structopt(short = "p", long = "progress-bar")] progress_bar: bool, #[structopt(long = "silent")] silent: bool, #[structopt(short = "v", long = "verbose", parse(from_occurrences))] verbosity: u64, } fn run(args: Args) -> Result<(), Error> { if args.dry { warn!( "{}", "Running in dry mode. 
No moves will be performed.".yellow() ); } let size = mv_files::human_size_to_bytes(&args.size)?; if !PathBuf::from(&args.destination).is_dir() { return Err(format_err!( "Destination directory '{}' does not exist.", args.destination )); } let extensions = mv_files::parse_extensions(&args.extensions)?; let source_directories: Vec<&str> = args.sources.iter().map(|s| s.as_ref()).collect(); let dir_entries: Vec<_> = source_directories .into_iter() .map(|d| WalkDir::new(d).into_iter()) .flat_map(|e| e) .collect::<Result<Vec<_>, _>>()?; let moves: Vec<(_, _)> = dir_entries .iter() .map(|e| e.path()) .filter(|p| !p.is_dir()) .filter(|p| { p.extension() .map_or(false, |x| extensions.contains(&x.to_str().unwrap())) }) .filter(|p| p.metadata().map(|m| m.len() >= size).unwrap_or(false)) .map(|p| { let dest_path = mv_files::destination_path(&args.destination, p).unwrap(); (p, dest_path) }) .collect(); debug!( "moving with progess bar = {} and dry mode = {} and moves = ({}) {:#?}", args.progress_bar, args.dry, moves.len(), moves ); if args.progress_bar { move_files_with_progress_bar(moves.as_slice(), args.dry) } else { move_files(moves.as_slice(), args.dry) } } fn move_files_with_progress_bar(moves: &[(&Path, PathBuf)], dry: bool) -> Result<(), Error> { let pb = ProgressBar::new(moves.len() as u64); let style = ProgressStyle::default_clams_bar(); pb.set_style(style); for &(from, ref to) in moves { pb.set_message(&format!( "Moving {} to {} ...", from.to_str().unwrap().yellow(), to.to_str().unwrap().yellow() )); if !dry { match std::fs::rename(from, to) { Ok(_) => {} Err(e) => eprintln!( "Failed to move {} because {}", from.to_str().unwrap().red(), e ), } } pb.inc(1); } pb.finish_with_message("done."); Ok(()) } fn move_files(moves: &[(&Path, PathBuf)], dry: bool) -> Result<(), Error> { for &(from, ref to) in moves { print!( "Moving {} to {} ...", from.to_str().unwrap().yellow(), to.to_str().unwrap().yellow() );
} Ok(()) } fn main() { let args = Args::from_args(); clams::console::set_color(!args.no_color); let name = Args::clap().get_name().to_owned(); let level: Level = args.verbosity.into(); if !args.silent { eprintln!( "{} version={}, log level={:?}", name, env!("CARGO_PKG_VERSION"), &level ); } let log_config = LogConfig::new( std::io::stderr(), !args.no_color, Level(log::LevelFilter::Error), vec![ModLevel { module: name.to_owned(), level, }], None, ); init_logging(log_config).expect("Failed to initialize logging"); match run(args) { Ok(_) => {} Err(e) => { println!("Failed:"); for c in e.iter_chain() { println!("{}", c); } } } }
if dry { println!(" {}", "simulated.".blue()); } else { match std::fs::rename(from, to) { Ok(_) => println!(" {}.", "done".green()), Err(e) => eprintln!( "Failed to move {} because {}", from.to_str().unwrap().red(), e ), } }
if_condition
[ { "content": " };\n\n\n\n let size = size.parse::<u64>().map_err(|_| MvFilesError::InvaildSize {\n\n arg: String::from(size),\n\n })?;\n\n\n\n let size = match scale {\n\n 'k' => size * 1024u64.pow(1),\n\n 'M' => size * 1024u64.pow(2),\n\n 'G' => size * 1024u64.pow(3),\n\n 'T' => size * 1024u64.pow(4),\n\n 'P' => size * 1024u64.pow(5),\n\n _ => size,\n\n };\n\n\n\n Ok(size)\n\n }\n\n\n\n pub fn destination_path<T: AsRef<Path>, S: AsRef<Path>>(\n\n destination_dir: T,\n", "file_path": "src/lib.rs", "rank": 0, "score": 20.058961153918474 }, { "content": " use std::fs::File;\n\n use std::io::{BufReader, Read, Write};\n\n use std::path::Path;\n\n\n\n #[derive(Debug, Fail)]\n\n pub enum ApfError {\n\n #[fail(display = \"Could not open source file because {}\", arg)]\n\n FailedToOpenSourceFile { arg: String },\n\n #[fail(display = \"Could not open destination file because {}\", arg)]\n\n FailedToOpenDestinationFile { arg: String },\n\n #[fail(display = \"Failed to read because {}\", arg)]\n\n FailedToRead { arg: String },\n\n #[fail(display = \"Failed to write because {}\", arg)]\n\n FailedToWrite { arg: String },\n\n }\n\n\n\n mod pelican {\n\n use super::ApfError;\n\n use std::collections::HashMap;\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 18.359889624279614 }, { "content": " pub fn write_content_to_file(content: &str, path: &Path) -> Result<(), NnError> {\n\n // Make sure, the destnation dir exists.\n\n let dir = path\n\n .parent()\n\n .ok_or_else(|| NnError::FailedToWriteNoteFile {\n\n arg: \"path does not contain directory\".to_string(),\n\n })?;\n\n if !dir.exists() {\n\n fs::create_dir(dir)\n\n .map_err(|e| NnError::FailedToWriteNoteFile { arg: e.to_string() })?;\n\n }\n\n let mut file = File::create(path)\n\n .map_err(|e| NnError::FailedToWriteNoteFile { arg: e.to_string() })?;\n\n file.write_all(content.as_bytes())\n\n .map_err(|e| NnError::FailedToWriteNoteFile { arg: e.to_string() })?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn open_editor(file: &Path) 
-> Result<(), NnError> {\n", "file_path": "src/lib.rs", "rank": 2, "score": 18.199326396349733 }, { "content": " InvaildSize { arg: String },\n\n #[fail(display = \"Invalid extensions list '{}'\", arg)]\n\n InvalidExtensionsList { arg: String },\n\n #[fail(display = \"Invalid file name'{}'\", arg)]\n\n InvalidFileName { arg: String },\n\n }\n\n\n\n pub fn human_size_to_bytes(size: &str) -> Result<u64, MvFilesError> {\n\n if size.is_empty() {\n\n return Err(MvFilesError::InvaildSize {\n\n arg: String::from(size),\n\n });\n\n };\n\n\n\n let scales: &[_] = &['k', 'M', 'G', 'T', 'P'];\n\n let scale = size.chars().last().unwrap(); // safe because is_empty check\n\n let size = if scales.contains(&scale) {\n\n size.trim_end_matches(scales)\n\n } else {\n\n size\n", "file_path": "src/lib.rs", "rank": 3, "score": 17.637779078681913 }, { "content": "\n\n assert_that(&res).is_ok();\n\n assert_that(&buffer).is_equal_to(expected);\n\n }\n\n\n\n }\n\n }\n\n}\n\n\n\npub mod mv_files {\n\n use failure::Fail;\n\n use std::path::{Path, PathBuf};\n\n\n\n #[derive(Debug, Fail)]\n\n pub enum MvFilesError {\n\n #[fail(display = \"Source directories missing\")]\n\n EmptySources,\n\n #[fail(display = \"Extensions missing\")]\n\n EmptyExtensions,\n\n #[fail(display = \"Invalid size arg '{}'\", arg)]\n", "file_path": "src/lib.rs", "rank": 4, "score": 17.59821555144473 }, { "content": " file_path: S,\n\n ) -> Result<PathBuf, MvFilesError> {\n\n let file = file_path\n\n .as_ref()\n\n .file_name()\n\n .ok_or_else(|| MvFilesError::InvalidFileName {\n\n arg: format!(\"{:?}\", file_path.as_ref()),\n\n })?;\n\n\n\n let mut path = PathBuf::new();\n\n path.push(destination_dir.as_ref());\n\n path.push(file);\n\n\n\n Ok(path)\n\n }\n\n\n\n pub fn parse_extensions(ext: &str) -> Result<Vec<&str>, MvFilesError> {\n\n if ext.is_empty() {\n\n return Err(MvFilesError::InvalidExtensionsList {\n\n arg: String::from(ext),\n", "file_path": "src/lib.rs", "rank": 5, "score": 17.592888949100036 }, { "content": " 
frontmatter: &FrontMatter,\n\n ) -> Result<(), NnError> {\n\n let content = render_template(template, frontmatter)?;\n\n let _ = write_content_to_file(&content, &path)?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn render_template(template: &str, frontmatter: &FrontMatter) -> Result<String, NnError> {\n\n let mut handlebars = Handlebars::new();\n\n handlebars\n\n .register_template_string(\"frontmatter\", template)\n\n .map_err(|e| NnError::FailedToRenderFrontmatterTemplate { arg: e.to_string() })?;\n\n let text = handlebars\n\n .render(\"frontmatter\", frontmatter)\n\n .map_err(|e| NnError::FailedToRenderFrontmatterTemplate { arg: e.to_string() })?;\n\n\n\n Ok(text)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 6, "score": 13.058856321557602 }, { "content": "\n\n buf\n\n }\n\n }\n\n\n\n pub fn adapt_pelican_frontmatter_in_file(src: &Path, dest: &Path) -> Result<(), ApfError> {\n\n let read =\n\n File::open(src).map_err(|e| ApfError::FailedToOpenSourceFile { arg: e.to_string() })?;\n\n let mut write = File::create(dest)\n\n .map_err(|e| ApfError::FailedToOpenDestinationFile { arg: e.to_string() })?;\n\n\n\n adapt_pelican_frontmatter(read, &mut write)\n\n }\n\n\n\n /// Pelican's Frontmatter is really simple, but does not adhere the to front matter syntax used\n\n /// by Jekyll et al. The format is not yaml, but rather a sequence line separated key: value\n\n /// pairs until a blank.\n\n /// So let's keep this simple and read every line like a key value pair until the first blank\n\n /// line. 
The semantic has to be hardcoded for category and tags.\n\n fn adapt_pelican_frontmatter<R: Read, W: Write>(src: R, dest: &mut W) -> Result<(), ApfError> {\n", "file_path": "src/lib.rs", "rank": 7, "score": 12.713907824916085 }, { "content": "\n\n dest.write(frontmatter.write().as_bytes())\n\n .map_err(|e| ApfError::FailedToWrite { arg: e.to_string() })?;\n\n\n\n loop {\n\n match lines.next() {\n\n Some(line) => {\n\n dest.write(b\"\\n\")\n\n .map_err(|e| ApfError::FailedToWrite { arg: e.to_string() })?;\n\n dest.write(line.as_bytes())\n\n .map_err(|e| ApfError::FailedToWrite { arg: e.to_string() })?;\n\n }\n\n None => break,\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[cfg(test)]\n", "file_path": "src/lib.rs", "rank": 8, "score": 12.417486661179852 }, { "content": " pub notes_directory: String,\n\n pub notes_template: String,\n\n }\n\n\n\n pub fn title_to_file_name(title: &str) -> String {\n\n let mut res = title\n\n .to_lowercase()\n\n .replace(\" \", \"-\")\n\n .replace(\"'\", \"-\")\n\n .replace(\",\", \"-\");\n\n res.push_str(\".md\");\n\n res\n\n }\n\n\n\n pub fn str_date_to_date(date: &str) -> Result<DateTime<Local>, NnError> {\n\n match date {\n\n \"now\" => Ok(Local::now()),\n\n _ => Local\n\n .datetime_from_str(date, \"%Y-%m-%d %H:%M\")\n\n .map_err(|e| NnError::FailedToParsePublicationDate { arg: e.to_string() }),\n", "file_path": "src/lib.rs", "rank": 9, "score": 12.338735807090305 }, { "content": " use std::fs::{self, File};\n\n use std::io::prelude::*;\n\n use std::process::Command;\n\n\n\n #[derive(Debug, Fail)]\n\n pub enum NnError {\n\n #[fail(display = \"Could to parse publication date because {}\", arg)]\n\n FailedToParsePublicationDate { arg: String },\n\n #[fail(display = \"Could to create new note because {}\", arg)]\n\n FailedToCreateNewNote { arg: String },\n\n #[fail(display = \"Could not exec editor because {}\", arg)]\n\n FailedToExecEditor { arg: String },\n\n #[fail(display = \"Could not render frontmatter template because {}\", arg)]\n\n 
FailedToRenderFrontmatterTemplate { arg: String },\n\n #[fail(display = \"Could not write note file because {}\", arg)]\n\n FailedToWriteNoteFile { arg: String },\n\n }\n\n\n\n #[derive(Config, Debug, Serialize, Deserialize)]\n\n pub struct NewNoteConfig {\n", "file_path": "src/lib.rs", "rank": 10, "score": 11.977148942972368 }, { "content": " .is_equal_to(100 * 1024 * 1024 * 1024 * 1024)\n\n }\n\n\n\n #[test]\n\n fn peta_bytes() {\n\n assert_that(&human_size_to_bytes(\"100P\"))\n\n .is_ok()\n\n .is_equal_to(100 * 1024 * 1024 * 1024 * 1024 * 1024)\n\n }\n\n\n\n #[test]\n\n fn unknown_scale() {\n\n let res = human_size_to_bytes(\"100L\");\n\n assert_that(&res).is_err();\n\n }\n\n }\n\n\n\n mod destination_path {\n\n use super::*;\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 11.405741646928217 }, { "content": " #[test]\n\n fn destination_path_ok() {\n\n let destination_dir = PathBuf::from(\"/tmp\");\n\n let abs_file = PathBuf::from(\"/temp/a_file\");\n\n let expected = PathBuf::from(\"/tmp/a_file\");\n\n\n\n let res = destination_path(&destination_dir, &abs_file);\n\n\n\n assert_that(&res).is_ok().is_equal_to(expected);\n\n }\n\n }\n\n\n\n mod parse_extension {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty() {\n\n let res = parse_extensions(\"\");\n\n assert_that(&res).is_err();\n\n }\n", "file_path": "src/lib.rs", "rank": 12, "score": 11.226054345778826 }, { "content": " let editor = env::var_os(\"EDITOR\").unwrap_or_else(|| \"vi\".to_string().into());\n\n\n\n let _ = Command::new(editor)\n\n .arg(file.as_os_str())\n\n .spawn()\n\n .map_err(|e| NnError::FailedToExecEditor { arg: e.to_string() })?;\n\n\n\n Ok(())\n\n }\n\n\n\n #[cfg(test)]\n\n mod test {\n\n pub use super::*;\n\n pub use spectral::prelude::*;\n\n\n\n #[test]\n\n fn title_to_file_name_okay() {\n\n let title = \"This is just a 'Punk, Rock' song\";\n\n let expected = \"this-is-just-a--punk--rock--song.md\".to_owned();\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 11.048934653931317 
}, { "content": " });\n\n };\n\n\n\n let res: Vec<_> = ext.trim_end_matches(',').split(',').collect();\n\n\n\n Ok(res)\n\n }\n\n\n\n #[cfg(test)]\n\n mod test {\n\n pub use super::*;\n\n pub use spectral::prelude::*;\n\n\n\n mod human_size_to_bytes {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty() {\n\n let res = human_size_to_bytes(\"\");\n\n assert_that(&res).is_err();\n", "file_path": "src/lib.rs", "rank": 14, "score": 9.543316356841036 }, { "content": " #[derive(Debug, PartialEq)]\n\n pub struct FrontMatter {\n\n pub fields: HashMap<String, String>,\n\n }\n\n\n\n pub fn parse_front_matter<T: AsRef<str>>(src: &[T]) -> Result<FrontMatter, ApfError> {\n\n let mut fields = HashMap::new();\n\n\n\n for line in src {\n\n let line = line.as_ref();\n\n let splits: Vec<_> = line.splitn(2, \":\").collect();\n\n // We split once at max, so len==2 is _ to satisfy compiler for exhaustive matching.\n\n match splits.len() {\n\n 0 => {}\n\n 1 => {\n\n fields.insert(splits[0].to_owned(), \"\".to_owned());\n\n }\n\n _ => {\n\n fields.insert(splits[0].to_owned(), splits[1].trim().to_owned());\n\n }\n", "file_path": "src/lib.rs", "rank": 15, "score": 8.471268659478877 }, { "content": "\n\n let mut keys: Vec<_> = self.fields.keys().collect();\n\n keys.sort();\n\n for k in keys {\n\n let line = match *self.fields.get(k).unwrap() {\n\n // Safe unwrap\n\n FrontMatterType::Value(ref s) => format!(\"{}: \\\"{}\\\"\\n\", k, s),\n\n FrontMatterType::List(ref l) => {\n\n let list: String = l\n\n .iter()\n\n .map(|s| format!(\"- \\\"{}\\\"\", s))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n format!(\"{}:\\n{}\\n\", k, list)\n\n }\n\n };\n\n buf.push_str(&line);\n\n }\n\n\n\n buf.push_str(\"---\\n\");\n", "file_path": "src/lib.rs", "rank": 16, "score": 7.3618682090327 }, { "content": "# clams-bin\n\n\n\nShell CLIs build with help from clams\n\n\n\n[![Linux and macOS Build 
Status](https://travis-ci.org/lukaspustina/clams-bin.svg?branch=master)](https://travis-ci.org/lukaspustina/clams-bin) [![codecov](https://codecov.io/gh/lukaspustina/clams-bin/branch/master/graph/badge.svg)](https://codecov.io/gh/lukaspustina/clams-bin) [![GitHub release](https://img.shields.io/github/release/lukaspustina/clams-bin.svg)](https://github.com/lukaspustina/clams-bin/releases) [![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg?label=License)](./LICENSE)\n\n\n\n* `mv_files` -- Move files from a nested directory structure, selected by file suffix and file size into another, flat directory.\n\n* `new_note` -- Create new blog article or note from markdown template with frontmatter.\n\n* `pelican_frontmatter` -- Parses markdown frontmatter created by [pelican's importer](http://docs.getpelican.com/en/3.6.3/importer.html) into [Jekyll style frontmatter](https://jekyllrb.com/docs/frontmatter/) with a few adaption.\n\n\n", "file_path": "README.md", "rank": 17, "score": 7.021879361632354 }, { "content": "pub mod netatmo {\n\n use clams::config::prelude::*;\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[derive(Config, Debug, Serialize, Deserialize)]\n\n pub struct NetatmoConfig {\n\n pub client_id: String,\n\n pub client_secret: String,\n\n pub username: String,\n\n pub password: String,\n\n }\n\n}\n\n\n\npub mod new_note {\n\n use chrono::prelude::*;\n\n use clams::config::prelude::*;\n\n use failure::Fail;\n\n use handlebars::Handlebars;\n\n use serde::{Deserialize, Serialize};\n\n use std::env;\n", "file_path": "src/lib.rs", "rank": 18, "score": 6.649530744594876 }, { "content": " let mut buf = String::new();\n\n let mut reader = BufReader::new(src);\n\n reader\n\n .read_to_string(&mut buf)\n\n .map_err(|e| ApfError::FailedToRead { arg: e.to_string() })?;\n\n let mut lines = buf.split('\\n');\n\n\n\n let mut frontmatter_buf = Vec::new();\n\n loop {\n\n match lines.next() {\n\n Some(line) if line.is_empty() => break,\n\n Some(line) => {\n\n 
frontmatter_buf.push(line);\n\n }\n\n None => break,\n\n }\n\n }\n\n\n\n let pelican_frontmatter = pelican::parse_front_matter(frontmatter_buf.as_slice())?;\n\n let frontmatter: FrontMatter = pelican_frontmatter.into();\n", "file_path": "src/lib.rs", "rank": 19, "score": 6.616975351492954 }, { "content": " }\n\n\n\n #[test]\n\n fn nan() {\n\n let res = human_size_to_bytes(\"a10\");\n\n assert_that(&res).is_err();\n\n }\n\n\n\n #[test]\n\n fn bytes() {\n\n assert_that(&human_size_to_bytes(\"100\"))\n\n .is_ok()\n\n .is_equal_to(100)\n\n }\n\n\n\n #[test]\n\n fn kilo_bytes() {\n\n assert_that(&human_size_to_bytes(\"100k\"))\n\n .is_ok()\n\n .is_equal_to(100 * 1024)\n", "file_path": "src/lib.rs", "rank": 20, "score": 5.930829744585138 }, { "content": "\n\n #[derive(Debug, PartialEq)]\n\n pub enum FrontMatterType {\n\n Value(String),\n\n List(Vec<String>),\n\n }\n\n\n\n #[derive(Debug, PartialEq)]\n\n pub struct FrontMatter {\n\n pub fields: HashMap<String, FrontMatterType>,\n\n }\n\n\n\n impl From<pelican::FrontMatter> for FrontMatter {\n\n fn from(pelican: pelican::FrontMatter) -> Self {\n\n let mut fields = HashMap::new();\n\n\n\n for (k, v) in pelican.fields {\n\n match k.to_lowercase().as_ref() {\n\n \"tags\" => fields.insert(\n\n \"tags\".to_owned(),\n", "file_path": "src/lib.rs", "rank": 21, "score": 5.686479710648264 }, { "content": " }\n\n }\n\n\n\n pub fn date_to_iso_day(dt: &DateTime<Local>) -> String {\n\n dt.format(\"%Y-%m-%d\").to_string()\n\n }\n\n\n\n pub fn date_to_iso_time(dt: &DateTime<Local>) -> String {\n\n dt.format(\"%Y-%m-%d %H:%M\").to_string()\n\n }\n\n\n\n #[derive(Debug, Serialize)]\n\n pub struct FrontMatter {\n\n pub title: String,\n\n pub date: String,\n\n }\n\n\n\n pub fn create_note(\n\n path: &Path,\n\n template: &str,\n", "file_path": "src/lib.rs", "rank": 22, "score": 5.22521620946889 }, { "content": " }\n\n }\n\n\n\n Ok(FrontMatter { fields })\n\n }\n\n\n\n #[cfg(test)]\n\n mod test {\n\n pub use super::*;\n\n pub use 
spectral::prelude::*;\n\n\n\n #[test]\n\n fn parse_front_matter_empty() {\n\n let front_matter = String::from(\"\");\n\n let expected = FrontMatter {\n\n fields: HashMap::new(),\n\n };\n\n\n\n let front_matter: Vec<_> = front_matter.lines().collect();\n\n let res = parse_front_matter(front_matter.as_slice());\n", "file_path": "src/lib.rs", "rank": 23, "score": 5.211350875125868 }, { "content": " }\n\n\n\n #[test]\n\n fn mega_bytes() {\n\n assert_that(&human_size_to_bytes(\"100M\"))\n\n .is_ok()\n\n .is_equal_to(100 * 1024 * 1024)\n\n }\n\n\n\n #[test]\n\n fn giga_bytes() {\n\n assert_that(&human_size_to_bytes(\"100G\"))\n\n .is_ok()\n\n .is_equal_to(100 * 1024 * 1024 * 1024)\n\n }\n\n\n\n #[test]\n\n fn tera_bytes() {\n\n assert_that(&human_size_to_bytes(\"100T\"))\n\n .is_ok()\n", "file_path": "src/lib.rs", "rank": 24, "score": 4.648361839177479 }, { "content": " let res = date_to_iso_day(&date);\n\n\n\n assert_that(&res).is_equal_to(expected);\n\n }\n\n\n\n #[test]\n\n fn date_to_iso_time_okay() {\n\n let date = Local.ymd(2001, 01, 01).and_hms(01, 01, 00);\n\n let expected = \"2001-01-01 01:01\".to_owned();\n\n\n\n let res = date_to_iso_time(&date);\n\n\n\n assert_that(&res).is_equal_to(expected);\n\n }\n\n }\n\n}\n\n\n\npub mod pelican_frontmatter {\n\n use failure::Fail;\n\n use std::collections::HashMap;\n", "file_path": "src/lib.rs", "rank": 25, "score": 4.367674107573849 }, { "content": "\n\n //assert_that(&res).is_ok().is_equal_to(expected);\n\n assert_that(&res.is_ok()).is_true();\n\n assert_that(&res.unwrap()).is_equal_to(expected);\n\n }\n\n\n\n #[test]\n\n fn parse_front_matter_ok() {\n\n let front_matter = String::from(\n\n r#\"Title: With Proper TDD, You Get That\n\nDate: 2012-07-27 12:00\n\nAuthor: lukas\n\nCategory: Allgemein, Test Driving\n\nTags: TDD, Testing\n\nSlug: with-proper-tdd-you-get-that\n\nStatus: published\"#,\n\n );\n\n let mut fields = HashMap::new();\n\n fields.insert(\n\n \"Title\".to_owned(),\n", "file_path": "src/lib.rs", "rank": 
26, "score": 4.056783793185029 }, { "content": " let res = title_to_file_name(title);\n\n\n\n assert_that(&res).is_equal_to(expected);\n\n }\n\n\n\n #[test]\n\n fn str_date_to_date_okay() {\n\n let str_date = \"2001-01-01 01:01\";\n\n let expected = Local.ymd(2001, 01, 01).and_hms(01, 01, 00);\n\n\n\n let res = str_date_to_date(str_date);\n\n\n\n assert_that(&res).is_ok().is_equal_to(expected);\n\n }\n\n\n\n #[test]\n\n fn date_to_iso_day_okay() {\n\n let date = Local.ymd(2001, 01, 01).and_hms(01, 01, 00);\n\n let expected = \"2001-01-01\".to_owned();\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 3.6805251216771238 }, { "content": " \"With Proper TDD, You Get That\".to_owned(),\n\n );\n\n fields.insert(\"Date\".to_owned(), \"2012-07-27 12:00\".to_owned());\n\n fields.insert(\"Author\".to_owned(), \"lukas\".to_owned());\n\n fields.insert(\"Category\".to_owned(), \"Allgemein, Test Driving\".to_owned());\n\n fields.insert(\"Tags\".to_owned(), \"TDD, Testing\".to_owned());\n\n fields.insert(\"Slug\".to_owned(), \"with-proper-tdd-you-get-that\".to_owned());\n\n fields.insert(\"Status\".to_owned(), \"published\".to_owned());\n\n\n\n let expected = FrontMatter { fields };\n\n\n\n let front_matter: Vec<_> = front_matter.lines().collect();\n\n let res = parse_front_matter(front_matter.as_slice());\n\n\n\n //assert_that(&res).is_ok().is_equal_to(expected);\n\n assert_that(&res.is_ok()).is_true();\n\n assert_that(&res.unwrap()).is_equal_to(expected);\n\n }\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 28, "score": 3.1372322970616215 }, { "content": " FrontMatterType::List(v.split(',').map(|s| s.trim().to_owned()).collect()),\n\n ),\n\n \"category\" => fields.insert(\n\n \"categories\".to_owned(),\n\n FrontMatterType::List(v.split(',').map(|s| s.trim().to_owned()).collect()),\n\n ),\n\n \"slug\" => None, // Remove this frontmatter field\n\n key @ _ => fields.insert(key.to_owned(), FrontMatterType::Value(v.to_owned())),\n\n };\n\n }\n\n\n\n FrontMatter { fields 
}\n\n }\n\n }\n\n\n\n impl FrontMatter {\n\n pub fn write(&self) -> String {\n\n let mut buf = String::new();\n\n\n\n buf.push_str(\"---\\n\");\n", "file_path": "src/lib.rs", "rank": 29, "score": 3.121448137309147 }, { "content": "\n\n #[test]\n\n fn one_extension() {\n\n let res = parse_extensions(\"mkv\");\n\n assert_that(&res).is_ok().has_length(1);\n\n }\n\n\n\n #[test]\n\n fn two_extension() {\n\n let res = parse_extensions(\"mkv,avi\");\n\n assert_that(&res).is_ok().has_length(2);\n\n }\n\n\n\n #[test]\n\n fn two_extension_trailing_sep() {\n\n let res = parse_extensions(\"mkv,avi,\");\n\n assert_that(&res).is_ok().has_length(2);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 30, "score": 2.0667443977429816 }, { "content": " mod test {\n\n pub use super::*;\n\n pub use spectral::prelude::*;\n\n\n\n use std::io::BufWriter;\n\n\n\n mod adapt_pelican_frontmatter {\n\n use super::*;\n\n\n\n #[test]\n\n fn from_pelican_frontmatter() {\n\n let mut pelican_fields = HashMap::new();\n\n pelican_fields.insert(\n\n \"Title\".to_owned(),\n\n \"With Proper TDD, You Get That\".to_owned(),\n\n );\n\n pelican_fields.insert(\"Date\".to_owned(), \"2012-07-27 12:00\".to_owned());\n\n pelican_fields.insert(\"Author\".to_owned(), \"lukas\".to_owned());\n\n pelican_fields.insert(\"Category\".to_owned(), \"Allgemein, Test Driving\".to_owned());\n\n pelican_fields.insert(\"Tags\".to_owned(), \"TDD, Testing\".to_owned());\n", "file_path": "src/lib.rs", "rank": 31, "score": 2.0407190094880168 }, { "content": "\n\nDariusz Pasciak describes [how developing software without TDD is\n\nlike](http://blog.8thlight.com/dariusz-pasciak/2012/07/18/with-proper-tdd-you-get-that.html \"With Proper TDD, You Get That\")\n\nand concludes:\n\n\n\nEnd.\n\n\"#);\n\n let expected = String::from(\n\nr#\"---\n\nauthor: \"lukas\"\n\ncategories:\n\n- \"Allgemein\"\n\n- \"Test Driving\"\n\ndate: \"2012-07-27 12:00\"\n\nstatus: \"published\"\n\ntags:\n", "file_path": "src/lib.rs", "rank": 32, 
"score": 1.1852004932862037 }, { "content": " \"tags\".to_owned(),\n\n FrontMatterType::List(vec![\"TDD\".to_owned(), \"Testing\".to_owned()]),\n\n );\n\n fields.insert(\n\n \"slug\".to_owned(),\n\n FrontMatterType::Value(\"with-proper-tdd-you-get-that\".to_owned()),\n\n );\n\n fields.insert(\n\n \"status\".to_owned(),\n\n FrontMatterType::Value(\"published\".to_owned()),\n\n );\n\n let frontmatter = FrontMatter { fields };\n\n\n\n let expected = String::from(\n\n r#\"---\n\nauthor: \"lukas\"\n\ncategories:\n\n- \"Allgemein\"\n\n- \"Test Driving\"\n\ndate: \"2012-07-27 12:00\"\n\nslug: \"with-proper-tdd-you-get-that\"\n\nstatus: \"published\"\n\ntags:\n\n- \"TDD\"\n\n- \"Testing\"\n", "file_path": "src/lib.rs", "rank": 33, "score": 0.9313724927013958 } ]
Rust
benches/raid.rs
geky/gf256
57675335061b18e3614376981482fd7584454fd5
use criterion::criterion_group; use criterion::criterion_main; use criterion::Criterion; use criterion::BatchSize; use criterion::Throughput; use std::iter; use std::convert::TryFrom; #[allow(dead_code)] #[path = "../examples/raid.rs"] mod raid; fn bench_raid(c: &mut Criterion) { let mut group = c.benchmark_group("raid"); fn xorshift64(seed: u64) -> impl Iterator<Item=u64> { let mut x = seed; iter::repeat_with(move || { x ^= x << 13; x ^= x >> 7; x ^= x << 17; x }) } const SIZE: usize = 1024*1024; const COUNT: usize = 5; let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid5_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p)| raid::raid5_format(disks, p), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid5_format(&disks, &mut p); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid5_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid5_update(*i, &disks[*i], data, &mut p); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid5_repair", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+1).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p)| raid::raid5_repair(disks, p, &[*i]), BatchSize::SmallInput 
)); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid6_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p, q)| raid::raid6_format(disks, p, q), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut q = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid6_format(&mut disks, &mut p, &mut q); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid6_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid6_update(*i, &disks[*i], data, &mut p, &mut q); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid6_repair_1", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+2).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p, q)| raid::raid6_repair(disks, p, q, &[*i]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid6_repair_2", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+2).unwrap()); ( i, (i+1) % (COUNT+2), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) 
.take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, disks, p, q)| raid::raid6_repair(disks, p, q, &[*i, *j]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid7_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p, q, r)| raid::raid7_format(disks, p, q, r), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut q = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut r = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid6_format(&mut disks, &mut p, &mut q); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid7_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid7_update(*i, &disks[*i], data, &mut p, &mut q, &mut r); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid7_repair_1", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, 
&[*i]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid7_repair_2", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()); ( i, (i+1) % (COUNT+2), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, &[*i, *j]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid7_repair_3", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()); ( i, (i+1) % (COUNT+3), (i+2) % (COUNT+3), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, k, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, &[*i, *j, *k]), BatchSize::SmallInput )); } criterion_group!(benches, bench_raid); criterion_main!(benches);
use criterion::criterion_group; use criterion::criterion_main; use criterion::Criterion; use criterion::BatchSize; use criterion::Throughput; use std::iter; use std::convert::TryFrom; #[allow(dead_code)] #[path = "../examples/raid.rs"] mod raid; fn bench_raid(c: &mut Criterion) { let mut group = c.benchmark_group("raid");
const SIZE: usize = 1024*1024; const COUNT: usize = 5; let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid5_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p)| raid::raid5_format(disks, p), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid5_format(&disks, &mut p); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid5_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid5_update(*i, &disks[*i], data, &mut p); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid5_repair", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+1).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p)| raid::raid5_repair(disks, p, &[*i]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid6_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p, q)| raid::raid6_format(disks, p, q), BatchSize::SmallInput )); let mut xs = 
xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut q = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid6_format(&mut disks, &mut p, &mut q); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid6_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid6_update(*i, &disks[*i], data, &mut p, &mut q); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid6_repair_1", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+2).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p, q)| raid::raid6_repair(disks, p, q, &[*i]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid6_repair_2", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+2).unwrap()); ( i, (i+1) % (COUNT+2), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, disks, p, q)| raid::raid6_repair(disks, p, q, &[*i, *j]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((COUNT*SIZE) as u64)); group.bench_function("raid7_format", |b| b.iter_batched_ref( || {( iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() 
}) .take(COUNT) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(disks, p, q, r)| raid::raid7_format(disks, p, q, r), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); let mut disks = iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(); let mut p = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut q = (&mut xs).take(SIZE).collect::<Vec<u8>>(); let mut r = (&mut xs).take(SIZE).collect::<Vec<u8>>(); raid::raid6_format(&mut disks, &mut p, &mut q); group.throughput(Throughput::Bytes(SIZE as u64)); group.bench_function("raid7_update", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT).unwrap()), (&mut xs).take(SIZE).collect::<Vec<u8>>(), )}, |(i, data)| { raid::raid7_update(*i, &disks[*i], data, &mut p, &mut q, &mut r); disks[*i].copy_from_slice(data); }, BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((1*SIZE) as u64)); group.bench_function("raid7_repair_1", |b| b.iter_batched_ref( || {( usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() )}, |(i, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, &[*i]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid7_repair_2", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()); ( i, (i+1) % (COUNT+2), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), 
(&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, &[*i, *j]), BatchSize::SmallInput )); let mut xs = xorshift64(42).map(|x| x as u8); group.throughput(Throughput::Bytes((2*SIZE) as u64)); group.bench_function("raid7_repair_3", |b| b.iter_batched_ref( || { let i = usize::from((&mut xs).next().unwrap() % u8::try_from(COUNT+3).unwrap()); ( i, (i+1) % (COUNT+3), (i+2) % (COUNT+3), iter::repeat_with(|| { (&mut xs).take(SIZE).collect::<Vec<u8>>() }) .take(COUNT+2) .collect::<Vec<_>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>(), (&mut xs).take(SIZE).collect::<Vec<u8>>() ) }, |(i, j, k, disks, p, q, r)| raid::raid7_repair(disks, p, q, r, &[*i, *j, *k]), BatchSize::SmallInput )); } criterion_group!(benches, bench_raid); criterion_main!(benches);
fn xorshift64(seed: u64) -> impl Iterator<Item=u64> { let mut x = seed; iter::repeat_with(move || { x ^= x << 13; x ^= x >> 7; x ^= x << 17; x }) }
function_block-full_function
[ { "content": "fn bench_gfmul(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"gfmul\");\n\n\n\n // gf256 mul/div\n\n bench_mul!(group, \"gf256_naive_mul\", gf256_naive);\n\n bench_mul!(group, \"gf256_table_mul\", gf256_table);\n\n bench_mul!(group, \"gf256_rem_table_mul\", gf256_rem_table);\n\n bench_mul!(group, \"gf256_small_rem_table_mul\", gf256_small_rem_table);\n\n bench_mul!(group, \"gf256_barret_mul\", gf256_barret);\n\n\n\n bench_div!(group, \"gf256_naive_div\", gf256_naive);\n\n bench_div!(group, \"gf256_table_div\", gf256_table);\n\n bench_div!(group, \"gf256_rem_table_div\", gf256_rem_table);\n\n bench_div!(group, \"gf256_small_rem_table_div\", gf256_small_rem_table);\n\n bench_div!(group, \"gf256_barret_div\", gf256_barret);\n\n\n\n // gf16 mul/div\n\n bench_mul!(group, \"gf16_naive_mul\", |x: u8| gf16_naive::try_from(x&0xf).unwrap());\n\n bench_mul!(group, \"gf16_table_mul\", |x: u8| gf16_table::try_from(x&0xf).unwrap());\n\n bench_mul!(group, \"gf16_rem_table_mul\", |x: u8| gf16_rem_table::try_from(x&0xf).unwrap());\n", "file_path": "benches/gf.rs", "rank": 1, "score": 172500.24560829473 }, { "content": "fn bench_find_p(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"find_p\");\n\n\n\n // find 9-bit irreducible polynomials\n\n let mut irreducibles = iter::repeat_with(|| find_p::irreducibles(9)).flatten();\n\n group.bench_function(\"find_irreducibles_9\", |b| b.iter(\n\n || irreducibles.next().unwrap(),\n\n ));\n\n\n\n // find 8-bit generators\n\n let polynomial = irreducibles.next().unwrap();\n\n let mut generators = iter::repeat_with(|| find_p::generators(polynomial)).flatten();\n\n group.bench_function(\"find_generators_8\", |b| b.iter(\n\n || generators.next().unwrap(),\n\n ));\n\n\n\n\n\n // find 17-bit irreducible polynomials\n\n let mut irreducibles = iter::repeat_with(|| find_p::irreducibles(17)).flatten();\n\n group.bench_function(\"find_irreducibles_17\", |b| b.iter(\n", "file_path": "benches/find-p.rs", "rank": 2, 
"score": 172500.24560829473 }, { "content": "fn bench_xmul(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"xmul\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // naive xmul\n\n let mut xs = xorshift64(42).map(p64);\n\n let mut ys = xorshift64(42*42).map(p64);\n\n group.bench_function(\"naive_xmul\", |b| b.iter_batched(\n\n || (xs.next().unwrap(), ys.next().unwrap()),\n\n |(x, y)| x.naive_wrapping_mul(y),\n", "file_path": "benches/xmul.rs", "rank": 3, "score": 172500.24560829473 }, { "content": "fn bench_crc(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"crc\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n\n\n // naive crc\n\n let mut xs = xorshift64(42).map(|x| x as u8);\n", "file_path": "benches/crc.rs", "rank": 4, "score": 172500.24560829473 }, { "content": "fn bench_lfsr(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"lfsr\");\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n let mut buffer = vec![0u64; SIZE/size_of::<u64>()];\n\n\n\n // xorshift timing\n\n let mut xorshift64 = Xorshift64::new(0x123456789abcdef0);\n\n group.bench_function(\"xorshift64\", |b| b.iter(\n\n || buffer.fill_with(|| xorshift64.next())\n\n ));\n\n\n\n // lfsr64 timings\n\n let mut lfs64_naive = lfsr::Lfsr64Naive::new(0x123456789abcdef0);\n\n group.bench_function(\"lfsr64_naive\", |b| b.iter(\n\n || buffer.fill_with(|| lfs64_naive.next(64))\n\n ));\n\n\n", "file_path": 
"benches/lfsr.rs", "rank": 5, "score": 172500.24560829473 }, { "content": "fn bench_rs(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"rs\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n\n\n // note we are using Reed-Solomon (20, 12) only because it's what is in our\n\n // example, this isn't necessarily the most efficient geometry, but we\n", "file_path": "benches/rs.rs", "rank": 6, "score": 172500.24560829473 }, { "content": "fn bench_shamir(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"shamir\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024;\n\n const N: usize = 5;\n\n group.throughput(Throughput::Bytes((N*SIZE) as u64));\n\n\n\n // benchmark the time it takes to generate shares\n", "file_path": "benches/shamir.rs", "rank": 7, "score": 172500.24560829476 }, { "content": "fn bench_lfsr_compressability(c: &mut Criterion<Compressability>) {\n\n let mut group = c.benchmark_group(\"lfsr\");\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n let mut buffer = vec![0; SIZE];\n\n\n\n // xorshift compressability\n\n let mut xorshift64 = Xorshift64::new(0x123456789abcdef0);\n\n group.bench_function(\"xorshift64_compressability\", |b| b.iter_custom(\n\n |iters| {\n\n let mut sum = 0.0;\n\n for _ in 0..iters { \n\n buffer.fill_with(|| xorshift64.next() as u8);\n\n let mut comp = DeflateEncoder::new(Vec::new(), Compression::best());\n\n comp.write_all(&buffer).unwrap();\n\n let comp = 
comp.finish().unwrap();\n\n sum += ((SIZE as f64) - (comp.len() as f64)) / (SIZE as f64);\n\n }\n\n sum\n", "file_path": "benches/lfsr.rs", "rank": 8, "score": 158761.6455372852 }, { "content": "pub fn raid(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match RaidArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n // only up to 2 parity blocks are currently supported\n\n assert!(args.parity <= 3);\n\n\n\n // parse type\n\n let ty = parse_macro_input!(input as syn::ItemMod);\n", "file_path": "gf256-macros/src/raid.rs", "rank": 9, "score": 132975.5764488994 }, { "content": "/// Add a block from a RAID7 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\npub fn raid7_remove(j: usize, old: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] -= gf256(old[i]);\n\n q[i] -= gf256(old[i]) * g;\n\n r[i] -= gf256(old[i]) * h;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 10, "score": 128999.49425615693 }, { "content": "/// Add a block to a RAID7 array\n\n///\n\n/// Note the block index must be unique in the array! 
This does not\n\n/// update other block indices.\n\n///\n\npub fn raid7_add(j: usize, new: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]);\n\n q[i] += gf256(new[i]) * g;\n\n r[i] += gf256(new[i]) * h;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 11, "score": 128999.49425615693 }, { "content": "/// Add a block to a RAID6 array\n\n///\n\n/// Note the block index must be unique in the array! This does not\n\n/// update other block indices.\n\n///\n\npub fn raid6_add(j: usize, new: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]);\n\n q[i] += gf256(new[i]) * g;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 12, "score": 126223.1949032655 }, { "content": "/// Add a block from a RAID6 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. 
This does not update other block indices.\n\n///\n\npub fn raid6_remove(j: usize, old: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] -= gf256(old[i]);\n\n q[i] -= gf256(old[i]) * g;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 13, "score": 126223.1949032655 }, { "content": "/// Format blocks with RAID7, aka three blocks of parity\n\npub fn raid7_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n assert!(q.len() == len);\n\n assert!(r.len() == len);\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n assert!(blocks.len() <= 255);\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n for i in 0..len {\n\n p[i] = gf256(0);\n\n q[i] = gf256(0);\n\n r[i] = gf256(0);\n\n }\n\n\n\n for (j, b) in blocks.iter().enumerate() {\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n p[i] += gf256(b.as_ref()[i]);\n\n q[i] += gf256(b.as_ref()[i]) * g;\n\n r[i] += gf256(b.as_ref()[i]) * h;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 14, "score": 123000.27916668174 }, { "content": "/// Update a block in a RAID7 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid7_update(j: usize, old: &[u8], new: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]) - 
gf256(old[i]);\n\n q[i] += (gf256(new[i]) - gf256(old[i])) * g;\n\n r[i] += (gf256(new[i]) - gf256(old[i])) * h;\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/raid.rs", "rank": 15, "score": 121920.90813566904 }, { "content": "#[cfg(feature=\"raid\")]\n\n#[proc_macro_attribute]\n\npub fn raid(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n raid::raid(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 16, "score": 121055.33304083631 }, { "content": "/// Format blocks with RAID6, aka two blocks of parity\n\npub fn raid6_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n assert!(q.len() == len);\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n assert!(blocks.len() <= 255);\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n for i in 0..len {\n\n p[i] = gf256(0);\n\n q[i] = gf256(0);\n\n }\n\n\n\n for (j, b) in blocks.iter().enumerate() {\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n p[i] += gf256(b.as_ref()[i]);\n\n q[i] += gf256(b.as_ref()[i]) * g;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 17, "score": 119229.77605325924 }, { "content": "/// Repair up to `n` bad blocks.\n\n///\n\n/// Where `n` <= the number of parity blocks. This can include the parity\n\n/// blocks themselves. 
`bad_blocks` must be an array of indices indicating\n\n/// which blocks are bad.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hellxxxxxxxx\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"xxxx\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // repair\n\n/// raid7::repair(&mut datas, &mut parity1, &mut parity2, &mut parity3, &[1, 2, 3]);\n\n/// assert_eq!(&data, b\"Hello World!\");\n\n/// ```\n\n///\n\npub fn repair<B: AsMut<[__u]>>(\n\n blocks: &mut [B],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n bad_blocks: &[usize]\n\n) -> Result<(), Error> {\n\n let len = blocks[0].as_mut().len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n if bad_blocks.len() > __parity {\n\n // can't repair\n\n return Err(Error::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n", "file_path": "templates/raid.rs", "rank": 18, "score": 118057.5358244492 }, { "content": "/// Update a block in a RAID6 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid6_update(j: usize, old: &[u8], new: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]) - gf256(old[i]);\n\n q[i] += 
(gf256(new[i]) - gf256(old[i])) * g;\n\n }\n\n}\n\n\n\n\n\n//// RAID7 ////\n\n\n", "file_path": "examples/raid.rs", "rank": 19, "score": 117919.51092421469 }, { "content": "/// Add a block to a RAID5 array\n\n///\n\n/// Note the block index must be unique in the array! This does not\n\n/// update other block indices.\n\n///\n\npub fn raid5_add(_j: usize, new: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= new[i];\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 20, "score": 116722.3385264322 }, { "content": "/// Add a block from a RAID5 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\npub fn raid5_remove(_j: usize, old: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= old[i];\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 21, "score": 116722.3385264322 }, { "content": "/// Encode a message using Reed-Solomon error-correction.\n\n///\n\n/// This writes [`ECC_SIZE`] bytes of error-correction information to the end\n\n/// of the provided slice, based on the data provided in the first\n\n/// `message.len()-ECC_SIZE` bytes. 
The entire codeword is limited to at most\n\n/// [`BLOCK_SIZE`] bytes, but can be smaller.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"Hello World!\".to_vec();\n\n/// codeword.resize(codeword.len()+32, 0u8);\n\n/// rs255w223::encode(&mut codeword);\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn encode(message: &mut [__u]) {\n\n assert!(message.len() <= BLOCK_SIZE);\n\n assert!(message.len() >= ECC_SIZE);\n\n let data_len = message.len() - ECC_SIZE;\n\n\n\n // create copy for polynomial division\n\n //\n\n // note if message is < DATA_SIZE we just treat it as a smaller polynomial,\n\n // this is equivalent to prepending zeros\n\n //\n\n let mut divrem = message.to_vec();\n\n divrem[data_len..].fill(0);\n\n\n\n // divide by our generator polynomial\n\n poly_divrem(\n\n unsafe { __gf::slice_from_slice_mut_unchecked(&mut divrem) },\n\n &GENERATOR_POLY\n\n );\n\n\n\n // return message + remainder, this new message is a polynomial\n\n // perfectly divisable by our generator polynomial\n\n message[data_len..].copy_from_slice(&divrem[data_len..]);\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 22, "score": 110054.9006988256 }, { "content": "/// Repair up to two blocks of failure\n\npub fn raid6_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n q: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n if bad_blocks.len() > 2 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n\n 
bad_blocks.get(0).copied().unwrap_or(0),\n\n bad_blocks.get(1).copied().unwrap_or(0),\n", "file_path": "examples/raid.rs", "rank": 23, "score": 109112.07436754914 }, { "content": "/// Repair up to three blocks of failure\n\npub fn raid7_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n q: &mut [u8],\n\n r: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n if bad_blocks.len() > 3 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n", "file_path": "examples/raid.rs", "rank": 24, "score": 109112.07436754912 }, { "content": "/// Repair up to one block of failure\n\npub fn raid5_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n\n\n if bad_blocks.len() > 1 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n if bad_blocks[0] < blocks.len() {\n\n // repair using p\n\n let (before, after) = blocks.split_at_mut(bad_blocks[0]);\n\n let (d, after) = after.split_first_mut().unwrap();\n\n let d = d.as_mut();\n\n\n\n for i in 0..len {\n\n d[i] = p[i];\n", "file_path": "examples/raid.rs", "rank": 25, "score": 109112.07436754912 }, { "content": "/// Format blocks with RAID5, aka single block of parity\n\npub fn raid5_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8]) {\n\n let len = p.len();\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n\n\n for i in 0..len {\n\n p[i] = 0;\n\n }\n\n\n\n for b in blocks {\n\n for i in 0..len {\n\n // this could be gf256(a) + gf256(b), but that's just xor anyways\n\n p[i] ^= b.as_ref()[i];\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", 
"rank": 26, "score": 108718.66613497518 }, { "content": "/// Update a block in a RAID5 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid5_update(_j: usize, old: &[u8], new: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= old[i] ^ new[i];\n\n }\n\n}\n\n\n\n\n\n//// RAID6 ////\n\n\n", "file_path": "examples/raid.rs", "rank": 27, "score": 107153.19323127915 }, { "content": "/// Multiply a polynomial by a scalar\n\nfn poly_scale(f: &mut [__gf], c: __gf) {\n\n for i in 0..f.len() {\n\n f[i] *= c;\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 28, "score": 104228.00441258981 }, { "content": "/// Multiply two polynomials together\n\nfn poly_mul(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f[..g.len()-1].iter().all(|x| *x == __gf::new(0)));\n\n\n\n // This is in-place, at the cost of being a bit confusing,\n\n // note that we only write to i+j, and i+j is always >= i\n\n //\n\n // What makes this confusing is that f and g are both big-endian\n\n // polynomials, reverse order from what you would expect. And in\n\n // order to leverage the i+j non-overlap, we need to write to \n\n // f in reverse-reverse order.\n\n //\n\n for i in (0..f.len()-g.len()+1).rev() {\n\n let fi = f[f.len()-1-i];\n\n f[f.len()-1-i] = __gf::new(0);\n\n\n\n for j in 0..g.len() {\n\n f[f.len()-1-(i+j)] += fi * g[g.len()-1-j];\n\n }\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 29, "score": 104228.00441258981 }, { "content": "/// Divide polynomials via synthetic division\n\n///\n\n/// Note both the quotient and remainder are left in the dividend\n\n///\n\nfn poly_divrem(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // find leading coeff to normalize g, note you could avoid\n\n // this if g is already normalized\n\n let leading_coeff = g[0];\n\n\n\n for i in 0 .. 
(f.len() - g.len() + 1) {\n\n if f[i] != __gf::new(0) {\n\n f[i] /= leading_coeff;\n\n\n\n for j in 1..g.len() {\n\n f[i+j] -= f[i] * g[j];\n\n }\n\n }\n\n }\n\n}\n\n\n\n// Encode using Reed-Solomon error correction\n\n//\n", "file_path": "templates/rs.rs", "rank": 30, "score": 104228.00441258981 }, { "content": "/// Add two polynomials together\n\nfn poly_add(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // note g.len() may be <= f.len()!\n\n for i in 0..f.len() {\n\n f[f.len()-1-i] += g[g.len()-1-i];\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 31, "score": 104228.00441258981 }, { "content": "/// Encode using Reed-Solomon error correction\n\n///\n\n/// Much like in CRC, we want to make the message a multiple of G(x),\n\n/// our generator polynomial. We can do this by appending the remainder\n\n/// of our message after division by G(x).\n\n///\n\n/// ``` text\n\n/// c(x) = m(x) - (m(x) % G(x))\n\n/// ```\n\n///\n\n/// Note we expect the message to only take up the first message.len()-ECC_SIZE\n\n/// bytes, but this can be smaller than BLOCK_SIZE\n\n///\n\npub fn rs_encode(message: &mut [u8]) {\n\n assert!(message.len() <= BLOCK_SIZE);\n\n assert!(message.len() >= ECC_SIZE);\n\n let data_len = message.len() - ECC_SIZE;\n\n\n\n // create copy for polynomial division\n\n //\n\n // note if message is < DATA_SIZE we just treat it as a smaller polynomial,\n\n // this is equivalent to prepending zeros\n\n //\n\n let mut divrem = message.to_vec();\n\n divrem[data_len..].fill(0);\n\n\n\n // divide by our generator polynomial\n\n rs_poly_divrem(\n\n gf256::slice_from_slice_mut(&mut divrem),\n\n &GENERATOR_POLY\n\n );\n\n\n\n // return message + remainder, this new message is a polynomial\n\n // perfectly divisable by our generator polynomial\n\n message[data_len..].copy_from_slice(&divrem[data_len..]);\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 32, "score": 104065.55660343418 }, { "content": "/// Add two polynomials 
together\n\nfn rs_poly_add(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // note g.len() may be <= f.len()!\n\n for i in 0..f.len() {\n\n f[f.len()-1-i] += g[g.len()-1-i];\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 33, "score": 101379.49615738707 }, { "content": "/// Multiply two polynomials together\n\nfn rs_poly_mul(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f[..g.len()-1].iter().all(|x| *x == gf256(0)));\n\n\n\n // This is in-place, at the cost of being a bit confusing,\n\n // note that we only write to i+j, and i+j is always >= i\n\n //\n\n // What makes this confusing is that f and g are both big-endian\n\n // polynomials, reverse order from what you would expect. And in\n\n // order to leverage the i+j non-overlap, we need to write to \n\n // f in reverse-reverse order.\n\n //\n\n for i in (0..f.len()-g.len()+1).rev() {\n\n let fi = f[f.len()-1-i];\n\n f[f.len()-1-i] = gf256(0);\n\n\n\n for j in 0..g.len() {\n\n f[f.len()-1-(i+j)] += fi * g[g.len()-1-j];\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 34, "score": 101379.49615738707 }, { "content": "/// Divide polynomials via synthetic division\n\n///\n\n/// Note both the quotient and remainder are left in the dividend\n\n///\n\nfn rs_poly_divrem(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // find leading coeff to normalize g, note you could avoid\n\n // this if g is already normalized\n\n let leading_coeff = g[0];\n\n\n\n for i in 0 .. 
(f.len() - g.len() + 1) {\n\n if f[i] != gf256(0) {\n\n f[i] /= leading_coeff;\n\n\n\n for j in 1..g.len() {\n\n f[i+j] -= f[i] * g[j];\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 35, "score": 101379.49615738707 }, { "content": "/// Multiply a polynomial by a scalar\n\nfn rs_poly_scale(f: &mut [gf256], c: gf256) {\n\n for i in 0..f.len() {\n\n f[i] *= c;\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 36, "score": 101379.49615738707 }, { "content": "/// Update a block in a RAID array.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x55\\x29\\x5f\\x22\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // update\n\n/// let new_data = b\"Jell\";\n\n/// raid7::update(0, datas[0], new_data, &mut parity1, &mut parity2, &mut parity3);\n\n/// datas[0].copy_from_slice(new_data);\n\n///\n\n/// assert_eq!(&datas[0], b\"Jell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x57\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, b\"\\x41\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x98\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn update(\n\n j: usize,\n\n old: &[__u],\n\n new: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = old.len();\n\n assert!(new.len() == old.len());\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n #[cfg(__if(__parity >= 2))] let g = 
__gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])); }\n\n #[cfg(__if(__parity >= 2))] { q[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 37, "score": 97404.3318565347 }, { "content": "/// Remove a block from a RAID array.\n\n///\n\n/// Note the block index must already exist in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x55\\x29\\x5f\\x22\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // remove \n\n/// raid7::remove(0, datas[0], &mut parity1, &mut parity2, &mut parity3);\n\n///\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x1d\\x4c\\x33\\x4e\");\n\n/// assert_eq!(&parity2, b\"\\x0b\\xed\\x23\\x5a\");\n\n/// assert_eq!(&parity3, b\"\\xd2\\x0e\\x4f\\x8b\");\n\n/// ```\n\n///\n\npub fn remove(\n\n j: usize,\n\n old: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = old.len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n 
#[cfg(__if(__parity >= 2))] let g = __gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] -= __gf::from_lossy(old[i]); }\n\n #[cfg(__if(__parity >= 2))] { q[i] -= __gf::from_lossy(old[i]) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] -= __gf::from_lossy(old[i]) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 38, "score": 97404.0394705015 }, { "content": "/// Add a block to a RAID array.\n\n///\n\n/// Note the block index must be unique in the array, otherwise the array will\n\n/// become corrupted. This does not update other block indices.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"xxxxo World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x1d\\x4c\\x33\\x4e\".to_vec();\n\n/// let mut parity2 = b\"\\x0b\\xed\\x23\\x5a\".to_vec();\n\n/// let mut parity3 = b\"\\xd2\\x0e\\x4f\\x8b\".to_vec();\n\n///\n\n/// // add\n\n/// let new_data = b\"Jell\";\n\n/// raid7::add(0, new_data, &mut parity1, &mut parity2, &mut parity3);\n\n/// datas[0].copy_from_slice(new_data);\n\n///\n\n/// assert_eq!(&datas[0], b\"Jell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x57\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, b\"\\x41\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x98\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn add(\n\n j: usize,\n\n new: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = new.len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { 
__gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n #[cfg(__if(__parity >= 2))] let g = __gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] += __gf::from_lossy(new[i]); }\n\n #[cfg(__if(__parity >= 2))] { q[i] += __gf::from_lossy(new[i]) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] += __gf::from_lossy(new[i]) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 39, "score": 97403.69153961647 }, { "content": "pub fn main() {\n\n fn hex(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| format!(\"{:02x}\", x))\n\n .collect()\n\n }\n\n\n\n fn ascii(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| {\n\n if *x < b' ' || *x > b'~' {\n\n '.'\n\n } else {\n\n char::from(*x)\n\n }\n\n })\n\n .collect::<String>()\n\n }\n\n\n\n\n", "file_path": "examples/raid.rs", "rank": 40, "score": 97389.70156732603 }, { "content": "/// Correct up to [`ECC_SIZE/2`](ECC_SIZE) errors at unknown locations.\n\n///\n\n/// Returns the number of errors, or [`Error::TooManyErrors`] if the codeword\n\n/// can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xexlx xoxlx!\\\n\n/// x\\xa6x\\xf8x\\x15x\\x6ex\\xb6x\\x12x\\xbdx\\xd3\\\n\n/// x\\x14x\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n///\n\n/// assert_eq!(rs255w223::correct_errors(&mut codeword), Ok(16));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct_errors(codeword: &mut [__u]) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if 
S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find error locator polynomial\n\n let Λ = find_error_locator(&S);\n\n\n\n // too many errors?\n\n let error_count = Λ.len() - 1;\n\n if error_count*2 > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find error locations\n\n let error_locations = find_error_locations(codeword, &Λ);\n", "file_path": "templates/rs.rs", "rank": 41, "score": 91902.12536797905 }, { "content": "/// Correct up to ECC_SIZE/2 errors at unknown locations\n\n///\n\npub fn rs_correct_errors(codeword: &mut [u8]) -> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find error locator polynomial\n\n let Λ = rs_find_error_locator(&S);\n\n\n\n // too many errors?\n\n let error_count = Λ.len() - 1;\n\n if error_count*2 > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find error locations\n\n let error_locations = rs_find_error_locations(codeword, &Λ);\n", "file_path": "examples/rs.rs", "rank": 42, "score": 85819.20649983645 }, { "content": "/// Format blocks as a RAID array.\n\n///\n\n/// This writes the parity data to the provided parity blocks based on the\n\n/// provided data blocks.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let datas = data.chunks(4).collect::<Vec<_>>();\n\n/// let mut parity1 = vec![0u8; 4];\n\n/// let mut parity2 = vec![0u8; 4];\n\n/// let mut parity3 = vec![0u8; 4];\n\n/// raid7::format(&datas, &mut parity1, &mut parity2, &mut parity3);\n\n///\n\n/// assert_eq!(&datas[0], b\"Hell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x55\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, 
b\"\\x43\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x9a\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn format<B: AsRef<[__u]>>(\n\n blocks: &[B],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n assert!(blocks.len() >= 1);\n\n #[cfg(__if(__parity >= 2))] { assert!(blocks.len() <= usize::try_from(__gf::NONZEROS).unwrap_or(usize::MAX)); }\n\n\n\n let len = blocks[0].as_ref().len();\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n #[cfg(__if(__parity >= 1))] { assert!(p.len() == len); }\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] { assert!(q.len() == len); }\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] { assert!(r.len() == len); }\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n for i in 0..len {\n\n #[cfg(__if(__parity >= 1))] { p[i] = __gf::new(0); }\n", "file_path": "templates/raid.rs", "rank": 43, "score": 85270.42202556808 }, { "content": "/// Generate a random polynomial of a given degree, fixing f(0) = secret\n\nfn poly_random<R: Rng>(rng: &mut R, secret: __gf, degree: usize) -> Vec<__gf> {\n\n let mut f = vec![secret];\n\n for _ in 0..degree {\n\n f.push(__gf::from_lossy(rng.gen_range(1..=__gf::NONZEROS)));\n\n }\n\n f\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 44, "score": 75844.7055923362 }, { "content": "fn main() {\n\n fn hex(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| format!(\"{:02x}\", x))\n\n .collect()\n\n }\n\n\n\n fn ascii(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| {\n\n if *x < b' ' || *x > b'~' {\n\n '.'\n\n } else {\n\n char::from(*x)\n\n }\n\n })\n\n .collect::<String>()\n\n }\n\n\n\n let orig_message = b\"Hello World!\";\n", "file_path": "examples/rs.rs", "rank": 45, "score": 
65981.38030131187 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n\n\n // note we don't use the iterators, this is so we can print some progress\n\n\n\n // find iterators of polynomials to test\n\n let width = match (opt.width, opt.polynomial) {\n\n (Some(width), _ ) => width,\n\n (_, Some(p)) => usize::try_from(128-p.leading_zeros()).unwrap(),\n\n (None, None ) => unreachable!(),\n\n };\n\n\n\n let ps: Box<dyn Iterator<Item=p128>> = match (opt.polynomial, opt.n) {\n\n (Some(p), None ) => Box::new(iter::once(p)),\n\n (Some(p), Some(_)) => Box::new((u128::from(p) .. (1u128 << width)).map(p128)),\n\n (None, _ ) => Box::new(((1u128 << (width-1)) .. (1u128 << width)).map(p128)),\n\n };\n\n\n\n let gs = || -> Box<dyn Iterator<Item=p128>> {\n\n match (opt.generator, opt.m) {\n", "file_path": "examples/find-p.rs", "rank": 46, "score": 65981.38030131187 }, { "content": "fn main() {\n\n let input = b\"Hello World!\";\n\n let expected = 0x1c291ca3;\n\n println!();\n\n println!(\"testing crc({:?})\", String::from_utf8_lossy(input));\n\n\n\n let output = naive_crc(input);\n\n println!(\"{:<24} => 0x{:08x}\", \"naive_crc\", output);\n\n assert_eq!(output, expected);\n\n\n\n let output = less_naive_crc(input);\n\n println!(\"{:<24} => 0x{:08x}\", \"less_naive_crc\", output);\n\n assert_eq!(output, expected);\n\n\n\n let output = word_less_naive_crc(input);\n\n println!(\"{:<24} => 0x{:08x}\", \"word_less_naive_crc\", output);\n\n assert_eq!(output, expected);\n\n\n\n let output = table_crc(input);\n\n println!(\"{:<24} => 0x{:08x}\", \"table_crc\", output);\n", "file_path": "examples/crc.rs", "rank": 47, "score": 65981.38030131187 }, { "content": "fn main() {\n\n fn hex(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| format!(\"{:02x}\", x))\n\n .collect()\n\n }\n\n\n\n fn grid<'a>(width: usize, bs: &'a [u8]) -> impl Iterator<Item=String> + 'a {\n\n (0 .. 
(bs.len()+width-1)/width)\n\n .step_by(2)\n\n .rev()\n\n .map(move |y| {\n\n let mut line = String::new();\n\n for x in 0..width {\n\n let mut b = 0;\n\n for i in 0..2 {\n\n if bs.get((y+i)*width + x).filter(|b| **b != 0).is_some() {\n\n b |= 1 << (1-i);\n\n }\n\n }\n", "file_path": "examples/lfsr.rs", "rank": 48, "score": 65981.38030131187 }, { "content": "fn main() {\n\n fn hex(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| format!(\"{:02x}\", x))\n\n .collect()\n\n }\n\n\n\n fn ascii(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| {\n\n if *x < b' ' || *x > b'~' {\n\n '.'\n\n } else {\n\n char::from(*x)\n\n }\n\n })\n\n .collect::<String>()\n\n }\n\n\n\n\n", "file_path": "examples/shamir.rs", "rank": 49, "score": 65981.38030131187 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_generators() {\n\n // we know there are 128 primitive elements in gf256, and since all\n\n // representations of gf256 are isomorphic, the irreducible polynomial\n\n // shouldn't matter\n\n //\n\n // (we only check the first couple irreducible polynomials to make the test\n\n // run faster)\n\n //\n\n for p in irreducibles(9).take(3) {\n\n assert_eq!(generators(p).count(), 128);\n\n }\n\n}\n\n\n", "file_path": "examples/find-p.rs", "rank": 50, "score": 63832.309895069295 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_irreducibles() {\n\n // we know there are 30 irreducible polynomials in gf256\n\n assert_eq!(irreducibles(9).count(), 30);\n\n}\n\n\n", "file_path": "examples/find-p.rs", "rank": 51, "score": 63832.309895069295 }, { "content": "/// Find the error magnitudes using Forney's algorithm\n\n///\n\n/// ``` text\n\n/// Xj*Ω(Xj^-1)\n\n/// Yj = - -----------\n\n/// Λ'(Xj^-1)\n\n/// ```\n\n///\n\n/// Where Ω(x) is the error evaluator polynomial:\n\n///\n\n/// ``` text\n\n/// Ω(x) = S(x)*Λ(x) mod x^2v\n\n/// ```\n\n/// \n\n/// And S(x) is the partial syndrome polynomial:\n\n/// \n\n/// ``` text\n\n/// 2v\n\n/// S(x) = Σ Si*x^i\n\n/// i\n\n/// ```\n\n///\n\n/// And Λ’(x) is 
the formal derivative of Λ(x):\n\n///\n\n/// ``` text\n\n/// v\n\n/// Λ'(x) = Σ i*Λi*x^(i-1)\n\n/// i=1\n\n/// ```\n\n///\n\nfn find_error_magnitudes(\n\n codeword: &[__gf],\n\n S: &[__gf],\n\n Λ: &[__gf],\n\n error_locations: &[usize]\n\n) -> Vec<__gf> {\n\n // find the erasure evaluator polynomial\n\n //\n\n // Ω(x) = S(x)*Λ(x) mod x^2v\n\n //\n\n let mut Ω = vec![__gf::new(0); S.len()+Λ.len()-1];\n\n let Ω_len = Ω.len();\n\n Ω[Ω_len-S.len()..].copy_from_slice(&S);\n\n Ω[Ω_len-S.len()..].reverse();\n\n poly_mul(&mut Ω, &Λ);\n\n Ω.drain(..Ω.len()-S.len());\n\n\n\n // find the formal derivative of Λ\n\n //\n\n // Λ'(x) = Σ i*Λi*x^(i-1)\n", "file_path": "templates/rs.rs", "rank": 52, "score": 61918.70178088744 }, { "content": "/// Find Forney syndromes, these hide known erasures from the original syndromes\n\n/// so error detection doesn't try (and possibly fail) to find known erasures\n\n///\n\nfn find_forney_syndromes(\n\n codeword: &[__gf],\n\n S: &[__gf],\n\n erasures: &[usize]\n\n) -> Vec<__gf> {\n\n let mut S = S.to_vec();\n\n for j in erasures {\n\n let Xj = __gf::GENERATOR.pow(__u::try_from(codeword.len()-1-j).unwrap());\n\n for i in 0 .. 
S.len()-1 {\n\n S[i] = S[i+1] - S[i]*Xj;\n\n }\n\n }\n\n\n\n // trim unnecessary syndromes\n\n S.drain(S.len()-erasures.len()..);\n\n S\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 53, "score": 61911.68329375092 }, { "content": "/// Correct a mixture of errors and erasures, up to `2*errors+erasures <= ECC_SIZE`.\n\n///\n\n/// Where erasures are at known locations and errors are at unknown locations.\n\n/// Errors must be <= [`ECC_SIZE`], erasures must be <= [`ECC_SIZE/2`](ECC_SIZE),\n\n/// and `2*errors+erasures` must be <= [`ECC_SIZE`].\n\n///\n\n/// Returns the number of errors and erasures, or [`Error::TooManyErrors`] if the\n\n/// codeword can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xxxxxxxxxxxx\\\n\n/// xxxx\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34x\\xa7x\\xd6x\\xfdx\\xc2x\\x81x\\x8ax\\xc9x\".to_vec();\n\n///\n\n/// let erasures = (0..16).collect::<Vec<_>>();\n\n/// assert_eq!(rs255w223::correct(&mut codeword, &erasures), Ok(24));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct(\n\n codeword: &mut [__u],\n\n erasures: &[usize]\n\n) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find Forney syndromes, hiding known erasures from the syndromes\n\n let forney_S = find_forney_syndromes(codeword, &S, &erasures);\n\n\n", "file_path": "templates/rs.rs", "rank": 54, "score": 61656.00888093429 }, { 
"content": "#[crc(polynomial=0x11021)]\n\npub fn crc16() {}\n\n\n\n// HD=3, up to 4294967263+32 bits\n\n// HD=4, up to 91607+32 bits\n\n// HD=5, up to 2974+32 bits\n\n// HD=6, up to 268+32 bits\n\n// HD=7, up to 171+32 bits\n\n// HD=8, up to 91+32 bits\n", "file_path": "src/crc.rs", "rank": 55, "score": 61649.24100238837 }, { "content": "#[crc(polynomial=0x142f0e1eba9ea3693)]\n\npub fn crc64() {}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::p::*;\n\n\n\n #[test]\n\n fn crc() {\n\n assert_eq!(crc8(b\"Hello World!\", 0), 0xb3);\n\n assert_eq!(crc16(b\"Hello World!\", 0), 0x0bbb);\n\n assert_eq!(crc32(b\"Hello World!\", 0), 0x1c291ca3);\n\n assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n assert_eq!(crc64(b\"Hello World!\", 0), 0x75045245c9ea6fe2);\n\n }\n\n\n\n // explicit modes\n\n #[crc(polynomial=0x107, naive)] fn crc8_naive() {}\n\n #[crc(polynomial=0x11021, naive)] fn crc16_naive() {}\n", "file_path": "src/crc.rs", "rank": 56, "score": 61649.24100238837 }, { "content": "pub fn p(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match PArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let is_usize = match args.is_usize {\n\n Some(is_usize) => is_usize,\n\n None => {\n\n match args.u.as_ref().and_then(guess_is_usize) {\n\n Some(is_usize) => is_usize,\n", "file_path": "gf256-macros/src/p.rs", "rank": 57, "score": 61649.24100238837 }, { "content": "#[crc(polynomial=0x11edc6f41)]\n\npub fn crc32c() {}\n\n\n\n// HD=3,4, up to 8589606850+64 bits\n\n// HD=5,6, up to 126701+64 bits\n\n// HD=7,7, up to ~33710+64 bits\n", "file_path": "src/crc.rs", "rank": 58, "score": 61649.24100238837 }, { "content": "#[crc(polynomial=0x107)]\n\npub fn crc8() {}\n\n\n\n// HD=3,4, up to 
32751+16 bits\n", "file_path": "src/crc.rs", "rank": 59, "score": 61649.24100238837 }, { "content": "#[crc(polynomial=0x104c11db7)]\n\npub fn crc32() {}\n\n\n\n// HD=3,4, up to 2147483615+32 bits\n\n// HD=5,6, up to 5243+32 bits\n\n// HD=7,8, up to 177+32 bits\n", "file_path": "src/crc.rs", "rank": 60, "score": 61649.24100238837 }, { "content": "/// Find the error magnitudes using Forney's algorithm\n\n///\n\n/// ``` text\n\n/// Xj*Ω(Xj^-1)\n\n/// Yj = - -----------\n\n/// Λ'(Xj^-1)\n\n/// ```\n\n///\n\n/// Where Ω(x) is the error evaluator polynomial:\n\n///\n\n/// ``` text\n\n/// Ω(x) = S(x)*Λ(x) mod x^2v\n\n/// ```\n\n/// \n\n/// And S(x) is the partial syndrome polynomial:\n\n/// \n\n/// ``` text\n\n/// 2v\n\n/// S(x) = Σ Si*x^i\n\n/// i\n\n/// ```\n\n///\n\n/// And Λ’(x) is the formal derivative of Λ(x):\n\n///\n\n/// ``` text\n\n/// v\n\n/// Λ'(x) = Σ i*Λi*x^(i-1)\n\n/// i=1\n\n/// ```\n\n///\n\nfn rs_find_error_magnitudes(\n\n codeword: &[gf256],\n\n S: &[gf256],\n\n Λ: &[gf256],\n\n error_locations: &[usize]\n\n) -> Vec<gf256> {\n\n // find the erasure evaluator polynomial\n\n //\n\n // Ω(x) = S(x)*Λ(x) mod x^2v\n\n //\n\n let mut Ω = vec![gf256(0); S.len()+Λ.len()-1];\n\n let Ω_len = Ω.len();\n\n Ω[Ω_len-S.len()..].copy_from_slice(&S);\n\n Ω[Ω_len-S.len()..].reverse();\n\n rs_poly_mul(&mut Ω, &Λ);\n\n Ω.drain(..Ω.len()-S.len());\n\n\n\n // find the formal derivative of Λ\n\n //\n\n // Λ'(x) = Σ i*Λi*x^(i-1)\n", "file_path": "examples/rs.rs", "rank": 61, "score": 60191.93226420471 }, { "content": "/// Find Forney syndromes, these hide known erasures from the original syndromes\n\n/// so error detection doesn't try (and possibly fail) to find known erasures\n\n///\n\nfn rs_find_forney_syndromes(\n\n codeword: &[gf256],\n\n S: &[gf256],\n\n erasures: &[usize]\n\n) -> Vec<gf256> {\n\n let mut S = S.to_vec();\n\n for j in erasures {\n\n let Xj = gf256::GENERATOR.pow(u8::try_from(codeword.len()-1-j).unwrap());\n\n for i in 0 .. 
S.len()-1 {\n\n S[i] = S[i+1] - S[i]*Xj;\n\n }\n\n }\n\n\n\n // trim unnecessary syndromes\n\n S.drain(S.len()-erasures.len()..);\n\n S\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 62, "score": 60184.9137770682 }, { "content": "/// Correct up to [`ECC_SIZE`] erasures at known locations.\n\n///\n\n/// Returns the number of erasures, or [`Error::TooManyErrors`] if the codeword\n\n/// can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xxxxxxxxxxxx\\\n\n/// xxxxxxxxxxxxxxxx\\\n\n/// xxxx\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n///\n\n/// let erasures = (0..32).collect::<Vec<_>>();\n\n/// assert_eq!(rs255w223::correct_erasures(&mut codeword, &erasures), Ok(32));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct_erasures(\n\n codeword: &mut [__u],\n\n erasures: &[usize]\n\n) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find erasure locator polynomial\n\n let Λ = find_erasure_locator(codeword, &erasures);\n\n\n", "file_path": "templates/rs.rs", "rank": 63, "score": 59736.58687435733 }, { "content": "#[proc_macro_attribute]\n\npub fn p(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n p::p(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 64, "score": 59728.61440106999 }, { "content": "/// Correct a mixture of erasures at 
unknown locations and erasures\n\n/// as known locations, can correct up to 2*errors+erasures <= ECC_SIZE\n\n///\n\npub fn rs_correct(\n\n codeword: &mut [u8],\n\n erasures: &[usize]\n\n) -> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find Forney syndromes, hiding known erasures from the syndromes\n\n let forney_S = rs_find_forney_syndromes(codeword, &S, &erasures);\n\n\n", "file_path": "examples/rs.rs", "rank": 65, "score": 59728.61440106999 }, { "content": "#[cfg(feature=\"lfsr\")]\n\n#[proc_macro_attribute]\n\npub fn lfsr(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n lfsr::lfsr(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 66, "score": 58001.844884387276 }, { "content": "pub fn gf(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match GfArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the irreducible polynomial\n\n // that defines the field, since, well, this is actually the only\n\n // width that would work with that polynomial\n\n let polynomial = args.polynomial.0;\n", "file_path": "gf256-macros/src/gf.rs", "rank": 67, "score": 58001.844884387276 }, { "content": "/// Correct up to ECC_SIZE erasures at known locations\n\n///\n\npub fn rs_correct_erasures(\n\n codeword: &mut [u8],\n\n erasures: &[usize]\n\n) 
-> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find erasure locator polynomial\n\n let Λ = rs_find_erasure_locator(codeword, &erasures);\n\n\n", "file_path": "examples/rs.rs", "rank": 68, "score": 58001.844884387276 }, { "content": "#[proc_macro_attribute]\n\npub fn gf(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n gf::gf(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 69, "score": 58001.844884387276 }, { "content": "pub fn crc(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match CrcArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the given polynomial, this\n\n // is the only width that would really work\n\n let polynomial = args.polynomial.0;\n\n (128-usize::try_from(polynomial.leading_zeros()).unwrap()) - 1\n", "file_path": "gf256-macros/src/crc.rs", "rank": 70, "score": 58001.844884387276 }, { "content": "pub fn shamir(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match ShamirArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n // parse 
type\n\n let ty = parse_macro_input!(input as syn::ItemMod);\n\n let attrs = ty.attrs;\n\n let vis = ty.vis;\n\n let shamir = ty.ident;\n", "file_path": "gf256-macros/src/shamir.rs", "rank": 71, "score": 58001.844884387276 }, { "content": "pub fn rs(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match RsArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n // gf256 is limited to 255 elements\n\n assert!(args.block <= 255);\n\n assert!(args.data <= args.block);\n\n\n\n // parse type\n", "file_path": "gf256-macros/src/rs.rs", "rank": 72, "score": 58001.844884387276 }, { "content": "pub fn lfsr(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match LfsrArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the given polynomial, this\n\n // is the only width that would really work\n\n let polynomial = args.polynomial.0;\n\n (128-usize::try_from(polynomial.leading_zeros()).unwrap()) - 1\n", "file_path": "gf256-macros/src/lfsr.rs", "rank": 73, "score": 58001.844884387276 }, { "content": "#[cfg(feature=\"shamir\")]\n\n#[proc_macro_attribute]\n\npub fn shamir(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n shamir::shamir(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 74, "score": 58001.844884387276 }, { "content": "#[cfg(feature=\"crc\")]\n\n#[proc_macro_attribute]\n\npub fn crc(\n\n args: 
proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n crc::crc(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 75, "score": 58001.844884387276 }, { "content": "#[cfg(feature=\"rs\")]\n\n#[proc_macro_attribute]\n\npub fn rs(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n rs::rs(args, input)\n\n}\n", "file_path": "gf256-macros/src/lib.rs", "rank": 76, "score": 58001.844884387276 }, { "content": "/// Determine if codeword is correct and has no errors/erasures.\n\n///\n\n/// This is quite a bit faster than actually finding the errors/erasures.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let codeword = b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n/// assert!(rs255w223::is_correct(&codeword));\n\n/// ```\n\n///\n\npub fn is_correct(codeword: &[__u]) -> bool {\n\n let codeword = unsafe { __gf::slice_from_slice_unchecked(codeword) };\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let syndromes = find_syndromes(codeword);\n\n syndromes.iter().all(|s| *s == __gf::new(0))\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 77, "score": 53127.676371279755 }, { "content": "/// Is a given polynomial irreducible?\n\n///\n\n/// This is roughly equivalent to asking if a number is prime\n\n///\n\npub fn is_irreducible(p: p128) -> bool {\n\n // some corner cases\n\n if p == p128(0) || p == p128(1) {\n\n return false;\n\n }\n\n\n\n // check for 2 so we can skip all multiples of 2, seems like\n\n // a minor optimization but speeds things up by ~2x\n\n if p % p128(2) == p128(0) {\n\n return p == p128(2);\n\n }\n\n\n\n // test division of all polynomials < sqrt(p), or a simpler\n\n // heuristic of < 2^(log2(p)/2)\n\n let npw2 = 128 - 
(u128::from(p)-1).leading_zeros();\n\n let roughsqrt = 1u128 << ((npw2+1)/2);\n\n\n\n for x in (3..roughsqrt).step_by(2).map(p128) {\n\n if p % x == p128(0) {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "examples/find-p.rs", "rank": 78, "score": 53122.63928120851 }, { "content": "/// Find syndromes, which should be zero if there are no errors\n\n///\n\n/// ``` text\n\n/// Si = c'(g^i)\n\n/// ```\n\n///\n\nfn find_syndromes(f: &[__gf]) -> Vec<__gf> {\n\n let mut S = vec![];\n\n for i in 0..ECC_SIZE {\n\n S.push(\n\n poly_eval(f, __gf::GENERATOR.pow(__u::try_from(i).unwrap()))\n\n );\n\n }\n\n S\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 79, "score": 51561.78666915053 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\nfn poly_eval(f: &[__gf], x: __gf) -> __gf {\n\n let mut y = __gf::new(0);\n\n for c in f.iter().rev() {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 80, "score": 50330.0372129122 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\n///\n\n/// Note polynomials here are ordered biggest-coefficient first\n\n///\n\nfn poly_eval(f: &[__gf], x: __gf) -> __gf {\n\n let mut y = __gf::new(0);\n\n for c in f {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 81, "score": 50329.57575135156 }, { "content": "/// A table-based CRC implementation using precomputed remainders\n\n/// post-addition\n\n///\n\n/// This requires a 4*256 = 1024 byte table (computed at compile-time thanks\n\n/// to Rust's const evaluation), and is the most common CRC implementation\n\n/// thanks to its portability and speed.\n\n///\n\npub fn table_crc(data: &[u8]) -> u32 {\n\n const CRC_TABLE: [u32; 256] = {\n\n let mut table = [0; 256];\n\n let mut i = 0;\n\n while i < table.len() {\n\n let x = (i as u32).reverse_bits();\n\n let x = p64((x as u64) << 8).naive_rem(POLYNOMIAL).0 as u32;\n\n table[i] = x.reverse_bits();\n\n i += 1;\n\n }\n\n 
table\n\n };\n\n\n\n let mut crc = 0xffffffff;\n\n\n\n for b in data {\n\n crc = (crc >> 8) ^ CRC_TABLE[usize::from((crc as u8) ^ b)];\n\n }\n\n\n\n crc ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 82, "score": 50147.56315511206 }, { "content": "/// A naive CRC implementation using the textbook definition of polynomial\n\n/// remainder, the input is padded with 32-bits of zeros to represent the\n\n/// correct polynomial.\n\n///\n\n/// The bit-invert of the CRC is a bit strange when mapped to the\n\n/// textbook definition as this appears as xoring the input with\n\n/// 32-bits of ones followed by zeros.\n\n///\n\n/// We also have to bit-reverse the input/output in order to match\n\n/// the common CRC32 behavior.\n\n///\n\npub fn naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p64(0);\n\n\n\n for b in\n\n data.iter().copied()\n\n // pad with 32-bits\n\n .chain(iter::repeat(0x00).take(4))\n\n // invert the first 32-bits\n\n .zip(iter::repeat(0xff).take(4).chain(iter::repeat(0x00)))\n\n .map(|(m, b)| m ^ b)\n\n {\n\n crc = (crc << 8) | p64::from(b.reverse_bits());\n\n crc = crc % POLYNOMIAL;\n\n }\n\n\n\n u32::try_from(crc).unwrap().reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 83, "score": 50146.98277722653 }, { "content": "/// A hardware-accelerated CRC implementation using Barret reduction\n\n///\n\n/// This leverages polynomial multiplication instructions (pclmulqdq,\n\n/// pmull, etc) to provide an efficient CRC implementation without the need\n\n/// of a lookup table.\n\n///\n\n/// You may notice that polynomial multiplication is not the polynomial\n\n/// remainder operation needed for CRC, and that is where Barret reduction\n\n/// comes in. 
Barret reduction allows you to turn division/remainder\n\n/// by a constant into a cheaper multiply by a different constant.\n\n///\n\n/// Fortunately Rust makes it easy to precompute this constant at\n\n/// compile-time.\n\n///\n\npub fn barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n\n", "file_path": "examples/crc.rs", "rank": 84, "score": 50146.61492679987 }, { "content": "/// Determine if message is correct\n\n///\n\n/// Note this is quite a bit faster than correcting the errors\n\n///\n\npub fn rs_is_correct(codeword: &[u8]) -> bool {\n\n let codeword = gf256::slice_from_slice(codeword);\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let syndromes = rs_find_syndromes(codeword);\n\n syndromes.iter().all(|s| *s == gf256(0))\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 85, "score": 50144.03392536275 }, { "content": "/// Find syndromes, which should be zero if there are no errors\n\n///\n\n/// ``` text\n\n/// Si = c'(g^i)\n\n/// ```\n\n///\n\nfn rs_find_syndromes(f: &[gf256]) -> Vec<gf256> {\n\n let mut S = vec![];\n\n for i in 0..ECC_SIZE {\n\n S.push(\n\n rs_poly_eval(f, gf256::GENERATOR.pow(u8::try_from(i).unwrap()))\n\n );\n\n }\n\n S\n\n}\n\n\n", 
"file_path": "examples/rs.rs", "rank": 86, "score": 50144.03392536275 }, { "content": "/// Calculate the CRC for a piece of data.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::crc::*;\n\n/// assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n/// ```\n\n///\n\n/// Note that this takes the previous state of the CRC as an argument,\n\n/// allowing the CRC to be computed incrementally:\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::crc::*;\n\n/// assert_eq!(crc32c(b\"Hell\", 0x00000000), 0x77bce1bf);\n\n/// assert_eq!(crc32c(b\"o Wo\", 0x77bce1bf), 0xf92d22b8);\n\n/// assert_eq!(crc32c(b\"rld!\", 0xf92d22b8), 0xfe6cf1dc);\n\n/// assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n/// ```\n\n///\n\n/// See the [module-level documentation](../crc) for more info.\n\n///\n\npub fn __crc(data: &[u8], crc: __u) -> __u {\n\n cfg_if! {\n\n if #[cfg(__if(__naive))] {\n\n let mut crc = __p(crc ^ __xor);\n\n\n\n cfg_if! {\n\n if #[cfg(__if(__reflected))] {\n\n crc = crc.reverse_bits() >> (8*size_of::<__u>()-__width);\n\n }\n\n }\n\n\n\n crc = crc << 8*size_of::<__u>()-__width;\n\n\n\n // iterate over words\n\n let mut words = data.chunks_exact(size_of::<__u>());\n\n for word in &mut words {\n\n let word = <[u8; size_of::<__u>()]>::try_from(word).unwrap();\n\n cfg_if! 
{\n\n if #[cfg(__if(__reflected))] {\n\n crc = crc + __p::from_le_bytes(word).reverse_bits();\n", "file_path": "templates/crc.rs", "rank": 87, "score": 49232.16428117298 }, { "content": "/// Is a given polynomial a primitive element, aka generator, of the\n\n/// finite-field defined by modulo the given irreducible polynomial?\n\n///\n\n/// That's a mouthful, the question being asked here is do successive\n\n/// powers of the generator iterate over every non-zero element in the\n\n/// finite-field defined by the given irreducible polynomial\n\n///\n\npub fn is_generator(g: p128, p: p128) -> bool {\n\n if g == p128(0) {\n\n return false;\n\n }\n\n\n\n // Define a few operations over the finite field defined by the irreducible\n\n // polynomial p. Normally we could use our gf-types, except this tool\n\n // is being used to generate the polynomials for the gf-types, so...\n\n //\n\n let width = (128-p.leading_zeros()) - 1;\n\n\n\n // Multiplication uses carry-less multiplicatio modulo our irreducible\n\n // polynomial\n\n let gfmul = |a: p128, b: p128| -> p128 {\n\n (a * b) % p\n\n };\n\n\n\n // Exponentiation via squaring\n\n let gfpow = |mut a: p128, mut exp: u128| -> p128 {\n\n let mut x = p128(1);\n", "file_path": "examples/find-p.rs", "rank": 88, "score": 49228.544366634145 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\nfn shamir_poly_eval(f: &[gf256], x: gf256) -> gf256 {\n\n let mut y = gf256(0);\n\n for c in f.iter().rev() {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "examples/shamir.rs", "rank": 89, "score": 48912.284469124425 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\n///\n\n/// Note polynomials here are ordered biggest-coefficient first\n\n///\n\nfn rs_poly_eval(f: &[gf256], x: gf256) -> gf256 {\n\n let mut y = gf256(0);\n\n for c in f {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 90, "score": 48911.82300756378 }, { "content": "/// A hardware-accelerated 
CRC implementation using the same technique as\n\n/// barret_crc, but operating on a 32-bit word at a time\n\n///\n\npub fn word_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n\n", "file_path": "examples/crc.rs", "rank": 91, "score": 48854.81941400313 }, { "content": "/// A CRC implementation that uses the first common optimization:\n\n/// delaying the addition of the next byte to when overflow can occur\n\n///\n\npub fn less_naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p32(0xffffffff);\n\n\n\n for b in data {\n\n crc = crc + (p32::from(b.reverse_bits()) << 24);\n\n crc = p32::try_from((p64::from(crc) << 8) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n u32::from(crc).reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 92, "score": 48854.753150615375 }, { "content": "/// Iteratively find the error locator polynomial using the\n\n/// Berlekamp-Massey algorithm when we don't know the location of errors\n\n///\n\nfn find_error_locator(S: &[__gf]) -> Vec<__gf> {\n\n // the current estimate for the error locator polynomial\n\n let mut Λ = vec![__gf::new(0); S.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = __gf::new(1);\n\n\n\n let mut prev_Λ = Λ.clone();\n\n let mut delta_Λ = 
Λ.clone();\n\n\n\n // the current estimate for the number of errors\n\n let mut v = 0;\n\n\n\n for i in 0..S.len() {\n\n let mut delta = S[i];\n\n for j in 1..v+1 {\n\n delta += Λ[Λ.len()-1-j] * S[i-j];\n\n }\n\n\n\n prev_Λ.rotate_left(1);\n\n\n", "file_path": "templates/rs.rs", "rank": 93, "score": 48854.753150615375 }, { "content": "/// A hardware-accelerated CRC implementation using Barret reduction without\n\n/// needing to bit-reverse the internal representation\n\n///\n\n/// CRC32 and polynomial multiplication instructions unfortunately are defined\n\n/// with different bit-endianness. This would normally mean we need to\n\n/// bit-reverse the incoming data before we can use polynomial multiplication.\n\n///\n\n/// However, polynomial multiplication has the odd property that it is\n\n/// symmetric, brev(a) * brev(b) = brev((a * b) << 1)\n\n///\n\n/// This means we can rewrite our Barret reduction CRC to operate entirely\n\n/// on a bit-reversed representation, shaving off several instructions.\n\n///\n\n/// In theory this should be faster, but measurements show this as actually\n\n/// being slightly slower, perhaps the extra 1-bit shift costs more on\n\n/// machines with bit-reverse instructions?\n\n///\n\npub fn reversed_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. 
We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n const POLYNOMIAL_REV: p32 = p32(POLYNOMIAL.0 as u32).reverse_bits();\n", "file_path": "examples/crc.rs", "rank": 94, "score": 48853.87264699623 }, { "content": "/// A smaller table-based CRC implementation using 4-bit precomputed\n\n/// remainders post-addition\n\n///\n\n/// This requires a 4*16 = 64 byte table (computed at compile-time thanks\n\n/// to Rust's const evaluation), significantly reducing the code-size\n\n/// at the cost of 2x the number of operations. 
This CRC implementation\n\n/// is common on embedded systems.\n\n///\n\npub fn small_table_crc(data: &[u8]) -> u32 {\n\n const CRC_SMALL_TABLE: [u32; 16] = {\n\n let mut table = [0; 16];\n\n let mut i = 0;\n\n while i < table.len() {\n\n let x = (i as u32).reverse_bits();\n\n let x = p64((x as u64) << 4).naive_rem(POLYNOMIAL).0 as u32;\n\n table[i] = x.reverse_bits();\n\n i += 1;\n\n }\n\n table\n\n };\n\n\n\n let mut crc = 0xffffffff;\n\n\n\n for b in data {\n\n crc = (crc >> 4) ^ CRC_SMALL_TABLE[usize::from(((crc as u8) ^ (b >> 0)) & 0xf)];\n\n crc = (crc >> 4) ^ CRC_SMALL_TABLE[usize::from(((crc as u8) ^ (b >> 4)) & 0xf)];\n\n }\n\n\n\n crc ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 95, "score": 48853.75835883633 }, { "content": "/// A CRC implementation using the same technique as less_naive_crc but\n\n/// operating on a 32-bit word at a time\n\n///\n\npub fn word_less_naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p32(0xffffffff);\n\n\n\n // iterate over 4-byte words\n\n let mut words = data.chunks_exact(4);\n\n for word in &mut words {\n\n let word = <[u8; 4]>::try_from(word).unwrap();\n\n crc = crc + p32::from_le_bytes(word).reverse_bits();\n\n crc = p32::try_from((p64::from(crc) << 32) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n for b in words.remainder() {\n\n crc = crc + (p32::from(b.reverse_bits()) << 24);\n\n crc = p32::try_from((p64::from(crc) << 8) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n u32::from(crc).reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 96, "score": 47670.04531846749 }, { "content": "/// Iteratively find the error locator polynomial using the\n\n/// Berlekamp-Massey algorithm when we don't know the location of errors\n\n///\n\nfn rs_find_error_locator(S: &[gf256]) -> Vec<gf256> {\n\n // the current estimate for the error locator polynomial\n\n let mut Λ = vec![gf256(0); S.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = gf256(1);\n\n\n\n let mut prev_Λ = Λ.clone();\n\n let mut delta_Λ 
= Λ.clone();\n\n\n\n // the current estimate for the number of errors\n\n let mut v = 0;\n\n\n\n for i in 0..S.len() {\n\n let mut delta = S[i];\n\n for j in 1..v+1 {\n\n delta += Λ[Λ.len()-1-j] * S[i-j];\n\n }\n\n\n\n prev_Λ.rotate_left(1);\n\n\n", "file_path": "examples/rs.rs", "rank": 97, "score": 47669.910661507514 }, { "content": "/// A hardware-accelerated CRC implementation using the same technique as\n\n/// reversed_barret_crc, but operating on a 32-bit word at a time\n\n///\n\npub fn word_reversed_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n const POLYNOMIAL_REV: p32 = p32(POLYNOMIAL.0 as u32).reverse_bits();\n", "file_path": "examples/crc.rs", "rank": 98, "score": 47669.910661507514 }, { "content": "/// Find f(0) using Lagrange interpolation\n\nfn poly_interpolate(xs: &[__gf], ys: &[__gf]) -> __gf {\n\n assert!(xs.len() == ys.len());\n\n\n\n let mut y = __gf::new(0);\n\n for (i, (x0, y0)) in xs.iter().zip(ys).enumerate() {\n\n let mut li = __gf::new(1);\n\n for (j, (x1, _y1)) in xs.iter().zip(ys).enumerate() {\n\n if i != j {\n\n li *= x1 / (x1-x0);\n\n }\n\n }\n\n\n\n y += li*y0;\n\n }\n\n\n\n y\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 99, "score": 47618.90159776386 } ]
Rust
src/types/option_value.rs
aCLr/telegram-tdlib
40262849d87608931f22d0bbfd5baa0b410461ce
use crate::errors::*; use crate::types::*; use uuid::Uuid; use std::fmt::Debug; pub trait TDOptionValue: Debug + RObject {} #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(tag = "@type")] pub enum OptionValue { #[doc(hidden)] _Default, #[serde(rename(serialize = "getOption", deserialize = "getOption"))] GetOption(GetOption), #[serde(rename(serialize = "optionValueBoolean", deserialize = "optionValueBoolean"))] Boolean(OptionValueBoolean), #[serde(rename(serialize = "optionValueEmpty", deserialize = "optionValueEmpty"))] Empty(OptionValueEmpty), #[serde(rename(serialize = "optionValueInteger", deserialize = "optionValueInteger"))] Integer(OptionValueInteger), #[serde(rename(serialize = "optionValueString", deserialize = "optionValueString"))] String(OptionValueString), } impl Default for OptionValue { fn default() -> Self { OptionValue::_Default } } impl RObject for OptionValue { #[doc(hidden)] fn extra(&self) -> Option<&str> { match self { OptionValue::GetOption(t) => t.extra(), OptionValue::Boolean(t) => t.extra(), OptionValue::Empty(t) => t.extra(), OptionValue::Integer(t) => t.extra(), OptionValue::String(t) => t.extra(), _ => None, } } #[doc(hidden)] fn client_id(&self) -> Option<i32> { match self { OptionValue::GetOption(t) => t.client_id(), OptionValue::Boolean(t) => t.client_id(), OptionValue::Empty(t) => t.client_id(), OptionValue::Integer(t) => t.client_id(), OptionValue::String(t) => t.client_id(), _ => None, } } } impl OptionValue { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} #[doc(hidden)] pub fn _is_default(&self) -> bool { matches!(self, OptionValue::_Default) } } impl AsRef<OptionValue> for OptionValue { fn as_ref(&self) -> &OptionValue { self } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueBoolean { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, value: bool, } impl RObject for OptionValueBoolean { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueBoolean {} impl OptionValueBoolean { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) } pub fn builder() -> RTDOptionValueBooleanBuilder { let mut inner = OptionValueBoolean::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueBooleanBuilder { inner } } pub fn value(&self) -> bool { self.value } } #[doc(hidden)] pub struct RTDOptionValueBooleanBuilder { inner: OptionValueBoolean, } impl RTDOptionValueBooleanBuilder { pub fn build(&self) -> OptionValueBoolean { self.inner.clone() } pub fn value(&mut self, value: bool) -> &mut Self { self.inner.value = value; self } } impl AsRef<OptionValueBoolean> for OptionValueBoolean { fn as_ref(&self) -> &OptionValueBoolean { self } } impl AsRef<OptionValueBoolean> for RTDOptionValueBooleanBuilder { fn as_ref(&self) -> &OptionValueBoolean { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueEmpty { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, } impl RObject for OptionValueEmpty { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn 
client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueEmpty {} impl OptionValueEmpty { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) } pub fn builder() -> RTDOptionValueEmptyBuilder { let mut inner = OptionValueEmpty::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueEmptyBuilder { inner } } } #[doc(hidden)] pub struct RTDOptionValueEmptyBuilder { inner: OptionValueEmpty, } impl RTDOptionValueEmptyBuilder { pub fn build(&self) -> OptionValueEmpty { self.inner.clone() } } impl AsRef<OptionValueEmpty> for OptionValueEmpty { fn as_ref(&self) -> &OptionValueEmpty { self } } impl AsRef<OptionValueEmpty> for RTDOptionValueEmptyBuilder { fn as_ref(&self) -> &OptionValueEmpty { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueInteger { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, #[serde(deserialize_with = "super::_common::number_from_string")] value: i64, } impl RObject for OptionValueInteger { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueInteger {} impl OptionValueInteger { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueIntegerBuilder { let mut inner = OptionValueInteger::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueIntegerBuilder { inner } } pub fn value(&self) -> i64 { self.value } } #[doc(hidden)] pub struct RTDOptionValueIntegerBuilder { inner: OptionValueInteger, } impl RTDOptionValueIntegerBuilder { pub fn build(&self) -> OptionValueInteger { self.inner.clone() } pub fn value(&mut self, value: i64) -> &mut Self { self.inner.value = value; self } } impl AsRef<OptionValueInteger> for OptionValueInteger { fn as_ref(&self) -> &OptionValueInteger { self } } impl AsRef<OptionValueInteger> for RTDOptionValueIntegerBuilder { fn as_ref(&self) -> &OptionValueInteger { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueString { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, value: String, } impl RObject for OptionValueString { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueString {} impl OptionValueString { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueStringBuilder { let mut inner = OptionValueString::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueStringBuilder { inner } } pub fn value(&self) -> &String { &self.value } } #[doc(hidden)] pub struct RTDOptionValueStringBuilder { inner: OptionValueString, } impl RTDOptionValueStringBuilder { pub fn build(&self) -> OptionValueString { self.inner.clone() } pub fn value<T: AsRef<str>>(&mut self, value: T) -> &mut Self { self.inner.value = value.as_ref().to_string(); self } } impl AsRef<OptionValueString> for OptionValueString { fn as_ref(&self) -> &OptionValueString { self } } impl AsRef<OptionValueString> for RTDOptionValueStringBuilder { fn as_ref(&self) -> &OptionValueString { &self.inner } }
use crate::errors::*; use crate::types::*; use uuid::Uuid; use std::fmt::Debug; pub trait TDOptionValue: Debug + RObject {} #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(tag = "@type")] pub enum OptionValue { #[doc(hidden)] _Default, #[serde(rename(serialize = "getOption", deserialize = "getOption"))] GetOption(GetOption), #[serde(rename(serialize = "optionValueBoolean", deserialize = "optionValueBoolean"))] Boolean(OptionValueBoolean), #[serde(rename(serialize = "optionValueEmpty", deserialize = "optionValueEmpty"))] Empty(OptionValueEmpty), #[serde(rename(serialize = "optionValueInteger", deserialize = "optionValueInteger"))] Integer(OptionValueInteger), #[serde(rename(serialize = "optionValueString", deserialize = "optionValueString"))] String(OptionValueString), } impl Default for OptionValue { fn default() -> Self { OptionValue::_Default } } impl RObject for OptionValue { #[doc(hidden)] fn extra(&self) -> Option<&str> {
} #[doc(hidden)] fn client_id(&self) -> Option<i32> { match self { OptionValue::GetOption(t) => t.client_id(), OptionValue::Boolean(t) => t.client_id(), OptionValue::Empty(t) => t.client_id(), OptionValue::Integer(t) => t.client_id(), OptionValue::String(t) => t.client_id(), _ => None, } } } impl OptionValue { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) } #[doc(hidden)] pub fn _is_default(&self) -> bool { matches!(self, OptionValue::_Default) } } impl AsRef<OptionValue> for OptionValue { fn as_ref(&self) -> &OptionValue { self } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueBoolean { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, value: bool, } impl RObject for OptionValueBoolean { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueBoolean {} impl OptionValueBoolean { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueBooleanBuilder { let mut inner = OptionValueBoolean::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueBooleanBuilder { inner } } pub fn value(&self) -> bool { self.value } } #[doc(hidden)] pub struct RTDOptionValueBooleanBuilder { inner: OptionValueBoolean, } impl RTDOptionValueBooleanBuilder { pub fn build(&self) -> OptionValueBoolean { self.inner.clone() } pub fn value(&mut self, value: bool) -> &mut Self { self.inner.value = value; self } } impl AsRef<OptionValueBoolean> for OptionValueBoolean { fn as_ref(&self) -> &OptionValueBoolean { self } } impl AsRef<OptionValueBoolean> for RTDOptionValueBooleanBuilder { fn as_ref(&self) -> &OptionValueBoolean { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueEmpty { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, } impl RObject for OptionValueEmpty { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueEmpty {} impl OptionValueEmpty { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueEmptyBuilder { let mut inner = OptionValueEmpty::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueEmptyBuilder { inner } } } #[doc(hidden)] pub struct RTDOptionValueEmptyBuilder { inner: OptionValueEmpty, } impl RTDOptionValueEmptyBuilder { pub fn build(&self) -> OptionValueEmpty { self.inner.clone() } } impl AsRef<OptionValueEmpty> for OptionValueEmpty { fn as_ref(&self) -> &OptionValueEmpty { self } } impl AsRef<OptionValueEmpty> for RTDOptionValueEmptyBuilder { fn as_ref(&self) -> &OptionValueEmpty { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueInteger { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, #[serde(deserialize_with = "super::_common::number_from_string")] value: i64, } impl RObject for OptionValueInteger { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueInteger {} impl OptionValueInteger { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueIntegerBuilder { let mut inner = OptionValueInteger::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueIntegerBuilder { inner } } pub fn value(&self) -> i64 { self.value } } #[doc(hidden)] pub struct RTDOptionValueIntegerBuilder { inner: OptionValueInteger, } impl RTDOptionValueIntegerBuilder { pub fn build(&self) -> OptionValueInteger { self.inner.clone() } pub fn value(&mut self, value: i64) -> &mut Self { self.inner.value = value; self } } impl AsRef<OptionValueInteger> for OptionValueInteger { fn as_ref(&self) -> &OptionValueInteger { self } } impl AsRef<OptionValueInteger> for RTDOptionValueIntegerBuilder { fn as_ref(&self) -> &OptionValueInteger { &self.inner } } #[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct OptionValueString { #[doc(hidden)] #[serde(rename(serialize = "@extra", deserialize = "@extra"))] extra: Option<String>, #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))] client_id: Option<i32>, value: String, } impl RObject for OptionValueString { #[doc(hidden)] fn extra(&self) -> Option<&str> { self.extra.as_deref() } #[doc(hidden)] fn client_id(&self) -> Option<i32> { self.client_id } } impl TDOptionValue for OptionValueString {} impl OptionValueString { pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> { Ok(serde_json::from_str(json.as_ref())?) 
} pub fn builder() -> RTDOptionValueStringBuilder { let mut inner = OptionValueString::default(); inner.extra = Some(Uuid::new_v4().to_string()); RTDOptionValueStringBuilder { inner } } pub fn value(&self) -> &String { &self.value } } #[doc(hidden)] pub struct RTDOptionValueStringBuilder { inner: OptionValueString, } impl RTDOptionValueStringBuilder { pub fn build(&self) -> OptionValueString { self.inner.clone() } pub fn value<T: AsRef<str>>(&mut self, value: T) -> &mut Self { self.inner.value = value.as_ref().to_string(); self } } impl AsRef<OptionValueString> for OptionValueString { fn as_ref(&self) -> &OptionValueString { self } } impl AsRef<OptionValueString> for RTDOptionValueStringBuilder { fn as_ref(&self) -> &OptionValueString { &self.inner } }
match self { OptionValue::GetOption(t) => t.extra(), OptionValue::Boolean(t) => t.extra(), OptionValue::Empty(t) => t.extra(), OptionValue::Integer(t) => t.extra(), OptionValue::String(t) => t.extra(), _ => None, }
if_condition
[ { "content": "pub trait RFunction: Debug + RObject + Serialize {\n\n fn to_json(&self) -> RTDResult<String> {\n\n Ok(serde_json::to_string(self)?)\n\n }\n\n}\n\n\n\nimpl<'a, RObj: RObject> RObject for &'a RObj {\n\n fn extra(&self) -> Option<&str> {\n\n (*self).extra()\n\n }\n\n fn client_id(&self) -> Option<i32> {\n\n (*self).client_id()\n\n }\n\n}\n\n\n\nimpl<'a, RObj: RObject> RObject for &'a mut RObj {\n\n fn extra(&self) -> Option<&str> {\n\n (**self).extra()\n\n }\n\n fn client_id(&self) -> Option<i32> {\n", "file_path": "src/types/_common.rs", "rank": 0, "score": 306538.39670049277 }, { "content": "/// All tdlib type abstract class defined the same behavior\n\npub trait RObject: Debug {\n\n #[doc(hidden)]\n\n fn extra(&self) -> Option<&str>;\n\n fn client_id(&self) -> Option<i32>;\n\n}\n\n\n", "file_path": "src/types/_common.rs", "rank": 1, "score": 288500.8590150145 }, { "content": "/// Represents the type of a user. The following types are possible: regular users, deleted users and bots\n\npub trait TDUserType: Debug + RObject {}\n\n\n\n/// Represents the type of a user. The following types are possible: regular users, deleted users and bots\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum UserType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A bot (see https://core.telegram.org/bots)\n\n #[serde(rename(serialize = \"userTypeBot\", deserialize = \"userTypeBot\"))]\n\n Bot(UserTypeBot),\n\n /// A deleted user or deleted bot. No information on the user besides the user identifier is available. 
It is not possible to perform any active actions on this type of user\n\n #[serde(rename(serialize = \"userTypeDeleted\", deserialize = \"userTypeDeleted\"))]\n\n Deleted(UserTypeDeleted),\n\n /// A regular user\n\n #[serde(rename(serialize = \"userTypeRegular\", deserialize = \"userTypeRegular\"))]\n\n Regular(UserTypeRegular),\n\n /// No information on the user besides the user identifier is available, yet this user has not been deleted. This object is extremely rare and must be handled like a deleted user. It is not possible to perform any actions on users of this type\n\n #[serde(rename(serialize = \"userTypeUnknown\", deserialize = \"userTypeUnknown\"))]\n\n Unknown(UserTypeUnknown),\n", "file_path": "src/types/user_type.rs", "rank": 2, "score": 278029.6369194432 }, { "content": "/// Describes the type of a background\n\npub trait TDBackgroundType: Debug + RObject {}\n\n\n\n/// Describes the type of a background\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum BackgroundType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A filled background\n\n #[serde(rename(serialize = \"backgroundTypeFill\", deserialize = \"backgroundTypeFill\"))]\n\n Fill(BackgroundTypeFill),\n\n /// A PNG or TGV (gzipped subset of SVG with MIME type \"application/x-tgwallpattern\") pattern to be combined with the background fill chosen by the user\n\n #[serde(rename(\n\n serialize = \"backgroundTypePattern\",\n\n deserialize = \"backgroundTypePattern\"\n\n ))]\n\n Pattern(BackgroundTypePattern),\n\n /// A wallpaper in JPEG format\n\n #[serde(rename(\n\n serialize = \"backgroundTypeWallpaper\",\n", "file_path": "src/types/background_type.rs", "rank": 3, "score": 278029.0164518839 }, { "content": "/// Represents the type of a network\n\npub trait TDNetworkType: Debug + RObject {}\n\n\n\n/// Represents the type of a network\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum NetworkType {\n\n #[doc(hidden)]\n\n 
_Default,\n\n /// A mobile network\n\n #[serde(rename(serialize = \"networkTypeMobile\", deserialize = \"networkTypeMobile\"))]\n\n Mobile(NetworkTypeMobile),\n\n /// A mobile roaming network\n\n #[serde(rename(\n\n serialize = \"networkTypeMobileRoaming\",\n\n deserialize = \"networkTypeMobileRoaming\"\n\n ))]\n\n MobileRoaming(NetworkTypeMobileRoaming),\n\n /// The network is not available\n\n #[serde(rename(serialize = \"networkTypeNone\", deserialize = \"networkTypeNone\"))]\n\n None(NetworkTypeNone),\n", "file_path": "src/types/network_type.rs", "rank": 4, "score": 278029.0164518838 }, { "content": "/// Describes the type of a poll\n\npub trait TDPollType: Debug + RObject {}\n\n\n\n/// Describes the type of a poll\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PollType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A poll in quiz mode, which has exactly one correct answer option and can be answered only once\n\n #[serde(rename(serialize = \"pollTypeQuiz\", deserialize = \"pollTypeQuiz\"))]\n\n Quiz(PollTypeQuiz),\n\n /// A regular poll\n\n #[serde(rename(serialize = \"pollTypeRegular\", deserialize = \"pollTypeRegular\"))]\n\n Regular(PollTypeRegular),\n\n}\n\n\n\nimpl Default for PollType {\n\n fn default() -> Self {\n\n PollType::_Default\n\n }\n", "file_path": "src/types/poll_type.rs", "rank": 5, "score": 278029.0164518839 }, { "content": "/// Describes the type of a chat\n\npub trait TDChatType: Debug + RObject {}\n\n\n\n/// Describes the type of a chat\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A basic group (i.e., a chat with 0-200 other users)\n\n #[serde(rename(serialize = \"chatTypeBasicGroup\", deserialize = \"chatTypeBasicGroup\"))]\n\n BasicGroup(ChatTypeBasicGroup),\n\n /// An ordinary chat with a user\n\n #[serde(rename(serialize = \"chatTypePrivate\", deserialize = \"chatTypePrivate\"))]\n\n 
Private(ChatTypePrivate),\n\n /// A secret chat with a user\n\n #[serde(rename(serialize = \"chatTypeSecret\", deserialize = \"chatTypeSecret\"))]\n\n Secret(ChatTypeSecret),\n\n /// A supergroup (i.e. a chat with up to GetOption(\"supergroup_max_size\") other users), or channel (with unlimited members)\n\n #[serde(rename(serialize = \"chatTypeSupergroup\", deserialize = \"chatTypeSupergroup\"))]\n\n Supergroup(ChatTypeSupergroup),\n", "file_path": "src/types/chat_type.rs", "rank": 6, "score": 278029.0164518839 }, { "content": "/// Represents the type of a file\n\npub trait TDFileType: Debug + RObject {}\n\n\n\n/// Represents the type of a file\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum FileType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The file is an animation\n\n #[serde(rename(serialize = \"fileTypeAnimation\", deserialize = \"fileTypeAnimation\"))]\n\n Animation(FileTypeAnimation),\n\n /// The file is an audio file\n\n #[serde(rename(serialize = \"fileTypeAudio\", deserialize = \"fileTypeAudio\"))]\n\n Audio(FileTypeAudio),\n\n /// The file is a document\n\n #[serde(rename(serialize = \"fileTypeDocument\", deserialize = \"fileTypeDocument\"))]\n\n Document(FileTypeDocument),\n\n /// The data is not a file\n\n #[serde(rename(serialize = \"fileTypeNone\", deserialize = \"fileTypeNone\"))]\n\n None(FileTypeNone),\n", "file_path": "src/types/file_type.rs", "rank": 7, "score": 278029.0164518839 }, { "content": "/// Describes the type of a proxy server\n\npub trait TDProxyType: Debug + RObject {}\n\n\n\n/// Describes the type of a proxy server\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ProxyType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A HTTP transparent proxy server\n\n #[serde(rename(serialize = \"proxyTypeHttp\", deserialize = \"proxyTypeHttp\"))]\n\n Http(ProxyTypeHttp),\n\n /// An MTProto proxy server\n\n #[serde(rename(serialize = \"proxyTypeMtproto\", 
deserialize = \"proxyTypeMtproto\"))]\n\n Mtproto(ProxyTypeMtproto),\n\n /// A SOCKS5 proxy server\n\n #[serde(rename(serialize = \"proxyTypeSocks5\", deserialize = \"proxyTypeSocks5\"))]\n\n Socks5(ProxyTypeSocks5),\n\n}\n\n\n\nimpl Default for ProxyType {\n", "file_path": "src/types/proxy_type.rs", "rank": 8, "score": 278028.9597786254 }, { "content": "/// Describes the type of a URL linking to an internal Telegram entity\n\npub trait TDTMeUrlType: Debug + RObject {}\n\n\n\n/// Describes the type of a URL linking to an internal Telegram entity\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum TMeUrlType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A chat invite link\n\n #[serde(rename(\n\n serialize = \"tMeUrlTypeChatInvite\",\n\n deserialize = \"tMeUrlTypeChatInvite\"\n\n ))]\n\n ChatInvite(TMeUrlTypeChatInvite),\n\n /// A URL linking to a sticker set\n\n #[serde(rename(\n\n serialize = \"tMeUrlTypeStickerSet\",\n\n deserialize = \"tMeUrlTypeStickerSet\"\n\n ))]\n\n StickerSet(TMeUrlTypeStickerSet),\n", "file_path": "src/types/t_me_url_type.rs", "rank": 9, "score": 278028.79571009404 }, { "content": "/// Contains detailed information about a notification\n\npub trait TDNotificationType: Debug + RObject {}\n\n\n\n/// Contains detailed information about a notification\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum NotificationType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// New call was received\n\n #[serde(rename(\n\n serialize = \"notificationTypeNewCall\",\n\n deserialize = \"notificationTypeNewCall\"\n\n ))]\n\n NewCall(NotificationTypeNewCall),\n\n /// New message was received\n\n #[serde(rename(\n\n serialize = \"notificationTypeNewMessage\",\n\n deserialize = \"notificationTypeNewMessage\"\n\n ))]\n\n NewMessage(NotificationTypeNewMessage),\n", "file_path": "src/types/notification_type.rs", "rank": 10, "score": 278022.7101286711 }, { "content": "/// Contains notifications 
about data changes\n\npub trait TDUpdate: Debug + RObject {}\n\n\n\n/// Contains notifications about data changes\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum Update {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Does nothing and ensures that the Update object is used; for testing only. This is an offline method. Can be called before authorization\n\n #[serde(rename(serialize = \"testUseUpdate\", deserialize = \"testUseUpdate\"))]\n\n TestUseUpdate(TestUseUpdate),\n\n /// Contains active notifications that was shown on previous application launches. This update is sent only if the message database is used. In that case it comes once before any updateNotification and updateNotificationGroup update\n\n #[serde(rename(\n\n serialize = \"updateActiveNotifications\",\n\n deserialize = \"updateActiveNotifications\"\n\n ))]\n\n ActiveNotifications(UpdateActiveNotifications),\n\n /// The parameters of animation search through GetOption(\"animation_search_bot_username\") bot has changed\n\n #[serde(rename(\n\n serialize = \"updateAnimationSearchParameters\",\n", "file_path": "src/types/update.rs", "rank": 11, "score": 271352.69280782196 }, { "content": "/// Describes the type of a call server\n\npub trait TDCallServerType: Debug + RObject {}\n\n\n\n/// Describes the type of a call server\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CallServerType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A Telegram call reflector\n\n #[serde(rename(\n\n serialize = \"callServerTypeTelegramReflector\",\n\n deserialize = \"callServerTypeTelegramReflector\"\n\n ))]\n\n TelegramReflector(CallServerTypeTelegramReflector),\n\n /// A WebRTC server\n\n #[serde(rename(\n\n serialize = \"callServerTypeWebrtc\",\n\n deserialize = \"callServerTypeWebrtc\"\n\n ))]\n\n Webrtc(CallServerTypeWebrtc),\n", "file_path": "src/types/call_server_type.rs", "rank": 12, "score": 270230.2300434907 }, { "content": "/// 
Describes a keyboard button type\n\npub trait TDKeyboardButtonType: Debug + RObject {}\n\n\n\n/// Describes a keyboard button type\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum KeyboardButtonType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A button that sends the user's location when pressed; available only in private chats\n\n #[serde(rename(\n\n serialize = \"keyboardButtonTypeRequestLocation\",\n\n deserialize = \"keyboardButtonTypeRequestLocation\"\n\n ))]\n\n RequestLocation(KeyboardButtonTypeRequestLocation),\n\n /// A button that sends the user's phone number when pressed; available only in private chats\n\n #[serde(rename(\n\n serialize = \"keyboardButtonTypeRequestPhoneNumber\",\n\n deserialize = \"keyboardButtonTypeRequestPhoneNumber\"\n\n ))]\n\n RequestPhoneNumber(KeyboardButtonTypeRequestPhoneNumber),\n", "file_path": "src/types/keyboard_button_type.rs", "rank": 13, "score": 270230.2300434907 }, { "content": "/// Describes a type of public chats\n\npub trait TDPublicChatType: Debug + RObject {}\n\n\n\n/// Describes a type of public chats\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PublicChatType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The chat is public, because it has username\n\n #[serde(rename(\n\n serialize = \"publicChatTypeHasUsername\",\n\n deserialize = \"publicChatTypeHasUsername\"\n\n ))]\n\n HasUsername(PublicChatTypeHasUsername),\n\n /// The chat is public, because it is a location-based supergroup\n\n #[serde(rename(\n\n serialize = \"publicChatTypeIsLocationBased\",\n\n deserialize = \"publicChatTypeIsLocationBased\"\n\n ))]\n\n IsLocationBased(PublicChatTypeIsLocationBased),\n", "file_path": "src/types/public_chat_type.rs", "rank": 14, "score": 270230.2300434907 }, { "content": "/// Contains the type of a Telegram Passport element\n\npub trait TDPassportElementType: Debug + RObject {}\n\n\n\n/// Contains the type of a Telegram Passport 
element\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PassportElementType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A Telegram Passport element containing the user's address\n\n #[serde(rename(\n\n serialize = \"passportElementTypeAddress\",\n\n deserialize = \"passportElementTypeAddress\"\n\n ))]\n\n Address(PassportElementTypeAddress),\n\n /// A Telegram Passport element containing the user's bank statement\n\n #[serde(rename(\n\n serialize = \"passportElementTypeBankStatement\",\n\n deserialize = \"passportElementTypeBankStatement\"\n\n ))]\n\n BankStatement(PassportElementTypeBankStatement),\n", "file_path": "src/types/passport_element_type.rs", "rank": 15, "score": 270230.17437977483 }, { "content": "/// Describes the type of notifications in a notification group\n\npub trait TDNotificationGroupType: Debug + RObject {}\n\n\n\n/// Describes the type of notifications in a notification group\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum NotificationGroupType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A group containing notifications of type notificationTypeNewCall\n\n #[serde(rename(\n\n serialize = \"notificationGroupTypeCalls\",\n\n deserialize = \"notificationGroupTypeCalls\"\n\n ))]\n\n Calls(NotificationGroupTypeCalls),\n\n /// A group containing notifications of type notificationTypeNewMessage and notificationTypeNewPushMessage with unread mentions of the current user, replies to their messages, or a pinned message\n\n #[serde(rename(\n\n serialize = \"notificationGroupTypeMentions\",\n\n deserialize = \"notificationGroupTypeMentions\"\n\n ))]\n\n Mentions(NotificationGroupTypeMentions),\n", "file_path": "src/types/notification_group_type.rs", "rank": 16, "score": 270230.1743797749 }, { "content": "/// Provides information about the method by which an authentication code is delivered to the user\n\npub trait TDAuthenticationCodeType: Debug + RObject {}\n\n\n\n/// 
Provides information about the method by which an authentication code is delivered to the user\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum AuthenticationCodeType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An authentication code is delivered via a phone call to the specified phone number\n\n #[serde(rename(\n\n serialize = \"authenticationCodeTypeCall\",\n\n deserialize = \"authenticationCodeTypeCall\"\n\n ))]\n\n Call(AuthenticationCodeTypeCall),\n\n /// An authentication code is delivered by an immediately cancelled call to the specified phone number. The number from which the call was made is the code\n\n #[serde(rename(\n\n serialize = \"authenticationCodeTypeFlashCall\",\n\n deserialize = \"authenticationCodeTypeFlashCall\"\n\n ))]\n\n FlashCall(AuthenticationCodeTypeFlashCall),\n", "file_path": "src/types/authentication_code_type.rs", "rank": 17, "score": 270223.98039353645 }, { "content": "/// Represents a part of the text which must be formatted differently\n\npub trait TDTextEntityType: Debug + RObject {}\n\n\n\n/// Represents a part of the text which must be formatted differently\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum TextEntityType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A bank card number. The getBankCardInfo method can be used to get information about the bank card\n\n #[serde(rename(\n\n serialize = \"textEntityTypeBankCardNumber\",\n\n deserialize = \"textEntityTypeBankCardNumber\"\n\n ))]\n\n BankCardNumber(TextEntityTypeBankCardNumber),\n\n /// A bold text\n\n #[serde(rename(serialize = \"textEntityTypeBold\", deserialize = \"textEntityTypeBold\"))]\n\n Bold(TextEntityTypeBold),\n\n /// A bot command, beginning with \"/\". 
This shouldn't be highlighted if there are no bots in the chat\n\n #[serde(rename(\n\n serialize = \"textEntityTypeBotCommand\",\n", "file_path": "src/types/text_entity_type.rs", "rank": 18, "score": 270223.98039353645 }, { "content": "/// Describes the type of an inline keyboard button\n\npub trait TDInlineKeyboardButtonType: Debug + RObject {}\n\n\n\n/// Describes the type of an inline keyboard button\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InlineKeyboardButtonType {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A button to buy something. This button must be in the first column and row of the keyboard and can be attached only to a message with content of the type messageInvoice\n\n #[serde(rename(\n\n serialize = \"inlineKeyboardButtonTypeBuy\",\n\n deserialize = \"inlineKeyboardButtonTypeBuy\"\n\n ))]\n\n Buy(InlineKeyboardButtonTypeBuy),\n\n /// A button that sends a callback query to a bot\n\n #[serde(rename(\n\n serialize = \"inlineKeyboardButtonTypeCallback\",\n\n deserialize = \"inlineKeyboardButtonTypeCallback\"\n\n ))]\n\n Callback(InlineKeyboardButtonTypeCallback),\n", "file_path": "src/types/inline_keyboard_button_type.rs", "rank": 19, "score": 263108.1140884477 }, { "content": "/// Contains animated stickers which should be used for dice animation rendering\n\npub trait TDDiceStickers: Debug + RObject {}\n\n\n\n/// Contains animated stickers which should be used for dice animation rendering\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum DiceStickers {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A regular animated sticker\n\n #[serde(rename(serialize = \"diceStickersRegular\", deserialize = \"diceStickersRegular\"))]\n\n Regular(DiceStickersRegular),\n\n /// Animated stickers to be combined into a slot machine\n\n #[serde(rename(\n\n serialize = \"diceStickersSlotMachine\",\n\n deserialize = \"diceStickersSlotMachine\"\n\n ))]\n\n 
SlotMachine(DiceStickersSlotMachine),\n\n}\n\n\n\nimpl Default for DiceStickers {\n", "file_path": "src/types/dice_stickers.rs", "rank": 20, "score": 262271.1644971982 }, { "content": "/// Describes the different types of activity in a chat\n\npub trait TDChatAction: Debug + RObject {}\n\n\n\n/// Describes the different types of activity in a chat\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatAction {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The user has cancelled the previous action\n\n #[serde(rename(serialize = \"chatActionCancel\", deserialize = \"chatActionCancel\"))]\n\n Cancel(ChatActionCancel),\n\n /// The user is picking a contact to send\n\n #[serde(rename(\n\n serialize = \"chatActionChoosingContact\",\n\n deserialize = \"chatActionChoosingContact\"\n\n ))]\n\n ChoosingContact(ChatActionChoosingContact),\n\n /// The user is picking a location or venue to send\n\n #[serde(rename(\n\n serialize = \"chatActionChoosingLocation\",\n", "file_path": "src/types/chat_action.rs", "rank": 21, "score": 262271.1425896326 }, { "content": "/// Describes the exact type of a problem with a call\n\npub trait TDCallProblem: Debug + RObject {}\n\n\n\n/// Describes the exact type of a problem with a call\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CallProblem {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The speech was distorted\n\n #[serde(rename(\n\n serialize = \"callProblemDistortedSpeech\",\n\n deserialize = \"callProblemDistortedSpeech\"\n\n ))]\n\n DistortedSpeech(CallProblemDistortedSpeech),\n\n /// The video was distorted\n\n #[serde(rename(\n\n serialize = \"callProblemDistortedVideo\",\n\n deserialize = \"callProblemDistortedVideo\"\n\n ))]\n\n DistortedVideo(CallProblemDistortedVideo),\n", "file_path": "src/types/call_problem.rs", "rank": 22, "score": 262271.1425896326 }, { "content": "/// Represents a data needed to subscribe for push notifications through 
registerDevice method. To use specific push notification service, the correct application platform must be specified and a valid server authentication data must be uploaded at https://my.telegram.org\n\npub trait TDDeviceToken: Debug + RObject {}\n\n\n\n/// Represents a data needed to subscribe for push notifications through registerDevice method. To use specific push notification service, the correct application platform must be specified and a valid server authentication data must be uploaded at https://my.telegram.org\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum DeviceToken {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A token for Apple Push Notification service\n\n #[serde(rename(\n\n serialize = \"deviceTokenApplePush\",\n\n deserialize = \"deviceTokenApplePush\"\n\n ))]\n\n ApplePush(DeviceTokenApplePush),\n\n /// A token for Apple Push Notification service VoIP notifications\n\n #[serde(rename(\n\n serialize = \"deviceTokenApplePushVoIP\",\n\n deserialize = \"deviceTokenApplePushVoIP\"\n\n ))]\n\n ApplePushVoIP(DeviceTokenApplePushVoIP),\n", "file_path": "src/types/device_token.rs", "rank": 23, "score": 262270.1995468276 }, { "content": "/// Contains information about a Telegram Passport element\n\npub trait TDPassportElement: Debug + RObject {}\n\n\n\n/// Contains information about a Telegram Passport element\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PassportElement {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns one of the available Telegram Passport elements\n\n #[serde(rename(serialize = \"getPassportElement\", deserialize = \"getPassportElement\"))]\n\n GetPassportElement(GetPassportElement),\n\n /// A Telegram Passport element containing the user's address\n\n #[serde(rename(\n\n serialize = \"passportElementAddress\",\n\n deserialize = \"passportElementAddress\"\n\n ))]\n\n Address(PassportElementAddress),\n\n /// A Telegram Passport element containing the 
user's bank statement\n\n #[serde(rename(\n\n serialize = \"passportElementBankStatement\",\n", "file_path": "src/types/passport_element.rs", "rank": 24, "score": 262264.9486033942 }, { "content": "/// Represents the current authorization state of the TDLib client\n\npub trait TDAuthorizationState: Debug + RObject {}\n\n\n\n/// Represents the current authorization state of the TDLib client\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum AuthorizationState {\n\n #[doc(hidden)]\n\n _Default,\n\n /// TDLib client is in its final state. All databases are closed and all resources are released. No other updates will be received after this. All queries will be responded to with error code 500. To continue working, one should create a new instance of the TDLib client\n\n #[serde(rename(\n\n serialize = \"authorizationStateClosed\",\n\n deserialize = \"authorizationStateClosed\"\n\n ))]\n\n Closed(AuthorizationStateClosed),\n\n /// TDLib is closing, all subsequent queries will be answered with the error 500. Note that closing TDLib can take a while. 
All resources will be freed only after authorizationStateClosed has been received\n\n #[serde(rename(\n\n serialize = \"authorizationStateClosing\",\n\n deserialize = \"authorizationStateClosing\"\n\n ))]\n\n Closing(AuthorizationStateClosing),\n", "file_path": "src/types/authorization_state.rs", "rank": 25, "score": 262264.94860339415 }, { "content": "/// Points to a file\n\npub trait TDInputFile: Debug + RObject {}\n\n\n\n/// Points to a file\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputFile {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A file generated by the application\n\n #[serde(rename(serialize = \"inputFileGenerated\", deserialize = \"inputFileGenerated\"))]\n\n Generated(InputFileGenerated),\n\n /// A file defined by its unique ID\n\n #[serde(rename(serialize = \"inputFileId\", deserialize = \"inputFileId\"))]\n\n Id(InputFileId),\n\n /// A file defined by a local path\n\n #[serde(rename(serialize = \"inputFileLocal\", deserialize = \"inputFileLocal\"))]\n\n Local(InputFileLocal),\n\n /// A file defined by its remote ID. The remote ID is guaranteed to be usable only if the corresponding file is still accessible to the user and known to TDLib. For example, if the file is from a message, then the message must be not deleted and accessible to the user. 
If the file database is disabled, then the corresponding object with the file must be preloaded by the application\n\n #[serde(rename(serialize = \"inputFileRemote\", deserialize = \"inputFileRemote\"))]\n\n Remote(InputFileRemote),\n", "file_path": "src/types/input_file.rs", "rank": 26, "score": 262264.9486033942 }, { "content": "/// Describes a fill of a background\n\npub trait TDBackgroundFill: Debug + RObject {}\n\n\n\n/// Describes a fill of a background\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum BackgroundFill {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Describes a gradient fill of a background\n\n #[serde(rename(\n\n serialize = \"backgroundFillGradient\",\n\n deserialize = \"backgroundFillGradient\"\n\n ))]\n\n Gradient(BackgroundFillGradient),\n\n /// Describes a solid fill of a background\n\n #[serde(rename(serialize = \"backgroundFillSolid\", deserialize = \"backgroundFillSolid\"))]\n\n Solid(BackgroundFillSolid),\n\n}\n\n\n\nimpl Default for BackgroundFill {\n", "file_path": "src/types/background_fill.rs", "rank": 27, "score": 262264.9486033942 }, { "content": "/// Contains a detailed statistics about a chat\n\npub trait TDChatStatistics: Debug + RObject {}\n\n\n\n/// Contains a detailed statistics about a chat\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatStatistics {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A detailed statistics about a channel chat\n\n #[serde(rename(\n\n serialize = \"chatStatisticsChannel\",\n\n deserialize = \"chatStatisticsChannel\"\n\n ))]\n\n Channel(ChatStatisticsChannel),\n\n /// A detailed statistics about a supergroup chat\n\n #[serde(rename(\n\n serialize = \"chatStatisticsSupergroup\",\n\n deserialize = \"chatStatisticsSupergroup\"\n\n ))]\n\n Supergroup(ChatStatisticsSupergroup),\n", "file_path": "src/types/chat_statistics.rs", "rank": 28, "score": 262264.94860339415 }, { "content": "/// Describes a stream to which TDLib 
internal log is written\n\npub trait TDLogStream: Debug + RObject {}\n\n\n\n/// Describes a stream to which TDLib internal log is written\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum LogStream {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns information about currently used log stream for internal logging of TDLib. Can be called synchronously\n\n #[serde(rename(serialize = \"getLogStream\", deserialize = \"getLogStream\"))]\n\n GetLogStream(GetLogStream),\n\n /// The log is written to stderr or an OS specific log\n\n #[serde(rename(serialize = \"logStreamDefault\", deserialize = \"logStreamDefault\"))]\n\n Default(LogStreamDefault),\n\n /// The log is written nowhere\n\n #[serde(rename(serialize = \"logStreamEmpty\", deserialize = \"logStreamEmpty\"))]\n\n Empty(LogStreamEmpty),\n\n /// The log is written to a file\n\n #[serde(rename(serialize = \"logStreamFile\", deserialize = \"logStreamFile\"))]\n\n File(LogStreamFile),\n", "file_path": "src/types/log_stream.rs", "rank": 29, "score": 262264.9486033942 }, { "content": "/// Describes a statistical graph\n\npub trait TDStatisticalGraph: Debug + RObject {}\n\n\n\n/// Describes a statistical graph\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum StatisticalGraph {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Loads an asynchronous or a zoomed in statistical graph\n\n #[serde(rename(serialize = \"getStatisticalGraph\", deserialize = \"getStatisticalGraph\"))]\n\n GetStatisticalGraph(GetStatisticalGraph),\n\n /// The graph data to be asynchronously loaded through getStatisticalGraph\n\n #[serde(rename(\n\n serialize = \"statisticalGraphAsync\",\n\n deserialize = \"statisticalGraphAsync\"\n\n ))]\n\n Async(StatisticalGraphAsync),\n\n /// A graph data\n\n #[serde(rename(\n\n serialize = \"statisticalGraphData\",\n", "file_path": "src/types/statistical_graph.rs", "rank": 30, "score": 262264.9486033942 }, { "content": "/// Contains 
information about the payment method chosen by the user\n\npub trait TDInputCredentials: Debug + RObject {}\n\n\n\n/// Contains information about the payment method chosen by the user\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputCredentials {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Applies if a user enters new credentials using Android Pay\n\n #[serde(rename(\n\n serialize = \"inputCredentialsAndroidPay\",\n\n deserialize = \"inputCredentialsAndroidPay\"\n\n ))]\n\n AndroidPay(InputCredentialsAndroidPay),\n\n /// Applies if a user enters new credentials using Apple Pay\n\n #[serde(rename(\n\n serialize = \"inputCredentialsApplePay\",\n\n deserialize = \"inputCredentialsApplePay\"\n\n ))]\n\n ApplePay(InputCredentialsApplePay),\n", "file_path": "src/types/input_credentials.rs", "rank": 31, "score": 262264.94860339415 }, { "content": "/// Contains the content of a message\n\npub trait TDMessageContent: Debug + RObject {}\n\n\n\n/// Contains the content of a message\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MessageContent {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An animation message (GIF-style).\n\n #[serde(rename(serialize = \"messageAnimation\", deserialize = \"messageAnimation\"))]\n\n MessageAnimation(MessageAnimation),\n\n /// An audio message\n\n #[serde(rename(serialize = \"messageAudio\", deserialize = \"messageAudio\"))]\n\n MessageAudio(MessageAudio),\n\n /// A newly created basic group\n\n #[serde(rename(\n\n serialize = \"messageBasicGroupChatCreate\",\n\n deserialize = \"messageBasicGroupChatCreate\"\n\n ))]\n\n MessageBasicGroupChatCreate(MessageBasicGroupChatCreate),\n", "file_path": "src/types/message_content.rs", "rank": 32, "score": 262264.9486033942 }, { "content": "/// Describes a text object inside an instant-view web page\n\npub trait TDRichText: Debug + RObject {}\n\n\n\n/// Describes a text object inside an instant-view web 
page\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum RichText {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An anchor\n\n #[serde(rename(serialize = \"richTextAnchor\", deserialize = \"richTextAnchor\"))]\n\n Anchor(RichTextAnchor),\n\n /// A link to an anchor on the same web page\n\n #[serde(rename(serialize = \"richTextAnchorLink\", deserialize = \"richTextAnchorLink\"))]\n\n AnchorLink(RichTextAnchorLink),\n\n /// A bold rich text\n\n #[serde(rename(serialize = \"richTextBold\", deserialize = \"richTextBold\"))]\n\n Bold(RichTextBold),\n\n /// A rich text email link\n\n #[serde(rename(\n\n serialize = \"richTextEmailAddress\",\n", "file_path": "src/types/rich_text.rs", "rank": 33, "score": 262264.94860339415 }, { "content": "/// Describes a list of chats\n\npub trait TDChatList: Debug + RObject {}\n\n\n\n/// Describes a list of chats\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatList {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A list of chats usually located at the top of the main chat list. 
Unmuted chats are automatically moved from the Archive to the Main chat list when a new message arrives\n\n #[serde(rename(serialize = \"chatListArchive\", deserialize = \"chatListArchive\"))]\n\n Archive(ChatListArchive),\n\n /// A list of chats belonging to a chat filter\n\n #[serde(rename(serialize = \"chatListFilter\", deserialize = \"chatListFilter\"))]\n\n Filter(ChatListFilter),\n\n /// A main list of chats\n\n #[serde(rename(serialize = \"chatListMain\", deserialize = \"chatListMain\"))]\n\n Main(ChatListMain),\n\n}\n\n\n\nimpl Default for ChatList {\n", "file_path": "src/types/chat_list.rs", "rank": 34, "score": 262264.94860339415 }, { "content": "/// Describes a block of an instant view web page\n\npub trait TDPageBlock: Debug + RObject {}\n\n\n\n/// Describes a block of an instant view web page\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PageBlock {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An invisible anchor on a page, which can be used in a URL to open the page from the specified anchor\n\n #[serde(rename(serialize = \"pageBlockAnchor\", deserialize = \"pageBlockAnchor\"))]\n\n Anchor(PageBlockAnchor),\n\n /// An animation\n\n #[serde(rename(serialize = \"pageBlockAnimation\", deserialize = \"pageBlockAnimation\"))]\n\n Animation(PageBlockAnimation),\n\n /// An audio file\n\n #[serde(rename(serialize = \"pageBlockAudio\", deserialize = \"pageBlockAudio\"))]\n\n Audio(PageBlockAudio),\n\n /// The author and publishing date of a page\n\n #[serde(rename(serialize = \"pageBlockAuthorDate\", deserialize = \"pageBlockAuthorDate\"))]\n\n AuthorDate(PageBlockAuthorDate),\n", "file_path": "src/types/page_block.rs", "rank": 35, "score": 262264.94860339415 }, { "content": "/// Describes the current call state\n\npub trait TDCallState: Debug + RObject {}\n\n\n\n/// Describes the current call state\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CallState {\n\n 
#[doc(hidden)]\n\n _Default,\n\n /// The call has ended successfully\n\n #[serde(rename(serialize = \"callStateDiscarded\", deserialize = \"callStateDiscarded\"))]\n\n Discarded(CallStateDiscarded),\n\n /// The call has ended with an error\n\n #[serde(rename(serialize = \"callStateError\", deserialize = \"callStateError\"))]\n\n Error(CallStateError),\n\n /// The call has been answered and encryption keys are being exchanged\n\n #[serde(rename(\n\n serialize = \"callStateExchangingKeys\",\n\n deserialize = \"callStateExchangingKeys\"\n\n ))]\n\n ExchangingKeys(CallStateExchangingKeys),\n", "file_path": "src/types/call_state.rs", "rank": 37, "score": 262264.94860339415 }, { "content": "/// Describes a reason why an external chat is shown in a chat list\n\npub trait TDChatSource: Debug + RObject {}\n\n\n\n/// Describes a reason why an external chat is shown in a chat list\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatSource {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The chat is sponsored by the user's MTProxy server\n\n #[serde(rename(\n\n serialize = \"chatSourceMtprotoProxy\",\n\n deserialize = \"chatSourceMtprotoProxy\"\n\n ))]\n\n MtprotoProxy(ChatSourceMtprotoProxy),\n\n /// The chat contains a public service announcement\n\n #[serde(rename(\n\n serialize = \"chatSourcePublicServiceAnnouncement\",\n\n deserialize = \"chatSourcePublicServiceAnnouncement\"\n\n ))]\n\n PublicServiceAnnouncement(ChatSourcePublicServiceAnnouncement),\n", "file_path": "src/types/chat_source.rs", "rank": 38, "score": 262264.94860339415 }, { "content": "/// Describes format of the thumbnail\n\npub trait TDThumbnailFormat: Debug + RObject {}\n\n\n\n/// Describes format of the thumbnail\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ThumbnailFormat {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The thumbnail is in static GIF format. 
It will be used only for some bot inline results\n\n #[serde(rename(serialize = \"thumbnailFormatGif\", deserialize = \"thumbnailFormatGif\"))]\n\n Gif(ThumbnailFormatGif),\n\n /// The thumbnail is in JPEG format\n\n #[serde(rename(serialize = \"thumbnailFormatJpeg\", deserialize = \"thumbnailFormatJpeg\"))]\n\n Jpeg(ThumbnailFormatJpeg),\n\n /// The thumbnail is in MPEG4 format. It will be used only for some animations and videos\n\n #[serde(rename(\n\n serialize = \"thumbnailFormatMpeg4\",\n\n deserialize = \"thumbnailFormatMpeg4\"\n\n ))]\n\n Mpeg4(ThumbnailFormatMpeg4),\n", "file_path": "src/types/thumbnail_format.rs", "rank": 39, "score": 262264.9486033942 }, { "content": "/// Contains information about the sender of a message\n\npub trait TDMessageSender: Debug + RObject {}\n\n\n\n/// Contains information about the sender of a message\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MessageSender {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The message was sent on behalf of a chat\n\n #[serde(rename(serialize = \"messageSenderChat\", deserialize = \"messageSenderChat\"))]\n\n Chat(MessageSenderChat),\n\n /// The message was sent by a known user\n\n #[serde(rename(serialize = \"messageSenderUser\", deserialize = \"messageSenderUser\"))]\n\n User(MessageSenderUser),\n\n}\n\n\n\nimpl Default for MessageSender {\n\n fn default() -> Self {\n\n MessageSender::_Default\n\n }\n", "file_path": "src/types/message_sender.rs", "rank": 40, "score": 262264.9486033942 }, { "content": "/// Describes an action suggested to the current user\n\npub trait TDSuggestedAction: Debug + RObject {}\n\n\n\n/// Describes an action suggested to the current user\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum SuggestedAction {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Suggests the user to check authorization phone number and change the phone number if it is inaccessible\n\n #[serde(rename(\n\n serialize = 
\"suggestedActionCheckPhoneNumber\",\n\n deserialize = \"suggestedActionCheckPhoneNumber\"\n\n ))]\n\n CheckPhoneNumber(SuggestedActionCheckPhoneNumber),\n\n /// Suggests the user to enable \"archive_and_mute_new_chats_from_unknown_users\" option\n\n #[serde(rename(\n\n serialize = \"suggestedActionEnableArchiveAndMuteNewChats\",\n\n deserialize = \"suggestedActionEnableArchiveAndMuteNewChats\"\n\n ))]\n\n EnableArchiveAndMuteNewChats(SuggestedActionEnableArchiveAndMuteNewChats),\n", "file_path": "src/types/suggested_action.rs", "rank": 41, "score": 262264.9486033942 }, { "content": "/// Part of the face, relative to which a mask should be placed\n\npub trait TDMaskPoint: Debug + RObject {}\n\n\n\n/// Part of the face, relative to which a mask should be placed\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MaskPoint {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A mask should be placed relatively to the chin\n\n #[serde(rename(serialize = \"maskPointChin\", deserialize = \"maskPointChin\"))]\n\n Chin(MaskPointChin),\n\n /// A mask should be placed relatively to the eyes\n\n #[serde(rename(serialize = \"maskPointEyes\", deserialize = \"maskPointEyes\"))]\n\n Eyes(MaskPointEyes),\n\n /// A mask should be placed relatively to the forehead\n\n #[serde(rename(serialize = \"maskPointForehead\", deserialize = \"maskPointForehead\"))]\n\n Forehead(MaskPointForehead),\n\n /// A mask should be placed relatively to the mouth\n\n #[serde(rename(serialize = \"maskPointMouth\", deserialize = \"maskPointMouth\"))]\n\n Mouth(MaskPointMouth),\n", "file_path": "src/types/mask_point.rs", "rank": 42, "score": 262264.94860339415 }, { "content": "/// Represents a JSON value\n\npub trait TDJsonValue: Debug + RObject {}\n\n\n\n/// Represents a JSON value\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum JsonValue {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns application config, provided by the server. 
Can be called before authorization\n\n #[serde(rename(\n\n serialize = \"getApplicationConfig\",\n\n deserialize = \"getApplicationConfig\"\n\n ))]\n\n GetApplicationConfig(GetApplicationConfig),\n\n /// Converts a JSON-serialized string to corresponding JsonValue object. Can be called synchronously\n\n #[serde(rename(serialize = \"getJsonValue\", deserialize = \"getJsonValue\"))]\n\n GetJsonValue(GetJsonValue),\n\n /// Represents a JSON array\n\n #[serde(rename(serialize = \"jsonValueArray\", deserialize = \"jsonValueArray\"))]\n\n Array(JsonValueArray),\n", "file_path": "src/types/json_value.rs", "rank": 43, "score": 262264.9486033942 }, { "content": "/// Describes a sticker that needs to be added to a sticker set\n\npub trait TDInputSticker: Debug + RObject {}\n\n\n\n/// Describes a sticker that needs to be added to a sticker set\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputSticker {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An animated sticker in TGS format\n\n #[serde(rename(\n\n serialize = \"inputStickerAnimated\",\n\n deserialize = \"inputStickerAnimated\"\n\n ))]\n\n Animated(InputStickerAnimated),\n\n /// A static sticker in PNG format, which will be converted to WEBP server-side\n\n #[serde(rename(serialize = \"inputStickerStatic\", deserialize = \"inputStickerStatic\"))]\n\n Static(InputStickerStatic),\n\n}\n\n\n\nimpl Default for InputSticker {\n", "file_path": "src/types/input_sticker.rs", "rank": 44, "score": 262264.9486033942 }, { "content": "/// Describes the current state of the connection to Telegram servers\n\npub trait TDConnectionState: Debug + RObject {}\n\n\n\n/// Describes the current state of the connection to Telegram servers\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ConnectionState {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Currently establishing a connection to the Telegram servers\n\n #[serde(rename(\n\n serialize = 
\"connectionStateConnecting\",\n\n deserialize = \"connectionStateConnecting\"\n\n ))]\n\n Connecting(ConnectionStateConnecting),\n\n /// Currently establishing a connection with a proxy server\n\n #[serde(rename(\n\n serialize = \"connectionStateConnectingToProxy\",\n\n deserialize = \"connectionStateConnectingToProxy\"\n\n ))]\n\n ConnectingToProxy(ConnectionStateConnectingToProxy),\n", "file_path": "src/types/connection_state.rs", "rank": 45, "score": 262264.9486033942 }, { "content": "/// Contains information about background to set\n\npub trait TDInputBackground: Debug + RObject {}\n\n\n\n/// Contains information about background to set\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputBackground {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A background from a local file\n\n #[serde(rename(\n\n serialize = \"inputBackgroundLocal\",\n\n deserialize = \"inputBackgroundLocal\"\n\n ))]\n\n Local(InputBackgroundLocal),\n\n /// A background from the server\n\n #[serde(rename(\n\n serialize = \"inputBackgroundRemote\",\n\n deserialize = \"inputBackgroundRemote\"\n\n ))]\n\n Remote(InputBackgroundRemote),\n", "file_path": "src/types/input_background.rs", "rank": 46, "score": 262264.9486033942 }, { "content": "/// Contains a description of a custom keyboard and actions that can be done with it to quickly reply to bots\n\npub trait TDReplyMarkup: Debug + RObject {}\n\n\n\n/// Contains a description of a custom keyboard and actions that can be done with it to quickly reply to bots\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ReplyMarkup {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Instructs application to force a reply to this message\n\n #[serde(rename(\n\n serialize = \"replyMarkupForceReply\",\n\n deserialize = \"replyMarkupForceReply\"\n\n ))]\n\n ForceReply(ReplyMarkupForceReply),\n\n /// Contains an inline keyboard layout\n\n #[serde(rename(\n\n serialize = 
\"replyMarkupInlineKeyboard\",\n\n deserialize = \"replyMarkupInlineKeyboard\"\n\n ))]\n\n InlineKeyboard(ReplyMarkupInlineKeyboard),\n", "file_path": "src/types/reply_markup.rs", "rank": 47, "score": 262264.94860339415 }, { "content": "/// Describes the last time the user was online\n\npub trait TDUserStatus: Debug + RObject {}\n\n\n\n/// Describes the last time the user was online\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum UserStatus {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The user status was never changed\n\n #[serde(rename(serialize = \"userStatusEmpty\", deserialize = \"userStatusEmpty\"))]\n\n Empty(UserStatusEmpty),\n\n /// The user is offline, but was online last month\n\n #[serde(rename(serialize = \"userStatusLastMonth\", deserialize = \"userStatusLastMonth\"))]\n\n LastMonth(UserStatusLastMonth),\n\n /// The user is offline, but was online last week\n\n #[serde(rename(serialize = \"userStatusLastWeek\", deserialize = \"userStatusLastWeek\"))]\n\n LastWeek(UserStatusLastWeek),\n\n /// The user is offline\n\n #[serde(rename(serialize = \"userStatusOffline\", deserialize = \"userStatusOffline\"))]\n\n Offline(UserStatusOffline),\n", "file_path": "src/types/user_status.rs", "rank": 48, "score": 262264.9486033942 }, { "content": "/// Contains information about an inline button of type inlineKeyboardButtonTypeLoginUrl\n\npub trait TDLoginUrlInfo: Debug + RObject {}\n\n\n\n/// Contains information about an inline button of type inlineKeyboardButtonTypeLoginUrl\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum LoginUrlInfo {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns information about a button of type inlineKeyboardButtonTypeLoginUrl. 
The method needs to be called when the user presses the button\n\n #[serde(rename(serialize = \"getLoginUrlInfo\", deserialize = \"getLoginUrlInfo\"))]\n\n GetLoginUrlInfo(GetLoginUrlInfo),\n\n /// An HTTP url needs to be open\n\n #[serde(rename(serialize = \"loginUrlInfoOpen\", deserialize = \"loginUrlInfoOpen\"))]\n\n Open(LoginUrlInfoOpen),\n\n /// An authorization confirmation dialog needs to be shown to the user\n\n #[serde(rename(\n\n serialize = \"loginUrlInfoRequestConfirmation\",\n\n deserialize = \"loginUrlInfoRequestConfirmation\"\n\n ))]\n\n RequestConfirmation(LoginUrlInfoRequestConfirmation),\n", "file_path": "src/types/login_url_info.rs", "rank": 49, "score": 254077.7825179567 }, { "content": "/// Describes the types of chats to which notification settings are applied\n\npub trait TDNotificationSettingsScope: Debug + RObject {}\n\n\n\n/// Describes the types of chats to which notification settings are applied\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum NotificationSettingsScope {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Notification settings applied to all channels when the corresponding chat setting has a default value\n\n #[serde(rename(\n\n serialize = \"notificationSettingsScopeChannelChats\",\n\n deserialize = \"notificationSettingsScopeChannelChats\"\n\n ))]\n\n ChannelChats(NotificationSettingsScopeChannelChats),\n\n /// Notification settings applied to all basic groups and supergroups when the corresponding chat setting has a default value\n\n #[serde(rename(\n\n serialize = \"notificationSettingsScopeGroupChats\",\n\n deserialize = \"notificationSettingsScopeGroupChats\"\n\n ))]\n\n GroupChats(NotificationSettingsScopeGroupChats),\n", "file_path": "src/types/notification_settings_scope.rs", "rank": 50, "score": 254076.99503251296 }, { "content": "/// Represents the categories of chats for which a list of frequently used chats can be retrieved\n\npub trait TDTopChatCategory: Debug + RObject 
{}\n\n\n\n/// Represents the categories of chats for which a list of frequently used chats can be retrieved\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum TopChatCategory {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A category containing frequently used private chats with bot users\n\n #[serde(rename(serialize = \"topChatCategoryBots\", deserialize = \"topChatCategoryBots\"))]\n\n Bots(TopChatCategoryBots),\n\n /// A category containing frequently used chats used for calls\n\n #[serde(rename(\n\n serialize = \"topChatCategoryCalls\",\n\n deserialize = \"topChatCategoryCalls\"\n\n ))]\n\n Calls(TopChatCategoryCalls),\n\n /// A category containing frequently used channels\n\n #[serde(rename(\n\n serialize = \"topChatCategoryChannels\",\n", "file_path": "src/types/top_chat_category.rs", "rank": 51, "score": 254076.96470397653 }, { "content": "/// Represents a payload of a callback query\n\npub trait TDCallbackQueryPayload: Debug + RObject {}\n\n\n\n/// Represents a payload of a callback query\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CallbackQueryPayload {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The payload for a general callback button\n\n #[serde(rename(\n\n serialize = \"callbackQueryPayloadData\",\n\n deserialize = \"callbackQueryPayloadData\"\n\n ))]\n\n Data(CallbackQueryPayloadData),\n\n /// The payload for a callback button requiring password\n\n #[serde(rename(\n\n serialize = \"callbackQueryPayloadDataWithPassword\",\n\n deserialize = \"callbackQueryPayloadDataWithPassword\"\n\n ))]\n\n DataWithPassword(CallbackQueryPayloadDataWithPassword),\n", "file_path": "src/types/callback_query_payload.rs", "rank": 52, "score": 254070.85572718468 }, { "content": "/// Provides information about the status of a member in a chat\n\npub trait TDChatMemberStatus: Debug + RObject {}\n\n\n\n/// Provides information about the status of a member in a chat\n\n#[derive(Debug, Clone, 
Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatMemberStatus {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The user is a member of a chat and has some additional privileges. In basic groups, administrators can edit and delete messages sent by others, add new members, and ban unprivileged members. In supergroups and channels, there are more detailed options for administrator privileges\n\n #[serde(rename(\n\n serialize = \"chatMemberStatusAdministrator\",\n\n deserialize = \"chatMemberStatusAdministrator\"\n\n ))]\n\n Administrator(ChatMemberStatusAdministrator),\n\n /// The user was banned (and hence is not a member of the chat). Implies the user can't return to the chat or view messages\n\n #[serde(rename(\n\n serialize = \"chatMemberStatusBanned\",\n\n deserialize = \"chatMemberStatusBanned\"\n\n ))]\n\n Banned(ChatMemberStatusBanned),\n", "file_path": "src/types/chat_member_status.rs", "rank": 53, "score": 254070.85572718462 }, { "content": "/// Describes the way the text should be parsed for TextEntities\n\npub trait TDTextParseMode: Debug + RObject {}\n\n\n\n/// Describes the way the text should be parsed for TextEntities\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum TextParseMode {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The text uses HTML-style formatting. 
The same as Telegram Bot API \"HTML\" parse mode\n\n #[serde(rename(serialize = \"textParseModeHTML\", deserialize = \"textParseModeHTML\"))]\n\n HTML(TextParseModeHTML),\n\n /// The text uses Markdown-style formatting\n\n #[serde(rename(\n\n serialize = \"textParseModeMarkdown\",\n\n deserialize = \"textParseModeMarkdown\"\n\n ))]\n\n Markdown(TextParseModeMarkdown),\n\n}\n\n\n\nimpl Default for TextParseMode {\n", "file_path": "src/types/text_parse_mode.rs", "rank": 54, "score": 254070.85572718462 }, { "content": "/// Describes actions which should be possible to do through a chat action bar\n\npub trait TDChatActionBar: Debug + RObject {}\n\n\n\n/// Describes actions which should be possible to do through a chat action bar\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatActionBar {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The chat is a private or secret chat and the other user can be added to the contact list using the method addContact\n\n #[serde(rename(\n\n serialize = \"chatActionBarAddContact\",\n\n deserialize = \"chatActionBarAddContact\"\n\n ))]\n\n AddContact(ChatActionBarAddContact),\n\n /// The chat is a private or secret chat, which can be reported using the method reportChat, or the other user can be blocked using the method blockUser, or the other user can be added to the contact list using the method addContact\n\n #[serde(rename(\n\n serialize = \"chatActionBarReportAddBlock\",\n\n deserialize = \"chatActionBarReportAddBlock\"\n\n ))]\n\n ReportAddBlock(ChatActionBarReportAddBlock),\n", "file_path": "src/types/chat_action_bar.rs", "rank": 55, "score": 254070.85572718468 }, { "content": "/// Specifies the kind of chat members to return in getSupergroupMembers\n\npub trait TDSupergroupMembersFilter: Debug + RObject {}\n\n\n\n/// Specifies the kind of chat members to return in getSupergroupMembers\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum 
SupergroupMembersFilter {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns the owner and administrators\n\n #[serde(rename(\n\n serialize = \"supergroupMembersFilterAdministrators\",\n\n deserialize = \"supergroupMembersFilterAdministrators\"\n\n ))]\n\n Administrators(SupergroupMembersFilterAdministrators),\n\n /// Returns users banned from the supergroup or channel; can be used only by administrators\n\n #[serde(rename(\n\n serialize = \"supergroupMembersFilterBanned\",\n\n deserialize = \"supergroupMembersFilterBanned\"\n\n ))]\n\n Banned(SupergroupMembersFilterBanned),\n", "file_path": "src/types/supergroup_members_filter.rs", "rank": 56, "score": 254070.85572718462 }, { "content": "/// Represents a single result of an inline query\n\npub trait TDInlineQueryResult: Debug + RObject {}\n\n\n\n/// Represents a single result of an inline query\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InlineQueryResult {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Represents an animation file\n\n #[serde(rename(\n\n serialize = \"inlineQueryResultAnimation\",\n\n deserialize = \"inlineQueryResultAnimation\"\n\n ))]\n\n Animation(InlineQueryResultAnimation),\n\n /// Represents a link to an article or web page\n\n #[serde(rename(\n\n serialize = \"inlineQueryResultArticle\",\n\n deserialize = \"inlineQueryResultArticle\"\n\n ))]\n\n Article(InlineQueryResultArticle),\n", "file_path": "src/types/inline_query_result.rs", "rank": 57, "score": 254070.85572718462 }, { "content": "/// The content of a message to send\n\npub trait TDInputMessageContent: Debug + RObject {}\n\n\n\n/// The content of a message to send\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputMessageContent {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An animation message (GIF-style).\n\n #[serde(rename(\n\n serialize = \"inputMessageAnimation\",\n\n deserialize = \"inputMessageAnimation\"\n\n ))]\n\n 
InputMessageAnimation(InputMessageAnimation),\n\n /// An audio message\n\n #[serde(rename(serialize = \"inputMessageAudio\", deserialize = \"inputMessageAudio\"))]\n\n InputMessageAudio(InputMessageAudio),\n\n /// A message containing a user contact\n\n #[serde(rename(serialize = \"inputMessageContact\", deserialize = \"inputMessageContact\"))]\n\n InputMessageContact(InputMessageContact),\n", "file_path": "src/types/input_message_content.rs", "rank": 58, "score": 254070.85572718465 }, { "content": "/// Describes available user privacy settings\n\npub trait TDUserPrivacySetting: Debug + RObject {}\n\n\n\n/// Describes available user privacy settings\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum UserPrivacySetting {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A privacy setting for managing whether the user can be called\n\n #[serde(rename(\n\n serialize = \"userPrivacySettingAllowCalls\",\n\n deserialize = \"userPrivacySettingAllowCalls\"\n\n ))]\n\n AllowCalls(UserPrivacySettingAllowCalls),\n\n /// A privacy setting for managing whether the user can be invited to chats\n\n #[serde(rename(\n\n serialize = \"userPrivacySettingAllowChatInvites\",\n\n deserialize = \"userPrivacySettingAllowChatInvites\"\n\n ))]\n\n AllowChatInvites(UserPrivacySettingAllowChatInvites),\n", "file_path": "src/types/user_privacy_setting.rs", "rank": 59, "score": 254070.85572718462 }, { "content": "/// Describes a photo to be set as a user profile or chat photo\n\npub trait TDInputChatPhoto: Debug + RObject {}\n\n\n\n/// Describes a photo to be set as a user profile or chat photo\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputChatPhoto {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An animation in MPEG4 format; must be square, at most 10 seconds long, have width between 160 and 800 and be at most 2MB in size\n\n #[serde(rename(\n\n serialize = \"inputChatPhotoAnimation\",\n\n deserialize = 
\"inputChatPhotoAnimation\"\n\n ))]\n\n Animation(InputChatPhotoAnimation),\n\n /// A previously used profile photo of the current user\n\n #[serde(rename(\n\n serialize = \"inputChatPhotoPrevious\",\n\n deserialize = \"inputChatPhotoPrevious\"\n\n ))]\n\n Previous(InputChatPhotoPrevious),\n", "file_path": "src/types/input_chat_photo.rs", "rank": 60, "score": 254070.85572718462 }, { "content": "/// Contains statistics about network usage\n\npub trait TDNetworkStatisticsEntry: Debug + RObject {}\n\n\n\n/// Contains statistics about network usage\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum NetworkStatisticsEntry {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Contains information about the total amount of data that was used for calls\n\n #[serde(rename(\n\n serialize = \"networkStatisticsEntryCall\",\n\n deserialize = \"networkStatisticsEntryCall\"\n\n ))]\n\n Call(NetworkStatisticsEntryCall),\n\n /// Contains information about the total amount of data that was used to send and receive files\n\n #[serde(rename(\n\n serialize = \"networkStatisticsEntryFile\",\n\n deserialize = \"networkStatisticsEntryFile\"\n\n ))]\n\n File(NetworkStatisticsEntryFile),\n", "file_path": "src/types/network_statistics_entry.rs", "rank": 61, "score": 254070.85572718462 }, { "content": "/// Describes the reason why a call was discarded\n\npub trait TDCallDiscardReason: Debug + RObject {}\n\n\n\n/// Describes the reason why a call was discarded\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CallDiscardReason {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The call was ended before the conversation started. 
It was declined by the other party\n\n #[serde(rename(\n\n serialize = \"callDiscardReasonDeclined\",\n\n deserialize = \"callDiscardReasonDeclined\"\n\n ))]\n\n Declined(CallDiscardReasonDeclined),\n\n /// The call was ended during the conversation because the users were disconnected\n\n #[serde(rename(\n\n serialize = \"callDiscardReasonDisconnected\",\n\n deserialize = \"callDiscardReasonDisconnected\"\n\n ))]\n\n Disconnected(CallDiscardReasonDisconnected),\n", "file_path": "src/types/call_discard_reason.rs", "rank": 62, "score": 254070.85572718468 }, { "content": "/// Represents a chat event\n\npub trait TDChatEventAction: Debug + RObject {}\n\n\n\n/// Represents a chat event\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatEventAction {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The chat description was changed\n\n #[serde(rename(\n\n serialize = \"chatEventDescriptionChanged\",\n\n deserialize = \"chatEventDescriptionChanged\"\n\n ))]\n\n ChatEventDescriptionChanged(ChatEventDescriptionChanged),\n\n /// The can_invite_users permission of a supergroup chat was toggled\n\n #[serde(rename(\n\n serialize = \"chatEventInvitesToggled\",\n\n deserialize = \"chatEventInvitesToggled\"\n\n ))]\n\n ChatEventInvitesToggled(ChatEventInvitesToggled),\n", "file_path": "src/types/chat_event_action.rs", "rank": 63, "score": 254070.85572718465 }, { "content": "/// Specifies the kind of chat members to return in searchChatMembers\n\npub trait TDChatMembersFilter: Debug + RObject {}\n\n\n\n/// Specifies the kind of chat members to return in searchChatMembers\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatMembersFilter {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns the owner and administrators\n\n #[serde(rename(\n\n serialize = \"chatMembersFilterAdministrators\",\n\n deserialize = \"chatMembersFilterAdministrators\"\n\n ))]\n\n Administrators(ChatMembersFilterAdministrators),\n\n 
/// Returns users banned from the chat; can be used only by administrators in a supergroup or in a channel\n\n #[serde(rename(\n\n serialize = \"chatMembersFilterBanned\",\n\n deserialize = \"chatMembersFilterBanned\"\n\n ))]\n\n Banned(ChatMembersFilterBanned),\n", "file_path": "src/types/chat_members_filter.rs", "rank": 64, "score": 254070.85572718465 }, { "content": "/// Describes the reason why a chat is reported\n\npub trait TDChatReportReason: Debug + RObject {}\n\n\n\n/// Describes the reason why a chat is reported\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum ChatReportReason {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The chat has child abuse related content\n\n #[serde(rename(\n\n serialize = \"chatReportReasonChildAbuse\",\n\n deserialize = \"chatReportReasonChildAbuse\"\n\n ))]\n\n ChildAbuse(ChatReportReasonChildAbuse),\n\n /// The chat contains copyrighted content\n\n #[serde(rename(\n\n serialize = \"chatReportReasonCopyright\",\n\n deserialize = \"chatReportReasonCopyright\"\n\n ))]\n\n Copyright(ChatReportReasonCopyright),\n", "file_path": "src/types/chat_report_reason.rs", "rank": 65, "score": 254070.85572718465 }, { "content": "/// Contains content of a push message notification\n\npub trait TDPushMessageContent: Debug + RObject {}\n\n\n\n/// Contains content of a push message notification\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PushMessageContent {\n\n #[doc(hidden)]\n\n _Default,\n\n /// An animation message (GIF-style).\n\n #[serde(rename(\n\n serialize = \"pushMessageContentAnimation\",\n\n deserialize = \"pushMessageContentAnimation\"\n\n ))]\n\n Animation(PushMessageContentAnimation),\n\n /// An audio message\n\n #[serde(rename(\n\n serialize = \"pushMessageContentAudio\",\n\n deserialize = \"pushMessageContentAudio\"\n\n ))]\n\n Audio(PushMessageContentAudio),\n", "file_path": "src/types/push_message_content.rs", "rank": 66, "score": 
254070.85572718462 }, { "content": "/// Contains information about a Telegram Passport element to be saved\n\npub trait TDInputPassportElement: Debug + RObject {}\n\n\n\n/// Contains information about a Telegram Passport element to be saved\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputPassportElement {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A Telegram Passport element to be saved containing the user's address\n\n #[serde(rename(\n\n serialize = \"inputPassportElementAddress\",\n\n deserialize = \"inputPassportElementAddress\"\n\n ))]\n\n Address(InputPassportElementAddress),\n\n /// A Telegram Passport element to be saved containing the user's bank statement\n\n #[serde(rename(\n\n serialize = \"inputPassportElementBankStatement\",\n\n deserialize = \"inputPassportElementBankStatement\"\n\n ))]\n\n BankStatement(InputPassportElementBankStatement),\n", "file_path": "src/types/input_passport_element.rs", "rank": 67, "score": 254070.85572718468 }, { "content": "/// Contains information about the sending state of the message\n\npub trait TDMessageSendingState: Debug + RObject {}\n\n\n\n/// Contains information about the sending state of the message\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MessageSendingState {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The message failed to be sent\n\n #[serde(rename(\n\n serialize = \"messageSendingStateFailed\",\n\n deserialize = \"messageSendingStateFailed\"\n\n ))]\n\n Failed(MessageSendingStateFailed),\n\n /// The message is being sent now, but has not yet been delivered to the server\n\n #[serde(rename(\n\n serialize = \"messageSendingStatePending\",\n\n deserialize = \"messageSendingStatePending\"\n\n ))]\n\n Pending(MessageSendingStatePending),\n", "file_path": "src/types/message_sending_state.rs", "rank": 68, "score": 254070.85572718462 }, { "content": "/// Contains information about the origin of a forwarded message\n\npub 
trait TDMessageForwardOrigin: Debug + RObject {}\n\n\n\n/// Contains information about the origin of a forwarded message\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MessageForwardOrigin {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The message was originally a post in a channel\n\n #[serde(rename(\n\n serialize = \"messageForwardOriginChannel\",\n\n deserialize = \"messageForwardOriginChannel\"\n\n ))]\n\n Channel(MessageForwardOriginChannel),\n\n /// The message was originally sent by an anonymous chat administrator on behalf of the chat\n\n #[serde(rename(\n\n serialize = \"messageForwardOriginChat\",\n\n deserialize = \"messageForwardOriginChat\"\n\n ))]\n\n Chat(MessageForwardOriginChat),\n", "file_path": "src/types/message_forward_origin.rs", "rank": 69, "score": 254070.85572718462 }, { "content": "/// Contains information about the time when a scheduled message will be sent\n\npub trait TDMessageSchedulingState: Debug + RObject {}\n\n\n\n/// Contains information about the time when a scheduled message will be sent\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum MessageSchedulingState {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The message will be sent at the specified date\n\n #[serde(rename(\n\n serialize = \"messageSchedulingStateSendAtDate\",\n\n deserialize = \"messageSchedulingStateSendAtDate\"\n\n ))]\n\n SendAtDate(MessageSchedulingStateSendAtDate),\n\n /// The message will be sent when the peer will be online. 
Applicable to private chats only and when the exact online status of the peer is known\n\n #[serde(rename(\n\n serialize = \"messageSchedulingStateSendWhenOnline\",\n\n deserialize = \"messageSchedulingStateSendWhenOnline\"\n\n ))]\n\n SendWhenOnline(MessageSchedulingStateSendWhenOnline),\n", "file_path": "src/types/message_scheduling_state.rs", "rank": 70, "score": 254070.85572718465 }, { "content": "/// Describes the current secret chat state\n\npub trait TDSecretChatState: Debug + RObject {}\n\n\n\n/// Describes the current secret chat state\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum SecretChatState {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The secret chat is closed\n\n #[serde(rename(\n\n serialize = \"secretChatStateClosed\",\n\n deserialize = \"secretChatStateClosed\"\n\n ))]\n\n Closed(SecretChatStateClosed),\n\n /// The secret chat is not yet created; waiting for the other user to get online\n\n #[serde(rename(\n\n serialize = \"secretChatStatePending\",\n\n deserialize = \"secretChatStatePending\"\n\n ))]\n\n Pending(SecretChatStatePending),\n", "file_path": "src/types/secret_chat_state.rs", "rank": 71, "score": 254070.85572718462 }, { "content": "/// Represents a filter for message search results\n\npub trait TDSearchMessagesFilter: Debug + RObject {}\n\n\n\n/// Represents a filter for message search results\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum SearchMessagesFilter {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns only animation messages\n\n #[serde(rename(\n\n serialize = \"searchMessagesFilterAnimation\",\n\n deserialize = \"searchMessagesFilterAnimation\"\n\n ))]\n\n Animation(SearchMessagesFilterAnimation),\n\n /// Returns only audio messages\n\n #[serde(rename(\n\n serialize = \"searchMessagesFilterAudio\",\n\n deserialize = \"searchMessagesFilterAudio\"\n\n ))]\n\n Audio(SearchMessagesFilterAudio),\n", "file_path": 
"src/types/search_messages_filter.rs", "rank": 72, "score": 254070.85572718462 }, { "content": "/// Represents result of checking whether the current session can be used to transfer a chat ownership to another user\n\npub trait TDCanTransferOwnershipResult: Debug + RObject {}\n\n\n\n/// Represents result of checking whether the current session can be used to transfer a chat ownership to another user\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CanTransferOwnershipResult {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Checks whether the current session can be used to transfer a chat ownership to another user\n\n #[serde(rename(\n\n serialize = \"canTransferOwnership\",\n\n deserialize = \"canTransferOwnership\"\n\n ))]\n\n CanTransferOwnership(CanTransferOwnership),\n\n /// The session can be used\n\n #[serde(rename(\n\n serialize = \"canTransferOwnershipResultOk\",\n\n deserialize = \"canTransferOwnershipResultOk\"\n\n ))]\n\n Ok(CanTransferOwnershipResultOk),\n", "file_path": "src/types/can_transfer_ownership_result.rs", "rank": 73, "score": 246650.68044162783 }, { "content": "/// Represents result of checking whether a username can be set for a chat\n\npub trait TDCheckChatUsernameResult: Debug + RObject {}\n\n\n\n/// Represents result of checking whether a username can be set for a chat\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum CheckChatUsernameResult {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Checks whether a username can be set for a chat\n\n #[serde(rename(serialize = \"checkChatUsername\", deserialize = \"checkChatUsername\"))]\n\n CheckChatUsername(CheckChatUsername),\n\n /// The username can be set\n\n #[serde(rename(\n\n serialize = \"checkChatUsernameResultOk\",\n\n deserialize = \"checkChatUsernameResultOk\"\n\n ))]\n\n Ok(CheckChatUsernameResultOk),\n\n /// The user has too much chats with username, one of them should be made private first\n\n 
#[serde(rename(\n\n serialize = \"checkChatUsernameResultPublicChatsTooMuch\",\n", "file_path": "src/types/check_chat_username_result.rs", "rank": 74, "score": 246644.7746316148 }, { "content": "/// Represents the value of a string in a language pack\n\npub trait TDLanguagePackStringValue: Debug + RObject {}\n\n\n\n/// Represents the value of a string in a language pack\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum LanguagePackStringValue {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Returns a string stored in the local database from the specified localization target and language pack by its key. Returns a 404 error if the string is not found. Can be called synchronously\n\n #[serde(rename(\n\n serialize = \"getLanguagePackString\",\n\n deserialize = \"getLanguagePackString\"\n\n ))]\n\n GetLanguagePackString(GetLanguagePackString),\n\n /// A deleted language pack string, the value should be taken from the built-in english language pack\n\n #[serde(rename(\n\n serialize = \"languagePackStringValueDeleted\",\n\n deserialize = \"languagePackStringValueDeleted\"\n\n ))]\n\n Deleted(LanguagePackStringValueDeleted),\n", "file_path": "src/types/language_pack_string_value.rs", "rank": 75, "score": 246644.7746316148 }, { "content": "/// Represents a single result of an inline query; for bots only\n\npub trait TDInputInlineQueryResult: Debug + RObject {}\n\n\n\n/// Represents a single result of an inline query; for bots only\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputInlineQueryResult {\n\n #[doc(hidden)]\n\n _Default,\n\n /// Represents a link to an animated GIF or an animated (i.e. 
without sound) H.264/MPEG-4 AVC video\n\n #[serde(rename(\n\n serialize = \"inputInlineQueryResultAnimation\",\n\n deserialize = \"inputInlineQueryResultAnimation\"\n\n ))]\n\n Animation(InputInlineQueryResultAnimation),\n\n /// Represents a link to an article or web page\n\n #[serde(rename(\n\n serialize = \"inputInlineQueryResultArticle\",\n\n deserialize = \"inputInlineQueryResultArticle\"\n\n ))]\n\n Article(InputInlineQueryResultArticle),\n", "file_path": "src/types/input_inline_query_result.rs", "rank": 76, "score": 246644.7746316148 }, { "content": "/// Represents a single rule for managing privacy settings\n\npub trait TDUserPrivacySettingRule: Debug + RObject {}\n\n\n\n/// Represents a single rule for managing privacy settings\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum UserPrivacySettingRule {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A rule to allow all users to do something\n\n #[serde(rename(\n\n serialize = \"userPrivacySettingRuleAllowAll\",\n\n deserialize = \"userPrivacySettingRuleAllowAll\"\n\n ))]\n\n AllowAll(UserPrivacySettingRuleAllowAll),\n\n /// A rule to allow all members of certain specified basic groups and supergroups to doing something\n\n #[serde(rename(\n\n serialize = \"userPrivacySettingRuleAllowChatMembers\",\n\n deserialize = \"userPrivacySettingRuleAllowChatMembers\"\n\n ))]\n\n AllowChatMembers(UserPrivacySettingRuleAllowChatMembers),\n", "file_path": "src/types/user_privacy_setting_rule.rs", "rank": 77, "score": 246644.7746316148 }, { "content": "/// Contains the description of an error in a Telegram Passport element\n\npub trait TDPassportElementErrorSource: Debug + RObject {}\n\n\n\n/// Contains the description of an error in a Telegram Passport element\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PassportElementErrorSource {\n\n #[doc(hidden)]\n\n _Default,\n\n /// One of the data fields contains an error. 
The error will be considered resolved when the value of the field changes\n\n #[serde(rename(\n\n serialize = \"passportElementErrorSourceDataField\",\n\n deserialize = \"passportElementErrorSourceDataField\"\n\n ))]\n\n DataField(PassportElementErrorSourceDataField),\n\n /// The file contains an error. The error will be considered resolved when the file changes\n\n #[serde(rename(\n\n serialize = \"passportElementErrorSourceFile\",\n\n deserialize = \"passportElementErrorSourceFile\"\n\n ))]\n\n File(PassportElementErrorSourceFile),\n", "file_path": "src/types/passport_element_error_source.rs", "rank": 78, "score": 246644.7746316148 }, { "content": "/// Describes a horizontal alignment of a table cell content\n\npub trait TDPageBlockHorizontalAlignment: Debug + RObject {}\n\n\n\n/// Describes a horizontal alignment of a table cell content\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PageBlockHorizontalAlignment {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The content should be center-aligned\n\n #[serde(rename(\n\n serialize = \"pageBlockHorizontalAlignmentCenter\",\n\n deserialize = \"pageBlockHorizontalAlignmentCenter\"\n\n ))]\n\n Center(PageBlockHorizontalAlignmentCenter),\n\n /// The content should be left-aligned\n\n #[serde(rename(\n\n serialize = \"pageBlockHorizontalAlignmentLeft\",\n\n deserialize = \"pageBlockHorizontalAlignmentLeft\"\n\n ))]\n\n Left(PageBlockHorizontalAlignmentLeft),\n", "file_path": "src/types/page_block_horizontal_alignment.rs", "rank": 79, "score": 246644.7746316148 }, { "content": "/// Describes a Vertical alignment of a table cell content\n\npub trait TDPageBlockVerticalAlignment: Debug + RObject {}\n\n\n\n/// Describes a Vertical alignment of a table cell content\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum PageBlockVerticalAlignment {\n\n #[doc(hidden)]\n\n _Default,\n\n /// The content should be bottom-aligned\n\n #[serde(rename(\n\n 
serialize = \"pageBlockVerticalAlignmentBottom\",\n\n deserialize = \"pageBlockVerticalAlignmentBottom\"\n\n ))]\n\n Bottom(PageBlockVerticalAlignmentBottom),\n\n /// The content should be middle-aligned\n\n #[serde(rename(\n\n serialize = \"pageBlockVerticalAlignmentMiddle\",\n\n deserialize = \"pageBlockVerticalAlignmentMiddle\"\n\n ))]\n\n Middle(PageBlockVerticalAlignmentMiddle),\n", "file_path": "src/types/page_block_vertical_alignment.rs", "rank": 80, "score": 246644.7746316148 }, { "content": "/// Contains the description of an error in a Telegram Passport element; for bots only\n\npub trait TDInputPassportElementErrorSource: Debug + RObject {}\n\n\n\n/// Contains the description of an error in a Telegram Passport element; for bots only\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"@type\")]\n\npub enum InputPassportElementErrorSource {\n\n #[doc(hidden)]\n\n _Default,\n\n /// A data field contains an error. The error is considered resolved when the field's value changes\n\n #[serde(rename(\n\n serialize = \"inputPassportElementErrorSourceDataField\",\n\n deserialize = \"inputPassportElementErrorSourceDataField\"\n\n ))]\n\n DataField(InputPassportElementErrorSourceDataField),\n\n /// The file contains an error. The error is considered resolved when the file changes\n\n #[serde(rename(\n\n serialize = \"inputPassportElementErrorSourceFile\",\n\n deserialize = \"inputPassportElementErrorSourceFile\"\n\n ))]\n\n File(InputPassportElementErrorSourceFile),\n", "file_path": "src/types/input_passport_element_error_source.rs", "rank": 81, "score": 239883.56321411827 }, { "content": "fn deserialize_traits(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n if let Some(td_type) = deserialize_update(rtd_trait_type, rtd_trait_value.clone())? {\n\n return Ok(Some(td_type));\n\n };\n\n\n\n if let Some(td_type) = deserialize_json_value(rtd_trait_type, rtd_trait_value.clone())? 
{\n\n return Ok(Some(td_type));\n\n };\n\n\n\n if let Some(td_type) = deserialize_option_value(rtd_trait_type, rtd_trait_value.clone())? {\n\n return Ok(Some(td_type));\n\n };\n\n if let Some(td_type) = deserialize_authorization_state(rtd_trait_type, rtd_trait_value.clone())?\n\n {\n\n return Ok(Some(td_type));\n\n };\n\n\n", "file_path": "src/types/_common.rs", "rank": 82, "score": 230260.71426302503 }, { "content": "fn deserialize_direct_types(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match rtd_trait_type {\n\n \"accountTtl\" => Some(TdType::AccountTtl(serde_json::from_value(rtd_trait_value)?)),\n\n \"animations\" => Some(TdType::Animations(serde_json::from_value(rtd_trait_value)?)),\n\n \"authenticationCodeInfo\" => Some(TdType::AuthenticationCodeInfo(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n \"autoDownloadSettingsPresets\" => Some(TdType::AutoDownloadSettingsPresets(\n\n serde_json::from_value(rtd_trait_value)?,\n\n )),\n\n \"background\" => Some(TdType::Background(serde_json::from_value(rtd_trait_value)?)),\n\n \"backgrounds\" => Some(TdType::Backgrounds(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n \"bankCardInfo\" => Some(TdType::BankCardInfo(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n", "file_path": "src/types/_common.rs", "rank": 83, "score": 182799.79281551496 }, { "content": "fn deserialize_update(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match UPDATE_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::Update(serde_json::from_value(rtd_trait_value)?)),\n\n false => None,\n\n })\n\n}\n\n\n\nimpl TdType {\n\n pub fn extra(&self) -> Option<&str> {\n\n match self {\n\n TdType::AuthorizationState(value) => value.extra(),\n\n\n\n TdType::CanTransferOwnershipResult(value) => value.extra(),\n\n\n\n TdType::ChatStatistics(value) => 
value.extra(),\n\n\n\n TdType::CheckChatUsernameResult(value) => value.extra(),\n", "file_path": "src/types/_common.rs", "rank": 84, "score": 177347.37535882252 }, { "content": "fn deserialize_option_value(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match OPTIONVALUE_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::OptionValue(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst PASSPORTELEMENT_MEMBERS: &[&str] = &[\n\n \"getPassportElement\",\n\n \"passportElementAddress\",\n\n \"passportElementBankStatement\",\n\n \"passportElementDriverLicense\",\n\n \"passportElementEmailAddress\",\n\n \"passportElementIdentityCard\",\n\n \"passportElementInternalPassport\",\n\n \"passportElementPassport\",\n\n \"passportElementPassportRegistration\",\n\n \"passportElementPersonalDetails\",\n\n \"passportElementPhoneNumber\",\n\n \"passportElementRentalAgreement\",\n\n \"passportElementTemporaryRegistration\",\n\n \"passportElementUtilityBill\",\n\n \"setPassportElement\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 85, "score": 173057.3058517308 }, { "content": "fn deserialize_log_stream(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match LOGSTREAM_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::LogStream(serde_json::from_value(rtd_trait_value)?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst LOGINURLINFO_MEMBERS: &[&str] = &[\n\n \"getLoginUrlInfo\",\n\n \"loginUrlInfoOpen\",\n\n \"loginUrlInfoRequestConfirmation\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 86, "score": 173057.3058517308 }, { "content": "fn deserialize_authorization_state(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match 
AUTHORIZATIONSTATE_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::AuthorizationState(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst CANTRANSFEROWNERSHIPRESULT_MEMBERS: &[&str] = &[\n\n \"canTransferOwnership\",\n\n \"canTransferOwnershipResultOk\",\n\n \"canTransferOwnershipResultPasswordNeeded\",\n\n \"canTransferOwnershipResultPasswordTooFresh\",\n\n \"canTransferOwnershipResultSessionTooFresh\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 87, "score": 173057.3058517308 }, { "content": "fn deserialize_statistical_graph(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match STATISTICALGRAPH_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::StatisticalGraph(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst UPDATE_MEMBERS: &[&str] = &[\n\n \"testUseUpdate\",\n\n \"updateActiveNotifications\",\n\n \"updateAnimationSearchParameters\",\n\n \"updateAuthorizationState\",\n\n \"updateBasicGroup\",\n\n \"updateBasicGroupFullInfo\",\n\n \"updateCall\",\n", "file_path": "src/types/_common.rs", "rank": 88, "score": 173057.3058517308 }, { "content": "fn deserialize_passport_element(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match PASSPORTELEMENT_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::PassportElement(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst STATISTICALGRAPH_MEMBERS: &[&str] = &[\n\n \"getStatisticalGraph\",\n\n \"statisticalGraphAsync\",\n\n \"statisticalGraphData\",\n\n \"statisticalGraphError\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 89, "score": 173057.30585173084 }, { "content": "fn deserialize_chat_statistics(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) 
-> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match CHATSTATISTICS_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::ChatStatistics(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst CHECKCHATUSERNAMERESULT_MEMBERS: &[&str] = &[\n\n \"checkChatUsername\",\n\n \"checkChatUsernameResultOk\",\n\n \"checkChatUsernameResultPublicChatsTooMuch\",\n\n \"checkChatUsernameResultPublicGroupsUnavailable\",\n\n \"checkChatUsernameResultUsernameInvalid\",\n\n \"checkChatUsernameResultUsernameOccupied\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 90, "score": 173057.3058517308 }, { "content": "fn deserialize_json_value(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match JSONVALUE_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::JsonValue(serde_json::from_value(rtd_trait_value)?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst LANGUAGEPACKSTRINGVALUE_MEMBERS: &[&str] = &[\n\n \"getLanguagePackString\",\n\n \"languagePackStringValueDeleted\",\n\n \"languagePackStringValueOrdinary\",\n\n \"languagePackStringValuePluralized\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 91, "score": 173057.3058517308 }, { "content": "fn deserialize_login_url_info(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(match LOGINURLINFO_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::LoginUrlInfo(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n })\n\n}\n\n\n\nconst OPTIONVALUE_MEMBERS: &[&str] = &[\n\n \"getOption\",\n\n \"optionValueBoolean\",\n\n \"optionValueEmpty\",\n\n \"optionValueInteger\",\n\n \"optionValueString\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 92, "score": 169014.66722627784 }, { "content": "fn deserialize_check_chat_username_result(\n\n rtd_trait_type: &str,\n\n 
rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(\n\n match CHECKCHATUSERNAMERESULT_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::CheckChatUsernameResult(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n },\n\n )\n\n}\n\n\n\nconst JSONVALUE_MEMBERS: &[&str] = &[\n\n \"getApplicationConfig\",\n\n \"getJsonValue\",\n\n \"jsonValueArray\",\n\n \"jsonValueBoolean\",\n\n \"jsonValueNull\",\n\n \"jsonValueNumber\",\n\n \"jsonValueObject\",\n\n \"jsonValueString\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 93, "score": 165198.6535159594 }, { "content": "fn deserialize_language_pack_string_value(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(\n\n match LANGUAGEPACKSTRINGVALUE_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::LanguagePackStringValue(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n },\n\n )\n\n}\n\n\n\nconst LOGSTREAM_MEMBERS: &[&str] = &[\n\n \"getLogStream\",\n\n \"logStreamDefault\",\n\n \"logStreamEmpty\",\n\n \"logStreamFile\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 94, "score": 165198.6535159594 }, { "content": "fn deserialize_can_transfer_ownership_result(\n\n rtd_trait_type: &str,\n\n rtd_trait_value: serde_json::Value,\n\n) -> Result<Option<TdType>, serde_json::Error> {\n\n Ok(\n\n match CANTRANSFEROWNERSHIPRESULT_MEMBERS.contains(&rtd_trait_type) {\n\n true => Some(TdType::CanTransferOwnershipResult(serde_json::from_value(\n\n rtd_trait_value,\n\n )?)),\n\n false => None,\n\n },\n\n )\n\n}\n\n\n\nconst CHATSTATISTICS_MEMBERS: &[&str] = &[\n\n \"chatStatisticsChannel\",\n\n \"chatStatisticsSupergroup\",\n\n \"getChatStatistics\",\n\n];\n\n\n", "file_path": "src/types/_common.rs", "rank": 95, "score": 165198.6535159594 }, { "content": "#[allow(dead_code)]\n\npub fn from_json<'a, T>(json: &'a str) -> 
RTDResult<T>\n\nwhere\n\n T: serde::de::Deserialize<'a>,\n\n{\n\n Ok(serde_json::from_str(json)?)\n\n}\n\n\n", "file_path": "src/types/_common.rs", "rank": 96, "score": 159065.48395042776 }, { "content": "pub fn new_client() -> ClientId {\n\n unsafe { td_create_client_id() }\n\n}\n\n\n", "file_path": "src/tdjson.rs", "rank": 97, "score": 153132.74431549333 }, { "content": "#[async_trait]\n\npub trait AuthStateHandler {\n\n /// Interacts with provided link\n\n async fn handle_other_device_confirmation(\n\n &self,\n\n wait_device_confirmation: &AuthorizationStateWaitOtherDeviceConfirmation,\n\n ) {\n\n println!(\n\n \"other device confirmation link: {}\",\n\n wait_device_confirmation.link()\n\n );\n\n }\n\n /// Returns wait code\n\n async fn handle_wait_code(&self, wait_code: &AuthorizationStateWaitCode) -> String;\n\n /// Returns database encryption key\n\n async fn handle_encryption_key(\n\n &self,\n\n wait_encryption_key: &AuthorizationStateWaitEncryptionKey,\n\n ) -> String;\n\n /// Returns password\n\n async fn handle_wait_password(&self, wait_password: &AuthorizationStateWaitPassword) -> String;\n", "file_path": "src/client/auth_handler.rs", "rank": 98, "score": 151974.90673522215 }, { "content": "/// A bridge between TDLib and rust-tdlib.\n\npub trait TdLibClient {\n\n fn send<Fnc: RFunction>(&self, client_id: tdjson::ClientId, fnc: Fnc) -> RTDResult<()>;\n\n fn receive(&self, timeout: f64) -> Option<String>;\n\n fn execute<Fnc: RFunction>(&self, fnc: Fnc) -> RTDResult<Option<String>>;\n\n fn new_client(&self) -> tdjson::ClientId;\n\n}\n\n\n\n#[derive(Clone, Debug, Copy)]\n\n/// Base implementation. 
See [tdjson](crate::tdjson) for details.\n\npub struct TdJson;\n\n\n\nimpl Default for TdJson {\n\n fn default() -> Self {\n\n Self\n\n }\n\n}\n\n\n\nimpl TdLibClient for TdJson {\n\n fn send<Fnc: RFunction>(&self, client_id: tdjson::ClientId, fnc: Fnc) -> RTDResult<()> {\n\n let json = fnc.to_json()?;\n", "file_path": "src/client/tdlib_client.rs", "rank": 99, "score": 151968.2911407397 } ]
Rust
crates/vkfft-sys/build.rs
semio-ai/vkfft-rs
2ee35297bdfef4aece8877f7b1c1c5b1c584766c
extern crate bindgen; extern crate cc; use std::error::Error; use std::path::{Path, PathBuf}; use bindgen::Bindings; fn build_lib<O, LD, L, const N: usize, const M: usize>(out_dir: O, library_dirs: LD, libraries: L, defines: &[(&str, &str); N], include_dirs: &[String; M]) -> Result<(), Box<dyn Error>> where O: AsRef<Path>, LD: Iterator, LD::Item: AsRef<str>, L: Iterator, L::Item: AsRef<str> { let mut build = cc::Build::default(); build .cpp(true) .file("wrapper.cpp") .include(out_dir) .flag("-std=c++11") .flag("-w"); for library_dir in library_dirs { build.flag(format!("-L{}", library_dir.as_ref()).as_str()); } for library in libraries { build.flag(format!("-l{}", library.as_ref()).as_str()); } build .cargo_metadata(true) .static_flag(true); for (key, value) in defines.iter() { build.define(*key, Some(*value)); } for include_dir in include_dirs.iter() { build.include(include_dir); } build.compile("vkfft"); Ok(()) } fn gen_wrapper<F, const N: usize, const M: usize>(file: F, defines: &[(&str, &str); N], include_dirs: &[String; M]) -> Result<Bindings, Box<dyn Error>> where F: AsRef<Path>, { let base_args = [ "-std=c++11".to_string(), ]; let defines: Vec<String> = defines.iter().map(|(k, v)| { format!("-D{}={}", k, v) }).collect(); let include_dirs: Vec<String> = include_dirs.iter() .map(|s| format!("-I{}", s)) .collect(); let clang_args = base_args .iter() .chain(defines.iter()) .chain(include_dirs.iter()); println!("{:?}", clang_args); let res = bindgen::Builder::default() .clang_args(clang_args) .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .header(file.as_ref().to_str().unwrap()) .allowlist_recursively(true) .allowlist_type("VkFFTConfiguration") .allowlist_type("VkFFTLaunchParams") .allowlist_type("VkFFTResult") .allowlist_type("VkFFTSpecializationConstantsLayout") .allowlist_type("VkFFTPushConstantsLayout") .allowlist_type("VkFFTAxis") .allowlist_type("VkFFTPlan") .allowlist_type("VkFFTApplication") .allowlist_function("VkFFTSync") 
.allowlist_function("VkFFTAppend") .allowlist_function("VkFFTPlanAxis") .allowlist_function("initializeVkFFT") .allowlist_function("deleteVkFFT") .allowlist_function("VkFFTGetVersion") .generate(); let bindings = match res { Ok(x) => x, Err(_) => { eprintln!("Failed to generate bindings."); std::process::exit(1); } }; Ok(bindings) } fn main() -> Result<(), Box<dyn Error>> { let vkfft_root = std::env::var("VKFFT_ROOT")?; let out_dir = std::env::var("OUT_DIR")?; let out_dir = PathBuf::from(out_dir); let library_dirs = [ format!("{}/build/glslang-master/glslang", vkfft_root), format!("{}/build/glslang-master/glslang/OSDependent/Unix", vkfft_root), format!("{}/build/glslang-master/glslang/OGLCompilersDLL", vkfft_root), format!("{}/build/glslang-master/glslang/SPIRV", vkfft_root), ]; let libraries = [ "glslang", "MachineIndependent", "OSDependent", "GenericCodeGen", "OGLCompiler", "vulkan", "SPIRV" ]; for library_dir in library_dirs.iter() { println!("cargo:rustc-link-search={}", library_dir); } for library in libraries.iter() { println!("cargo:rustc-link-lib={}", library); } println!("cargo:rerun-if-changed=wrapper.cpp"); println!("cargo:rerun-if-changed=build.rs"); let include_dirs = [ format!("{}/vkFFT", &vkfft_root), format!("{}/glslang-master/glslang/Include", vkfft_root) ]; let defines = [ ("VKFFT_BACKEND", "0"), ("VK_API_VERSION", "11") ]; let wrapper = std::fs::read_to_string(format!("{}/vkFFT/vkFFT.h", vkfft_root))? .replace("static inline", ""); let rw = out_dir.join("vkfft_rw.hpp"); std::fs::write(&rw, wrapper.as_str())?; build_lib(&out_dir, library_dirs.iter(), libraries.iter(), &defines, &include_dirs)?; let bindings = gen_wrapper(&rw, &defines, &include_dirs)?; bindings.write_to_file(out_dir.join("bindings.rs"))?; Ok(()) }
extern crate bindgen; extern crate cc; use std::error::Error; use std::path::{Path, PathBuf}; use bindgen::Bindings; fn build_lib<O, LD, L, const N: usize, const M: usize>(out_dir: O, library_dirs: LD, libraries: L, defines: &[(&str, &str); N], include_dirs: &[String; M]) -> Result<(), Box<dyn Error>> where O: AsRef<Path>, LD: Iterator, LD::Item: AsRef<str>, L: Iterator, L::Item: AsRef<str>
ings = gen_wrapper(&rw, &defines, &include_dirs)?; bindings.write_to_file(out_dir.join("bindings.rs"))?; Ok(()) }
{ let mut build = cc::Build::default(); build .cpp(true) .file("wrapper.cpp") .include(out_dir) .flag("-std=c++11") .flag("-w"); for library_dir in library_dirs { build.flag(format!("-L{}", library_dir.as_ref()).as_str()); } for library in libraries { build.flag(format!("-l{}", library.as_ref()).as_str()); } build .cargo_metadata(true) .static_flag(true); for (key, value) in defines.iter() { build.define(*key, Some(*value)); } for include_dir in include_dirs.iter() { build.include(include_dir); } build.compile("vkfft"); Ok(()) } fn gen_wrapper<F, const N: usize, const M: usize>(file: F, defines: &[(&str, &str); N], include_dirs: &[String; M]) -> Result<Bindings, Box<dyn Error>> where F: AsRef<Path>, { let base_args = [ "-std=c++11".to_string(), ]; let defines: Vec<String> = defines.iter().map(|(k, v)| { format!("-D{}={}", k, v) }).collect(); let include_dirs: Vec<String> = include_dirs.iter() .map(|s| format!("-I{}", s)) .collect(); let clang_args = base_args .iter() .chain(defines.iter()) .chain(include_dirs.iter()); println!("{:?}", clang_args); let res = bindgen::Builder::default() .clang_args(clang_args) .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .header(file.as_ref().to_str().unwrap()) .allowlist_recursively(true) .allowlist_type("VkFFTConfiguration") .allowlist_type("VkFFTLaunchParams") .allowlist_type("VkFFTResult") .allowlist_type("VkFFTSpecializationConstantsLayout") .allowlist_type("VkFFTPushConstantsLayout") .allowlist_type("VkFFTAxis") .allowlist_type("VkFFTPlan") .allowlist_type("VkFFTApplication") .allowlist_function("VkFFTSync") .allowlist_function("VkFFTAppend") .allowlist_function("VkFFTPlanAxis") .allowlist_function("initializeVkFFT") .allowlist_function("deleteVkFFT") .allowlist_function("VkFFTGetVersion") .generate(); let bindings = match res { Ok(x) => x, Err(_) => { eprintln!("Failed to generate bindings."); std::process::exit(1); } }; Ok(bindings) } fn main() -> Result<(), Box<dyn Error>> { let vkfft_root = 
std::env::var("VKFFT_ROOT")?; let out_dir = std::env::var("OUT_DIR")?; let out_dir = PathBuf::from(out_dir); let library_dirs = [ format!("{}/build/glslang-master/glslang", vkfft_root), format!("{}/build/glslang-master/glslang/OSDependent/Unix", vkfft_root), format!("{}/build/glslang-master/glslang/OGLCompilersDLL", vkfft_root), format!("{}/build/glslang-master/glslang/SPIRV", vkfft_root), ]; let libraries = [ "glslang", "MachineIndependent", "OSDependent", "GenericCodeGen", "OGLCompiler", "vulkan", "SPIRV" ]; for library_dir in library_dirs.iter() { println!("cargo:rustc-link-search={}", library_dir); } for library in libraries.iter() { println!("cargo:rustc-link-lib={}", library); } println!("cargo:rerun-if-changed=wrapper.cpp"); println!("cargo:rerun-if-changed=build.rs"); let include_dirs = [ format!("{}/vkFFT", &vkfft_root), format!("{}/glslang-master/glslang/Include", vkfft_root) ]; let defines = [ ("VKFFT_BACKEND", "0"), ("VK_API_VERSION", "11") ]; let wrapper = std::fs::read_to_string(format!("{}/vkFFT/vkFFT.h", vkfft_root))? .replace("static inline", ""); let rw = out_dir.join("vkfft_rw.hpp"); std::fs::write(&rw, wrapper.as_str())?; build_lib(&out_dir, library_dirs.iter(), libraries.iter(), &defines, &include_dirs)?; let bind
random
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"VkFFT version: {}\", vkfft::version());\n\n\n\n let instance = Instance::new(\n\n None,\n\n &InstanceExtensions {\n\n ext_debug_utils: true,\n\n ..InstanceExtensions::none()\n\n },\n\n vec![\"VK_LAYER_KHRONOS_validation\"],\n\n )?;\n\n\n\n let mut context = Context::new(&instance)?;\n\n\n\n let batch_count = 2;\n\n let coordinate_features = 2;\n\n let size = [32, 32];\n\n\n\n let kernel_size = batch_count * coordinate_features * 2 * (size[0] / 2 + 1) * size[1];\n\n\n", "file_path": "examples/convolution.rs", "rank": 3, "score": 79442.20417043607 }, { "content": "fn on_debug_message(msg: &Message) {\n\n if msg.ty.general && msg.severity.verbose {\n\n return;\n\n }\n\n\n\n let severity = if msg.severity.error {\n\n \"error\"\n\n } else if msg.severity.warning {\n\n \"warning\"\n\n } else if msg.severity.information {\n\n \"information\"\n\n } else if msg.severity.verbose {\n\n \"verbose\"\n\n } else {\n\n panic!(\"no-impl\");\n\n };\n\n\n\n let ty = if msg.ty.general {\n\n \"general\"\n\n } else if msg.ty.validation {\n", "file_path": "crates/util/src/lib.rs", "rank": 4, "score": 38610.456166227494 }, { "content": "pub fn convolve(\n\n context: &mut Context,\n\n coordinate_features: u32,\n\n size: &[u32; 2],\n\n kernel: &Arc<CpuAccessibleBuffer<[f32]>>,\n\n) -> Result<(), Box<dyn Error>> {\n\n let input_buffer_size = coordinate_features * 2 * (size[0] / 2 + 1) * size[1];\n\n let buffer_size = coordinate_features * 2 * (size[0] / 2 + 1) * size[1];\n\n\n\n let input_buffer = CpuAccessibleBuffer::from_iter(\n\n context.device.clone(),\n\n DEFAULT_BUFFER_USAGE,\n\n false,\n\n (0..input_buffer_size).map(|_| 0.0f32),\n\n )?;\n\n\n\n let buffer = CpuAccessibleBuffer::from_iter(\n\n context.device.clone(),\n\n DEFAULT_BUFFER_USAGE,\n\n false,\n", "file_path": "examples/convolution.rs", "rank": 5, "score": 27572.222669100418 }, { "content": "/// Transform a kernel from spatial data to 
frequency data\n\npub fn transform_kernel(\n\n context: &mut Context,\n\n coordinate_features: u32,\n\n batch_count: u32,\n\n size: &[u32; 2],\n\n kernel: &Arc<CpuAccessibleBuffer<[f32]>>,\n\n) -> Result<(), Box<dyn Error>> {\n\n // Configure kernel FFT\n\n let config = Config::builder()\n\n .physical_device(context.physical)\n\n .device(context.device.clone())\n\n .fence(&context.fence)\n\n .queue(context.queue.clone())\n\n .buffer(kernel.clone())\n\n .command_pool(context.pool.clone())\n\n .kernel_convolution()\n\n .normalize()\n\n .coordinate_features(coordinate_features)\n\n .batch_count(1)\n\n .r2c()\n", "file_path": "examples/convolution.rs", "rank": 6, "score": 26265.560785848018 }, { "content": "pub fn version() -> Version {\n\n let ver = unsafe { vkfft_sys::VkFFTGetVersion() };\n\n\n\n Version {\n\n major: ver / 10000,\n\n minor: ver % 10000 / 100,\n\n patch: ver % 100,\n\n }\n\n}\n", "file_path": "src/version.rs", "rank": 7, "score": 25792.257990956034 }, { "content": " }\n\n}\n\n\n\nimpl From<LaunchError> for Error {\n\n fn from(e: LaunchError) -> Self {\n\n Self::Launch(e)\n\n }\n\n}\n\n\n\npub(crate) fn check_error(result: vkfft_sys::VkFFTResult) -> Result<()> {\n\n match result.try_into() {\n\n Ok(err) => Err(err),\n\n Err(_) => Ok(()),\n\n }\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "src/error.rs", "rank": 8, "score": 23765.956125289544 }, { "content": "use std::convert::{TryFrom, TryInto};\n\n\n\nuse derive_more::{Display, Error};\n\n\n\nuse crate::{app::LaunchError, config::ConfigError};\n\n\n\n#[derive(Display, Debug, Error)]\n\npub enum Error {\n\n InvalidPhysicalDevice,\n\n InvalidDevice,\n\n InvalidQueue,\n\n InvalidCommandPool,\n\n InvalidFence,\n\n OnlyForwardFftInitialized,\n\n OnlyInverseFftInitialized,\n\n InvalidContext,\n\n InvalidPlatform,\n\n EmptyFftDim,\n\n EmptySize,\n\n EmptyBufferSize,\n", "file_path": "src/error.rs", "rank": 9, "score": 23764.376757912476 }, { "content": " 
FailedToReleaseCommandQueue,\n\n FailedToEnumerateDevices,\n\n Config(ConfigError),\n\n Launch(LaunchError),\n\n}\n\n\n\nimpl TryFrom<vkfft_sys::VkFFTResult> for Error {\n\n type Error = ();\n\n\n\n #[allow(non_upper_case_globals)]\n\n fn try_from(value: vkfft_sys::VkFFTResult) -> std::result::Result<Self, Self::Error> {\n\n use vkfft_sys::*;\n\n\n\n match value {\n\n VkFFTResult_VKFFT_ERROR_INVALID_PHYSICAL_DEVICE => Ok(Self::InvalidPhysicalDevice),\n\n VkFFTResult_VKFFT_ERROR_INVALID_DEVICE => Ok(Self::InvalidDevice),\n\n VkFFTResult_VKFFT_ERROR_INVALID_QUEUE => Ok(Self::InvalidQueue),\n\n VkFFTResult_VKFFT_ERROR_INVALID_COMMAND_POOL => Ok(Self::InvalidCommandPool),\n\n VkFFTResult_VKFFT_ERROR_INVALID_FENCE => Ok(Self::InvalidFence),\n\n VkFFTResult_VKFFT_ERROR_ONLY_FORWARD_FFT_INITIALIZED => Ok(Self::OnlyForwardFftInitialized),\n", "file_path": "src/error.rs", "rank": 10, "score": 23764.330272277733 }, { "content": " VkFFTResult_VKFFT_ERROR_ONLY_INVERSE_FFT_INITIALIZED => Ok(Self::OnlyInverseFftInitialized),\n\n VkFFTResult_VKFFT_ERROR_INVALID_CONTEXT => Ok(Self::InvalidContext),\n\n VkFFTResult_VKFFT_ERROR_INVALID_PLATFORM => Ok(Self::InvalidPlatform),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_FFTdim => Ok(Self::EmptyFftDim),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_size => Ok(Self::EmptySize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_bufferSize => Ok(Self::EmptyBufferSize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_buffer => Ok(Self::EmptyBuffer),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_tempBufferSize => Ok(Self::EmptyTempBufferSize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_tempBuffer => Ok(Self::EmptyTempBuffer),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_inputBufferSize => Ok(Self::EmptyInputBufferSize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_inputBuffer => Ok(Self::EmptyInputBuffer),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_outputBufferSize => Ok(Self::EmptyOutputBufferSize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_outputBuffer => Ok(Self::EmptyOutputBuffer),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_kernelSize => 
Ok(Self::EmptyKernelSize),\n\n VkFFTResult_VKFFT_ERROR_EMPTY_kernel => Ok(Self::EmptyKernel),\n\n VkFFTResult_VKFFT_ERROR_UNSUPPORTED_RADIX => Ok(Self::UnsupportedRadix),\n\n VkFFTResult_VKFFT_ERROR_UNSUPPORTED_FFT_LENGTH => Ok(Self::UnsupportedFftLength),\n\n VkFFTResult_VKFFT_ERROR_UNSUPPORTED_FFT_LENGTH_R2C => Ok(Self::UnsupportedFftLengthR2C),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_ALLOCATE => Ok(Self::FailedToAllocate),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_MAP_MEMORY => Ok(Self::FailedToMapMemory),\n", "file_path": "src/error.rs", "rank": 11, "score": 23763.327558929726 }, { "content": " VkFFTResult_VKFFT_ERROR_FAILED_TO_ALLOCATE_MEMORY => Ok(Self::FailedToAllocateMemory),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_BIND_BUFFER_MEMORY => Ok(Self::FailedToBindBufferMemory),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_FIND_MEMORY => Ok(Self::FailedToFindMemory),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_SYNCHRONIZE => Ok(Self::FailedToSynchronize),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_COPY => Ok(Self::FailedToCopy),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_PROGRAM => Ok(Self::FailedToCreateProgram),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_COMPILE_PROGRAM => Ok(Self::FailedToCompileProgram),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_GET_CODE_SIZE => Ok(Self::FailedToGetCodeSize),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_GET_CODE => Ok(Self::FailedToGetCode),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_DESTROY_PROGRAM => Ok(Self::FailedToDestroyProgram),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_LOAD_MODULE => Ok(Self::FailedToLoadModule),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_GET_FUNCTION => Ok(Self::FailedToGetFunction),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_SET_DYNAMIC_SHARED_MEMORY => {\n\n Ok(Self::FailedToSetDynamicSharedMemory)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_MODULE_GET_GLOBAL => Ok(Self::FailedToModuleGetGlobal),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_LAUNCH_KERNEL => Ok(Self::FailedToLaunchKernel),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_EVENT_RECORD => 
Ok(Self::FailedToEventRecord),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_ADD_NAME_EXPRESSION => Ok(Self::FailedToAddNameExpression),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_INITIALIZE => Ok(Self::FailedToInitialize),\n", "file_path": "src/error.rs", "rank": 12, "score": 23763.249812491715 }, { "content": " VkFFTResult_VKFFT_ERROR_FAILED_TO_SET_DEVICE_ID => Ok(Self::FailedToSetDeviceId),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_GET_DEVICE => Ok(Self::FailedToGetDevice),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_CONTEXT => Ok(Self::FailedToCreateContext),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_PIPELINE => Ok(Self::FailedToCreatePipeline),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_SET_KERNEL_ARG => Ok(Self::FailedToSetKernelArg),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_COMMAND_QUEUE => {\n\n Ok(Self::FailedToCreateCommandQueue)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_RELEASE_COMMAND_QUEUE => {\n\n Ok(Self::FailedToReleaseCommandQueue)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_ENUMERATE_DEVICES => Ok(Self::FailedToEnumerateDevices),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ConfigError> for Error {\n\n fn from(e: ConfigError) -> Self {\n\n Self::Config(e)\n", "file_path": "src/error.rs", "rank": 13, "score": 23763.22046339516 }, { "content": " VkFFTResult_VKFFT_ERROR_FAILED_TO_ALLOCATE_COMMAND_BUFFERS => {\n\n Ok(Self::FailedToAllocateCommandBuffers)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_BEGIN_COMMAND_BUFFER => {\n\n Ok(Self::FailedToBeginCommandBuffer)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_END_COMMAND_BUFFER => Ok(Self::FailedToEndCommandBuffer),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_SUBMIT_QUEUE => Ok(Self::FailedToSubmitQueue),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_WAIT_FOR_FENCES => Ok(Self::FailedToWaitForFences),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_RESET_FENCES => Ok(Self::FailedToResetFences),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_DESCRIPTOR_POOL => {\n\n Ok(Self::FailedToCreateDescriptorPool)\n\n }\n\n 
VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_DESCRIPTOR_SET_LAYOUT => {\n\n Ok(Self::FailedToCreatedDescriptorSetLayout)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_ALLOCATE_DESCRIPTOR_SETS => {\n\n Ok(Self::FailedToAllocateDescriptorSets)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_PIPELINE_LAYOUT => {\n", "file_path": "src/error.rs", "rank": 14, "score": 23763.113124383137 }, { "content": " Ok(Self::FailedToCreatePipelineLayout)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_SHADER_PREPROCESS => Ok(Self::FailedShaderPreprocess),\n\n VkFFTResult_VKFFT_ERROR_FAILED_SHADER_PARSE => Ok(Self::FailedShaderParse),\n\n VkFFTResult_VKFFT_ERROR_FAILED_SHADER_LINK => Ok(Self::FailedShaderLink),\n\n VkFFTResult_VKFFT_ERROR_FAILED_SPIRV_GENERATE => Ok(Self::FailedSpirvGenerate),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_SHADER_MODULE => {\n\n Ok(Self::FailedToCreateShaderModule)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_INSTANCE => Ok(Self::FailedToCreateInstance),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_SETUP_DEBUG_MESSENGER => {\n\n Ok(Self::FailedToSetupDebugMessenger)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_FIND_PHYSICAL_DEVICE => {\n\n Ok(Self::FailedToFindPhysicalDevice)\n\n }\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_DEVICE => Ok(Self::FailedToCreateDevice),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_FENCE => Ok(Self::FailedToCreateFence),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_COMMAND_POOL => Ok(Self::FailedToCreateCommandPool),\n\n VkFFTResult_VKFFT_ERROR_FAILED_TO_CREATE_BUFFER => Ok(Self::FailedToCreateBuffer),\n", "file_path": "src/error.rs", "rank": 15, "score": 23763.11092203141 }, { "content": " EmptyBuffer,\n\n EmptyTempBufferSize,\n\n EmptyTempBuffer,\n\n EmptyInputBufferSize,\n\n EmptyInputBuffer,\n\n EmptyOutputBufferSize,\n\n EmptyOutputBuffer,\n\n EmptyKernelSize,\n\n EmptyKernel,\n\n UnsupportedRadix,\n\n UnsupportedFftLength,\n\n UnsupportedFftLengthR2C,\n\n FailedToAllocate,\n\n FailedToMapMemory,\n\n 
FailedToAllocateCommandBuffers,\n\n FailedToBeginCommandBuffer,\n\n FailedToEndCommandBuffer,\n\n FailedToSubmitQueue,\n\n FailedToWaitForFences,\n\n FailedToResetFences,\n", "file_path": "src/error.rs", "rank": 16, "score": 23757.507004006133 }, { "content": " FailedToCopy,\n\n FailedToCreateProgram,\n\n FailedToCompileProgram,\n\n FailedToGetCodeSize,\n\n FailedToGetCode,\n\n FailedToDestroyProgram,\n\n FailedToLoadModule,\n\n FailedToGetFunction,\n\n FailedToSetDynamicSharedMemory,\n\n FailedToModuleGetGlobal,\n\n FailedToLaunchKernel,\n\n FailedToEventRecord,\n\n FailedToAddNameExpression,\n\n FailedToInitialize,\n\n FailedToSetDeviceId,\n\n FailedToGetDevice,\n\n FailedToCreateContext,\n\n FailedToCreatePipeline,\n\n FailedToSetKernelArg,\n\n FailedToCreateCommandQueue,\n", "file_path": "src/error.rs", "rank": 17, "score": 23757.507004006133 }, { "content": " FailedToCreateDescriptorPool,\n\n FailedToCreatedDescriptorSetLayout,\n\n FailedToAllocateDescriptorSets,\n\n FailedToCreatePipelineLayout,\n\n FailedShaderPreprocess,\n\n FailedShaderParse,\n\n FailedShaderLink,\n\n FailedSpirvGenerate,\n\n FailedToCreateShaderModule,\n\n FailedToCreateInstance,\n\n FailedToSetupDebugMessenger,\n\n FailedToFindPhysicalDevice,\n\n FailedToCreateDevice,\n\n FailedToCreateFence,\n\n FailedToCreateCommandPool,\n\n FailedToCreateBuffer,\n\n FailedToAllocateMemory,\n\n FailedToBindBufferMemory,\n\n FailedToFindMemory,\n\n FailedToSynchronize,\n", "file_path": "src/error.rs", "rank": 18, "score": 23757.507004006133 }, { "content": " .pool\n\n .alloc_command_buffers(secondary, 1)?\n\n .next()\n\n .ok_or(\"Failed to allocate cmd buffer\")?,\n\n )\n\n }\n\n\n\n pub fn alloc_primary_cmd_buffer(&self) -> Result<UnsafeCommandPoolAlloc, Box<dyn Error>> {\n\n self.alloc_cmd_buffer(false)\n\n }\n\n\n\n pub fn alloc_secondary_cmd_buffer(&self) -> Result<UnsafeCommandPoolAlloc, Box<dyn Error>> {\n\n self.alloc_cmd_buffer(true)\n\n }\n\n}\n\n\n\npub struct SizeIterator<'a> {\n\n size: &'a 
[u32; 2],\n\n pos: [u32; 2],\n\n total: u32,\n", "file_path": "crates/util/src/lib.rs", "rank": 23, "score": 20152.289692327984 }, { "content": "use vulkano::{buffer::{BufferAccess, CpuAccessibleBuffer}, command_buffer::pool::{UnsafeCommandPool, UnsafeCommandPoolAlloc}};\n\nuse vulkano::command_buffer::{submit::SubmitCommandBufferBuilder, sys::UnsafeCommandBuffer};\n\nuse vulkano::device::{Device, DeviceExtensions, Features, Queue};\n\nuse vulkano::instance::debug::{DebugCallback, Message, MessageSeverity, MessageType};\n\nuse vulkano::instance::{Instance, PhysicalDevice};\n\nuse vulkano::sync::Fence;\n\n\n\nuse std::{error::Error, fmt::{Display, Formatter}, sync::Arc};\n\n\n\nconst MESSAGE_SEVERITIES: MessageSeverity = MessageSeverity {\n\n error: true,\n\n warning: true,\n\n information: true,\n\n verbose: true,\n\n};\n\n\n", "file_path": "crates/util/src/lib.rs", "rank": 24, "score": 20151.768259423923 }, { "content": " )?);\n\n\n\n let fence = Fence::alloc(device.clone())?;\n\n\n\n Ok(Self {\n\n instance,\n\n physical,\n\n queue,\n\n device,\n\n pool,\n\n fence,\n\n _debug_cb: debug_cb,\n\n })\n\n }\n\n\n\n pub fn submit(\n\n &mut self,\n\n command_buffer: UnsafeCommandBuffer,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n unsafe {\n", "file_path": "crates/util/src/lib.rs", "rank": 25, "score": 20151.028490678025 }, { "content": " iter: u32\n\n}\n\n\n\nimpl<'a> SizeIterator<'a> {\n\n pub fn new(size: &'a [u32; 2]) -> Self {\n\n let total = size.iter().cloned().reduce(|a, b| a * b).unwrap();\n\n Self { size, pos: [0; 2], total, iter: 0 }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for SizeIterator<'a> {\n\n type Item = [u32; 2];\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.iter >= self.total - 1 {\n\n return None;\n\n }\n\n\n\n let ret = Some([self.iter % self.size[0], self.iter / self.size[0]]);\n\n \n\n self.iter += 1;\n\n \n\n ret\n\n }\n\n}\n\n\n", "file_path": "crates/util/src/lib.rs", "rank": 27, "score": 20150.560257744288 }, { "content": 
" pub queue: Arc<Queue>,\n\n pub pool: Arc<UnsafeCommandPool>,\n\n pub fence: Fence,\n\n _debug_cb: Option<DebugCallback>,\n\n}\n\n\n\nimpl<'a> Context<'a> {\n\n pub fn new(instance: &'a Arc<Instance>) -> Result<Self, Box<dyn std::error::Error>> {\n\n let debug_cb = DebugCallback::new(\n\n &instance,\n\n MESSAGE_SEVERITIES,\n\n MessageType::all(),\n\n on_debug_message,\n\n )\n\n .ok();\n\n\n\n let physical = PhysicalDevice::enumerate(&instance)\n\n .next()\n\n .ok_or(\"No device available\")?;\n\n\n", "file_path": "crates/util/src/lib.rs", "rank": 28, "score": 20150.161567578543 }, { "content": "\n\nimpl<'a> MatrixFormatter<'a> {\n\n pub fn new(size: &'a [u32; 2], data: &'a Arc<CpuAccessibleBuffer<[f32]>>) -> Self {\n\n Self {\n\n size,\n\n data\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Display for MatrixFormatter<'a>\n\n{\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let data = self.data.read().unwrap();\n\n for j in 0..self.size[1] {\n\n for i in 0..self.size[0] {\n\n let value = data[(j * self.size[0] + i) as usize];\n\n if value >= 0.0f32 {\n\n write!(f, \" \")?;\n\n }\n", "file_path": "crates/util/src/lib.rs", "rank": 29, "score": 20150.038873658803 }, { "content": " let mut submit = SubmitCommandBufferBuilder::new();\n\n submit.add_command_buffer(&command_buffer);\n\n submit.set_fence_signal(&self.fence);\n\n\n\n submit.submit(&self.queue)?;\n\n\n\n self.fence.wait(None)?;\n\n\n\n self.fence.reset()?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn alloc_cmd_buffer(\n\n &self,\n\n secondary: bool,\n\n ) -> Result<UnsafeCommandPoolAlloc, Box<dyn Error>> {\n\n Ok(\n\n self\n", "file_path": "crates/util/src/lib.rs", "rank": 30, "score": 20149.879150637433 }, { "content": " println!(\"Using {}\", physical.name());\n\n\n\n let queue_family = physical\n\n .queue_families()\n\n .find(|&q| q.supports_compute() && q.supports_graphics())\n\n .ok_or(\"Couldn't find a compute queue family\")?;\n\n\n\n let (device, mut queues) = Device::new(\n\n physical,\n\n 
&Features::none(),\n\n &DeviceExtensions::none(),\n\n [(queue_family, 0.5)].iter().cloned(),\n\n )?;\n\n\n\n let queue = queues.next().unwrap();\n\n let pool = Arc::new(UnsafeCommandPool::new(\n\n device.clone(),\n\n queue_family,\n\n false,\n\n true,\n", "file_path": "crates/util/src/lib.rs", "rank": 31, "score": 20149.62716916017 }, { "content": "\n\n let spaces = 3 - (value.floor() as i64).digits();\n\n for _ in 0..spaces {\n\n write!(f, \" \")?;\n\n }\n\n\n\n write!(f, \"{:.1}\", value)?;\n\n }\n\n writeln!(f)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn check_digits() {\n\n assert_eq!(100i64.digits(), 3);\n\n assert_eq!(1000i64.digits(), 4);\n\n assert_eq!((-1000i64).digits(), 4);\n\n assert_eq!((-1i64).digits(), 1);\n\n assert_eq!(0i64.digits(), 1);\n\n assert_eq!((-9.6f32.floor() as i64).digits(), 1)\n\n }\n\n}", "file_path": "crates/util/src/lib.rs", "rank": 32, "score": 20148.270112873186 }, { "content": " \"validation\"\n\n } else if msg.ty.performance {\n\n \"performance\"\n\n } else {\n\n panic!(\"no-impl\");\n\n };\n\n\n\n eprintln!(\n\n \"{} {} {}: {}\",\n\n msg.layer_prefix.unwrap_or(\"unknown\"),\n\n ty,\n\n severity,\n\n msg.description\n\n );\n\n}\n\n\n\npub struct Context<'a> {\n\n pub instance: &'a Arc<Instance>,\n\n pub physical: PhysicalDevice<'a>,\n\n pub device: Arc<Device>,\n", "file_path": "crates/util/src/lib.rs", "rank": 34, "score": 20146.77545089491 }, { "content": "#include <vkfft_rw.hpp>", "file_path": "crates/vkfft-sys/wrapper.cpp", "rank": 35, "score": 20146.77545089491 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/bindings.rs\"));\n", "file_path": "crates/vkfft-sys/src/lib.rs", "rank": 36, "score": 18890.916058610288 }, { "content": "pub trait Digits {\n\n fn digits(&self) -> usize;\n\n}\n\n\n\nimpl Digits for i64 {\n\n fn digits(&self) -> usize {\n\n 
let mut this = *self;\n\n let mut ret = 1;\n\n while this / 10 != 0 {\n\n ret += 1;\n\n this /= 10;\n\n }\n\n ret\n\n }\n\n}\n\n\n\npub struct MatrixFormatter<'a> {\n\n size: &'a [u32; 2],\n\n data: &'a Arc<CpuAccessibleBuffer<[f32]>>\n\n}\n", "file_path": "crates/util/src/lib.rs", "rank": 37, "score": 17782.4387830366 }, { "content": "use std::sync::Arc;\n\n\n\nuse error::check_error;\n\nuse vulkano::{buffer::BufferAccess, VulkanHandle, VulkanObject};\n\n\n\nuse crate::{\n\n config::{Config, ConfigGuard},\n\n error,\n\n};\n\n\n\nuse std::pin::Pin;\n\nuse vk_sys as vk;\n\n\n\nuse std::ptr::addr_of_mut;\n\n\n\nuse derive_more::{Display, Error};\n\n\n\n#[derive(Display, Debug, Error)]\n\npub enum BuildError {\n\n NoCommandBuffer,\n", "file_path": "src/app.rs", "rank": 38, "score": 8.00406982351089 }, { "content": "use vkfft::app::App;\n\nuse vkfft::app::LaunchParams;\n\nuse vkfft::config::Config;\n\n\n\nuse vulkano::buffer::{BufferUsage, CpuAccessibleBuffer};\n\nuse vulkano::command_buffer::{\n\n sys::{Flags, UnsafeCommandBufferBuilder},\n\n Kind,\n\n};\n\n\n\nuse vulkano::instance::{Instance, InstanceExtensions};\n\n\n\nuse std::{error::Error, sync::Arc};\n\n\n\nuse util::{Context, SizeIterator, MatrixFormatter};\n\n\n\nconst DEFAULT_BUFFER_USAGE: BufferUsage = BufferUsage {\n\n storage_buffer: true,\n\n transfer_source: true,\n\n transfer_destination: true,\n\n ..BufferUsage::none()\n\n};\n\n\n\n\n\n\n\n\n\n/// Transform a kernel from spatial data to frequency data\n", "file_path": "examples/convolution.rs", "rank": 39, "score": 5.9935953369069805 }, { "content": " convolution: false,\n\n use_lut: false,\n\n symmetric_kernel: false,\n\n input_formatted: None,\n\n output_formatted: None,\n\n kernel: None,\n\n }\n\n }\n\n\n\n pub fn dim<const N: usize>(mut self, dim: &[u32; N]) -> Self {\n\n let len = dim.len();\n\n assert!(len <= 3);\n\n\n\n self.fft_dim = len as u32;\n\n if len > 0 {\n\n self.size[0] = dim[0];\n\n }\n\n if len > 1 {\n\n self.size[1] = dim[1];\n\n 
}\n", "file_path": "src/config.rs", "rank": 40, "score": 5.72404719469171 }, { "content": "\n\n pub fn inverse(&mut self, params: &mut LaunchParams) -> error::Result<()> {\n\n self.launch(params, true)\n\n }\n\n}\n\n\n\nimpl Drop for App {\n\n fn drop(&mut self) {\n\n use vkfft_sys::*;\n\n\n\n unsafe {\n\n deleteVkFFT(std::ptr::addr_of_mut!(self.app));\n\n }\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 41, "score": 5.568742337715296 }, { "content": "use std::sync::Arc;\n\n\n\nuse derive_more::{Display, Error};\n\nuse std::pin::Pin;\n\nuse vulkano::{\n\n buffer::BufferAccess,\n\n command_buffer::pool::UnsafeCommandPool,\n\n device::{Device, Queue},\n\n instance::PhysicalDevice,\n\n sync::Fence,\n\n SynchronizedVulkanObject, VulkanHandle, VulkanObject,\n\n};\n\n\n\nuse std::ptr::addr_of_mut;\n\n\n\n#[derive(Display, Debug, Error)]\n\npub enum BuildError {\n\n NoPhysicalDevice,\n\n NoDevice,\n\n NoQueue,\n", "file_path": "src/config.rs", "rank": 42, "score": 5.41958681213855 }, { "content": " self.batch_count\n\n }\n\n\n\n pub fn use_lut(&self) -> bool {\n\n self.use_lut\n\n }\n\n\n\n pub(crate) fn as_sys(&self) -> Result<Pin<Box<ConfigGuard>>, ConfigError> {\n\n use std::mem::{transmute, zeroed};\n\n\n\n unsafe {\n\n let keep_alive = KeepAlive {\n\n device: self.device.clone(),\n\n buffer: self.buffer.as_ref().map(|b| b.as_buffer().cloned()).flatten(),\n\n input_buffer: self.input_buffer.as_ref().map(|b| b.as_buffer().cloned()).flatten(),\n\n output_buffer: self.output_buffer.as_ref().map(|b| b.as_buffer().cloned()).flatten(),\n\n kernel: self.kernel.as_ref().map(|b| b.as_buffer().cloned()).flatten(),\n\n command_pool: self.command_pool.clone(),\n\n queue: self.queue.clone(),\n\n temp_buffer: self.temp_buffer.as_ref().map(|b| b.as_buffer().cloned()).flatten()\n", "file_path": "src/config.rs", "rank": 43, "score": 5.347671808344652 }, { "content": " check_error(unsafe { initializeVkFFT(std::ptr::addr_of_mut!(res.app), res.config.config) })?;\n\n\n\n Ok(res)\n\n 
}\n\n\n\n pub fn launch(&mut self, params: &mut LaunchParams, inverse: bool) -> error::Result<()> {\n\n use vkfft_sys::VkFFTAppend;\n\n\n\n let mut params = params.as_sys();\n\n\n\n if self.config.buffer.is_some() && params.buffer.is_some() {\n\n return Err(LaunchError::ConfigSpecifiesBuffer.into());\n\n }\n\n\n\n if self.config.temp_buffer.is_some() && params.temp_buffer.is_some() {\n\n return Err(LaunchError::ConfigSpecifiesTempBuffer.into());\n\n }\n\n\n\n if self.config.input_buffer.is_some() && params.input_buffer.is_some() {\n\n return Err(LaunchError::ConfigSpecifiesInputBuffer.into());\n", "file_path": "src/app.rs", "rank": 44, "score": 4.91309755595971 }, { "content": "#![feature(core_intrinsics)]\n\n\n\npub mod app;\n\npub mod config;\n\npub mod error;\n\nmod version;\n\n\n\npub use version::*;\n", "file_path": "src/lib.rs", "rank": 45, "score": 4.48754187885295 }, { "content": " pub(crate) input_buffer_size: u64,\n\n pub(crate) input_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) output_buffer_size: u64,\n\n pub(crate) output_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) temp_buffer_size: u64,\n\n pub(crate) temp_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) kernel_size: u64,\n\n pub(crate) kernel: Option<vk_sys::Buffer>,\n\n}\n\n\n\nimpl<'a> Config<'a> {\n\n pub fn builder() -> ConfigBuilder<'a> {\n\n ConfigBuilder::new()\n\n }\n\n\n\n pub fn buffer_size(&self) -> usize {\n\n self.buffer.as_ref().map(|b| b.size()).unwrap_or(0)\n\n }\n\n\n\n pub fn buffer(&self) -> Option<&BufferDesc> {\n", "file_path": "src/config.rs", "rank": 46, "score": 4.3622674344623205 }, { "content": " }\n\n if len > 2 {\n\n self.zero_padding[2] = zero_padding[2];\n\n }\n\n self\n\n }\n\n\n\n pub fn zeropad_left<const N: usize>(mut self, zeropad_left: &[u32; N]) -> Self {\n\n let len = zeropad_left.len();\n\n assert!(len <= 3);\n\n\n\n if len > 0 {\n\n self.zeropad_left[0] = zeropad_left[0];\n\n }\n\n if len > 1 {\n\n self.zeropad_left[1] = zeropad_left[1];\n\n }\n\n if len > 2 
{\n\n self.zeropad_left[2] = zeropad_left[2];\n\n }\n", "file_path": "src/config.rs", "rank": 47, "score": 4.2509103748009975 }, { "content": " }\n\n\n\n if self.config.output_buffer.is_some() && params.output_buffer.is_some() {\n\n return Err(LaunchError::ConfigSpecifiesOutputBuffer.into());\n\n }\n\n\n\n check_error(unsafe {\n\n VkFFTAppend(\n\n std::ptr::addr_of_mut!(self.app),\n\n if inverse { 1 } else { -1 },\n\n std::ptr::addr_of_mut!(params.params),\n\n )\n\n })?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn forward(&mut self, params: &mut LaunchParams) -> error::Result<()> {\n\n self.launch(params, false)\n\n }\n", "file_path": "src/app.rs", "rank": 48, "score": 4.138759621637908 }, { "content": " self\n\n }\n\n\n\n pub fn zeropad_right<const N: usize>(mut self, zeropad_right: &[u32; N]) -> Self {\n\n let len = zeropad_right.len();\n\n assert!(len <= 3);\n\n\n\n if len > 0 {\n\n self.zeropad_right[0] = zeropad_right[0];\n\n }\n\n if len > 1 {\n\n self.zeropad_right[1] = zeropad_right[1];\n\n }\n\n if len > 2 {\n\n self.zeropad_right[2] = zeropad_right[2];\n\n }\n\n self\n\n }\n\n\n\n pub fn batch_count(mut self, batch_count: u32) -> Self {\n", "file_path": "src/config.rs", "rank": 49, "score": 4.115527162411942 }, { "content": "pub struct App {\n\n app: vkfft_sys::VkFFTApplication,\n\n\n\n // Safety: We must keep a copy of the config to ensure our resources are kept alive\n\n config: Pin<Box<ConfigGuard>>,\n\n}\n\n\n\nimpl App {\n\n pub fn new(config: Config) -> error::Result<Pin<Box<Self>>> {\n\n use vkfft_sys::*;\n\n\n\n let app: VkFFTApplication = unsafe { std::mem::zeroed() };\n\n\n\n let sys_config = config.as_sys()?;\n\n\n\n let mut res = Box::pin(Self {\n\n app,\n\n config: sys_config,\n\n });\n\n\n", "file_path": "src/app.rs", "rank": 50, "score": 3.9405314949222197 }, { "content": " zero_padding: [bool; 3usize],\n\n zeropad_left: [u32; 3usize],\n\n zeropad_right: [u32; 3usize],\n\n kernel_convolution: bool,\n\n convolution: bool,\n\n r2c: bool,\n\n 
coordinate_features: u32,\n\n disable_reorder_four_step: bool,\n\n batch_count: Option<u32>,\n\n precision: Precision,\n\n use_lut: bool,\n\n symmetric_kernel: bool,\n\n input_formatted: Option<bool>,\n\n output_formatted: Option<bool>,\n\n}\n\n\n\nimpl<'a> ConfigBuilder<'a> {\n\n pub fn new() -> Self {\n\n Self {\n\n fft_dim: 1,\n", "file_path": "src/config.rs", "rank": 51, "score": 3.861339163682255 }, { "content": " self.batch_count = Some(batch_count);\n\n self\n\n }\n\n\n\n pub fn input_formatted(mut self, input_formatted: bool) -> Self {\n\n self.input_formatted = Some(input_formatted);\n\n self\n\n }\n\n\n\n pub fn output_formatted(mut self, output_formatted: bool) -> Self {\n\n self.output_formatted = Some(output_formatted);\n\n self\n\n }\n\n\n\n pub fn build(self) -> Result<Config<'a>, BuildError> {\n\n let physical_device = match self.physical_device {\n\n Some(v) => v,\n\n None => return Err(BuildError::NoPhysicalDevice),\n\n };\n\n\n", "file_path": "src/config.rs", "rank": 52, "score": 3.693075250886366 }, { "content": "\n\n pub fn coordinate_features(mut self, coordinate_features: u32) -> Self {\n\n self.coordinate_features = coordinate_features;\n\n self\n\n }\n\n\n\n pub fn disable_reorder_four_step(mut self) -> Self {\n\n self.disable_reorder_four_step = true;\n\n self\n\n }\n\n\n\n pub fn zero_padding<const N: usize>(mut self, zero_padding: &[bool; N]) -> Self {\n\n let len = zero_padding.len();\n\n assert!(len <= 3);\n\n\n\n if len > 0 {\n\n self.zero_padding[0] = zero_padding[0];\n\n }\n\n if len > 1 {\n\n self.zero_padding[1] = zero_padding[1];\n", "file_path": "src/config.rs", "rank": 53, "score": 3.686551648685466 }, { "content": " (0..buffer_size).map(|_| 0.0f32),\n\n )?;\n\n\n\n {\n\n let mut buffer = input_buffer.write()?;\n\n\n\n for v in 0..coordinate_features {\n\n for [i, j] in SizeIterator::new(size) {\n\n let _0 = i + j * (size[0] / 2) + v * (size[0] / 2) * size[1];\n\n buffer[_0 as usize] = 1.0f32;\n\n }\n\n }\n\n }\n\n\n\n 
println!(\"Buffer:\");\n\n println!(\"{}\", MatrixFormatter::new(size, &input_buffer));\n\n println!();\n\n\n\n // Configure kernel FFT\n\n let conv_config = Config::builder()\n", "file_path": "examples/convolution.rs", "rank": 54, "score": 3.413880863769175 }, { "content": " pub temp_buffer: Option<BufferDesc>,\n\n pub kernel: Option<BufferDesc>,\n\n\n\n /// Normalize inverse transform\n\n pub normalize: bool,\n\n\n\n /// Don't read some data/perform computations if some input sequences are zeropadded for each axis\n\n pub zero_padding: [bool; 3usize],\n\n\n\n /// Specify start boundary of zero block in the system for each axis\n\n pub zeropad_left: [u32; 3usize],\n\n\n\n /// Specify end boundary of zero block in the system for each axis\n\n pub zeropad_right: [u32; 3usize],\n\n\n\n /// Specify if this application is used to create kernel for convolution, so it has the same properties\n\n pub kernel_convolution: bool,\n\n\n\n /// Perform convolution in this application (0 - off, 1 - on). 
Disables reorderFourStep parameter\n\n pub convolution: bool,\n", "file_path": "src/config.rs", "rank": 55, "score": 3.338859127458163 }, { "content": " pub fn output_buffer(mut self, output_buffer: Arc<dyn BufferAccess>) -> Self {\n\n self.output_buffer = Some(output_buffer);\n\n self\n\n }\n\n\n\n pub fn kernel(mut self, kernel: Arc<dyn BufferAccess>) -> Self {\n\n self.kernel = Some(kernel);\n\n self\n\n }\n\n\n\n pub fn build(self) -> Result<LaunchParams, BuildError> {\n\n let command_buffer = match self.command_buffer {\n\n Some(command_buffer) => command_buffer,\n\n None => return Err(BuildError::NoCommandBuffer),\n\n };\n\n\n\n Ok(LaunchParams {\n\n buffer: self.buffer,\n\n command_buffer,\n\n input_buffer: self.input_buffer,\n", "file_path": "src/app.rs", "rank": 56, "score": 3.2370347031186015 }, { "content": " output_buffer: self.output_buffer,\n\n temp_buffer: self.temp_buffer,\n\n kernel: self.kernel,\n\n })\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub(crate) struct LaunchParamsGuard {\n\n pub(crate) params: vkfft_sys::VkFFTLaunchParams,\n\n pub(crate) command_buffer: vk_sys::CommandBuffer,\n\n pub(crate) buffer: Option<vk_sys::Buffer>,\n\n pub(crate) temp_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) input_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) output_buffer: Option<vk_sys::Buffer>,\n\n pub(crate) kernel: Option<vk_sys::Buffer>,\n\n}\n\n\n\npub struct LaunchParams {\n\n pub command_buffer: vk::CommandBuffer,\n", "file_path": "src/app.rs", "rank": 57, "score": 3.2004995957678277 }, { "content": " pub command_pool: Arc<UnsafeCommandPool>,\n\n\n\n pub buffer: Option<Arc<dyn BufferAccess>>,\n\n pub input_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub output_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub temp_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub kernel: Option<Arc<dyn BufferAccess>>,\n\n}\n\n\n\n#[repr(C)]\n\npub(crate) struct ConfigGuard {\n\n pub(crate) keep_alive: KeepAlive,\n\n pub(crate) config: vkfft_sys::VkFFTConfiguration,\n\n pub(crate) 
physical_device: vk_sys::PhysicalDevice,\n\n pub(crate) device: vk_sys::Device,\n\n pub(crate) queue: vk_sys::Queue,\n\n pub(crate) command_pool: vk_sys::CommandPool,\n\n pub(crate) fence: vk_sys::Fence,\n\n pub(crate) buffer_size: u64,\n\n pub(crate) buffer: Option<vk_sys::Buffer>,\n", "file_path": "src/config.rs", "rank": 58, "score": 3.1429887770275347 }, { "content": " let device = match self.device {\n\n Some(v) => v,\n\n None => return Err(BuildError::NoDevice),\n\n };\n\n\n\n let queue = match self.queue {\n\n Some(v) => v,\n\n None => return Err(BuildError::NoQueue),\n\n };\n\n\n\n let fence = match self.fence {\n\n Some(v) => v,\n\n None => return Err(BuildError::NoFence),\n\n };\n\n\n\n let command_pool = match self.command_pool {\n\n Some(v) => v,\n\n None => return Err(BuildError::NoCommandPool),\n\n };\n\n\n", "file_path": "src/config.rs", "rank": 59, "score": 3.0278265955467027 }, { "content": "\n\n /// specify if input buffer is padded - false is padded, true is not padded.\n\n /// For example if it is not padded for R2C if out-of-place mode is selected\n\n /// (only if numberBatches==1 and numberKernels==1)\n\n pub input_formatted: Option<bool>,\n\n\n\n /// specify if output buffer is padded - false is padded, true is not padded.\n\n /// For example if it is not padded for R2C if out-of-place mode is selected\n\n /// (only if numberBatches==1 and numberKernels==1)\n\n pub output_formatted: Option<bool>,\n\n}\n\n\n\n#[derive(Display, Debug, Error)]\n\npub enum ConfigError {\n\n InvalidConfig,\n\n}\n\n\n\npub(crate) struct KeepAlive {\n\n pub device: Arc<Device>,\n\n pub queue: Arc<Queue>,\n", "file_path": "src/config.rs", "rank": 60, "score": 3.015522804761971 }, { "content": " /// buffer/tempBuffer have to be allocated as float (out of place mode only).\n\n HalfMemory,\n\n}\n\n\n\npub enum BufferDesc {\n\n Buffer(Arc<dyn BufferAccess>),\n\n BufferSize(usize),\n\n}\n\n\n\nimpl<T> From<Arc<T>> for BufferDesc\n\nwhere\n\n T: 'static + 
BufferAccess,\n\n{\n\n fn from(value: Arc<T>) -> Self {\n\n Self::Buffer(value as Arc<dyn BufferAccess>)\n\n }\n\n}\n\n\n\nimpl From<usize> for BufferDesc {\n\n fn from(value: usize) -> Self {\n", "file_path": "src/config.rs", "rank": 61, "score": 2.6623443892945113 }, { "content": "\n\n pub fn symmetric_kernel(mut self) -> Self {\n\n self.symmetric_kernel = true;\n\n self\n\n }\n\n\n\n pub fn convolution(mut self) -> Self {\n\n self.convolution = true;\n\n self\n\n }\n\n\n\n pub fn r2c(mut self) -> Self {\n\n self.r2c = true;\n\n self\n\n }\n\n\n\n pub fn use_lut(mut self) -> Self {\n\n self.use_lut = true;\n\n self\n\n }\n", "file_path": "src/config.rs", "rank": 62, "score": 2.623522300463661 }, { "content": " + f * coordinate_features * (size[0] + 2) * size[1];\n\n let _1 = 2 * i\n\n + 1\n\n + j * (size[0] + 2)\n\n + v * (size[0] + 2) * size[1]\n\n + f * coordinate_features * (size[0] + 2) * size[1];\n\n kernel_input[_0 as usize] = (f * coordinate_features + v + 1) as f32;\n\n kernel_input[_1 as usize] = 0.0f32;\n\n }\n\n }\n\n }\n\n }\n\n\n\n println!(\"Kernel:\");\n\n println!(\"{}\", &MatrixFormatter::new(&size, &kernel));\n\n println!();\n\n\n\n\n\n transform_kernel(\n\n &mut context,\n", "file_path": "examples/convolution.rs", "rank": 63, "score": 2.533049594137518 }, { "content": "# vkfft-rs\n\n\n\n`vkfft-rs` allows high-performance execution of 1, 2, or 3D FFTs on the GPU using Vulkan in Rust, with built-in support for convolutions.\n\n\n\n`vkfft-rs` is a binding for [VkFFT](https://github.com/DTolm/VkFFT) that assumes usage with [vulkano](https://vulkano.rs/). While VkFFT, despite the name, supports multiple backends, this wrapper requires usage with Vulkan.\n\n\n\nWhile `vkfft-rs` attempts to maintain a safe API, it's very likely there are some safe functions in this codebase that can still cause unsafe behavior. VkFFT's API and associated data structures are unsafe and stateful, which presents difficulties in ensuring Rust's safety guarantees. 
Until its safety properties can be properly verified it is recommend to proceed with caution. PRs welcome!\n\n\n\n## Building\n\n\n\n```.sh\n\n# Clone VkFFT\n\ngit clone https://github.com/DTolm/VkFFT.git\n\n\n\n# Navigate into the folder\n\ncd VkFFT\n\n\n\n# Create a build directory (this currently must be named \"build\"!)\n\nmkdir build && cd build\n\n\n\n# Configure build\n\ncmake ..\n\n\n\n# Build\n\nmake\n\n\n\n# Build vkfft-rs\n\ncd vkfft-rs\n\n\n\n# VKFFT_ROOT must be set to the root directory of VkFFT!\n\nexport VKFFT_ROOT=/path/to/VkFFT\n\n\n\n# Build\n\ncargo build --examples\n\n\n\n# Run convolution example\n\ncargo run --example convolution\n\n```\n\n\n\n### IMPORTANT\n\n\n\nIf your system already has `libSPIRV.a` in the library search path and are encountering strange segmentation faults\n\nin SPIRV at runtime, it's possible Rust has linked against the system `libSPIRV.a` rather than the one in VkFFT's `build`\n\ndirectory. These different libraries might be ABI incompatible.\n\n\n\nThis is unfortunately a limitation of cargo/rustc's ability for a crate to specify absolute paths for static libraries. 
It is recommended to, unfortunately, remove the other `libSPIRV.a` from the system library path.\n\n\n\nFor example, on Ubuntu:\n\n```.sh\n\nsudo mv /usr/lib/x86_64-linux-gnu/libSPIRV.a /usr/lib/x86_64-linux-gnu/libSPIRV.a.backup \n\n```\n", "file_path": "README.md", "rank": 64, "score": 2.502743732639964 }, { "content": " Self::BufferSize(value)\n\n }\n\n}\n\n\n\nimpl BufferDesc {\n\n pub fn size(&self) -> usize {\n\n match self {\n\n Self::Buffer(b) => b.size(),\n\n Self::BufferSize(b) => *b,\n\n }\n\n }\n\n\n\n pub fn as_buffer(&self) -> Option<&Arc<dyn BufferAccess>> {\n\n match self {\n\n Self::Buffer(b) => Some(b),\n\n Self::BufferSize(_) => None,\n\n }\n\n }\n\n\n\n pub fn as_buffer_size(&self) -> Option<&usize> {\n", "file_path": "src/config.rs", "rank": 65, "score": 2.4846088050862827 }, { "content": " let kernel = CpuAccessibleBuffer::from_iter(\n\n context.device.clone(),\n\n DEFAULT_BUFFER_USAGE,\n\n false,\n\n (0..kernel_size).map(|_| 0.0f32),\n\n )?;\n\n\n\n {\n\n let mut kernel_input = kernel.write()?;\n\n\n\n let mut range = size;\n\n range[0] = range[0] / 2 + 1;\n\n\n\n for f in 0..batch_count {\n\n for v in 0..coordinate_features {\n\n for [i, j] in SizeIterator::new(&range) {\n\n println!(\"{} {}\", i, j);\n\n let _0 = 2 * i\n\n + j * (size[0] + 2)\n\n + v * (size[0] + 2) * size[1]\n", "file_path": "examples/convolution.rs", "rank": 66, "score": 2.4795714914008737 }, { "content": " use_lut: self.use_lut,\n\n symmetric_kernel: self.symmetric_kernel,\n\n input_formatted: self.input_formatted,\n\n output_formatted: self.output_formatted,\n\n kernel: self.kernel,\n\n temp_buffer: self.temp_buffer,\n\n input_buffer: self.input_buffer,\n\n output_buffer: self.output_buffer,\n\n })\n\n }\n\n}\n\n\n\npub enum Precision {\n\n /// Perform calculations in single precision (32-bit)\n\n Single,\n\n /// Perform calculations in double precision (64-bit)\n\n Double,\n\n /// Perform calculations in half precision (16-bit)\n\n Half,\n\n /// Use half precision only 
as input/output buffer. Input/Output have to be allocated as half,\n", "file_path": "src/config.rs", "rank": 67, "score": 2.39947965290125 }, { "content": " pub fn patch(&self) -> u32 {\n\n self.patch\n\n }\n\n}\n\n\n\nimpl Display for Version {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}.{}.{}\", self.major, self.minor, self.patch)\n\n }\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 68, "score": 2.2795419975871227 }, { "content": "\n\n /// Perform R2C/C2R decomposition\n\n pub r2c: bool,\n\n\n\n /// C - coordinate, or dimension of features vector. In matrix convolution - size of vector\n\n pub coordinate_features: u32,\n\n\n\n /// Disables unshuffling of four step algorithm. Requires `temp_buffer` allocation.\n\n pub disable_reorder_four_step: bool,\n\n\n\n /// Used to perform multiple batches of initial data\n\n pub batch_count: Option<u32>,\n\n\n\n pub precision: Precision,\n\n\n\n /// Switches from calculating sincos to using precomputed LUT tables\n\n pub use_lut: bool,\n\n\n\n /// Specify if kernel in 2x2 or 3x3 matrix convolution is symmetric\n\n pub symmetric_kernel: bool,\n", "file_path": "src/config.rs", "rank": 69, "score": 2.2174910889549055 }, { "content": " pub buffer: Option<Arc<dyn BufferAccess>>,\n\n pub temp_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub input_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub output_buffer: Option<Arc<dyn BufferAccess>>,\n\n pub kernel: Option<Arc<dyn BufferAccess>>,\n\n}\n\n\n\nimpl LaunchParams {\n\n fn buffer_object<B>(buffer: B) -> u64\n\n where\n\n B: AsRef<dyn BufferAccess>,\n\n {\n\n buffer.as_ref().inner().buffer.internal_object().value()\n\n }\n\n\n\n pub(crate) fn as_sys(&self) -> Pin<Box<LaunchParamsGuard>> {\n\n use std::mem::{transmute, zeroed};\n\n\n\n unsafe {\n\n let mut res = Box::pin(LaunchParamsGuard {\n", "file_path": "src/app.rs", "rank": 70, "score": 2.173320691998419 }, { "content": " // NoBuffer,\n\n // NoTempBuffer,\n\n // NoInputBuffer,\n\n // 
NoOutputBuffer,\n\n // NoKernel,\n\n}\n\n\n\n#[derive(Display, Debug, Error)]\n\npub enum LaunchError {\n\n ConfigSpecifiesBuffer,\n\n ConfigSpecifiesTempBuffer,\n\n ConfigSpecifiesInputBuffer,\n\n ConfigSpecifiesOutputBuffer,\n\n ConfigSpecifiesKernel,\n\n}\n\n\n\npub struct LaunchParamsBuilder {\n\n command_buffer: Option<vk::CommandBuffer>,\n\n buffer: Option<Arc<dyn BufferAccess>>,\n\n temp_buffer: Option<Arc<dyn BufferAccess>>,\n", "file_path": "src/app.rs", "rank": 71, "score": 1.9834311517054446 }, { "content": " }\n\n\n\n if let Some(false) = self.output_formatted {\n\n return Err(ConfigError::InvalidConfig);\n\n }\n\n\n\n res.config.isInputFormatted = true.into();\n\n res.config.isOutputFormatted = true.into();\n\n }\n\n _ => {}\n\n }\n\n\n\n if let Some(batch_count) = &self.batch_count {\n\n res.config.numberBatches = *batch_count;\n\n }\n\n\n\n Ok(res)\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 72, "score": 1.7967300740658545 }, { "content": "use std::fmt::{Display, Formatter};\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Version {\n\n major: u32,\n\n minor: u32,\n\n patch: u32,\n\n}\n\n\n\nimpl Version {\n\n #[inline]\n\n pub fn major(&self) -> u32 {\n\n self.major\n\n }\n\n\n\n #[inline]\n\n pub fn minor(&self) -> u32 {\n\n self.minor\n\n }\n\n\n\n #[inline]\n", "file_path": "src/version.rs", "rank": 73, "score": 1.765417124336341 }, { "content": " };\n\n\n\n let mut res = Box::pin(ConfigGuard {\n\n keep_alive, \n\n config: zeroed(),\n\n physical_device: self.physical_device.internal_object(),\n\n device: self.device.internal_object().value() as usize,\n\n queue: self.queue.internal_object_guard().value() as usize,\n\n command_pool: self.command_pool.internal_object().value(),\n\n fence: self.fence.internal_object().value(),\n\n buffer_size: self.buffer.as_ref().map(|b| b.size()).unwrap_or(0) as u64,\n\n temp_buffer_size: self.temp_buffer.as_ref().map(|b| b.size()).unwrap_or(0) as u64,\n\n input_buffer_size: 
self.input_buffer.as_ref().map(|b| b.size()).unwrap_or(0) as u64,\n\n output_buffer_size: self.output_buffer.as_ref().map(|b| b.size()).unwrap_or(0) as u64,\n\n kernel_size: self.kernel.as_ref().map(|b| b.size()).unwrap_or(0) as u64,\n\n buffer: self\n\n .buffer\n\n .as_ref()\n\n .map(|b| b.as_buffer())\n\n .flatten()\n", "file_path": "src/config.rs", "rank": 74, "score": 1.608140304478228 }, { "content": " match self {\n\n Self::Buffer(_) => None,\n\n Self::BufferSize(b) => Some(b),\n\n }\n\n }\n\n}\n\n\n\npub struct Config<'a> {\n\n pub fft_dim: u32,\n\n pub size: [u32; 3usize],\n\n\n\n pub physical_device: PhysicalDevice<'a>,\n\n pub device: Arc<Device>,\n\n pub queue: Arc<Queue>,\n\n pub fence: &'a Fence,\n\n pub command_pool: Arc<UnsafeCommandPool>,\n\n\n\n pub buffer: Option<BufferDesc>,\n\n pub input_buffer: Option<BufferDesc>,\n\n pub output_buffer: Option<BufferDesc>,\n", "file_path": "src/config.rs", "rank": 75, "score": 1.3567067677881193 }, { "content": " NoFence,\n\n NoCommandPool,\n\n NoBuffer,\n\n}\n\n\n\npub struct ConfigBuilder<'a> {\n\n fft_dim: u32,\n\n size: [u32; 3usize],\n\n\n\n physical_device: Option<PhysicalDevice<'a>>,\n\n device: Option<Arc<Device>>,\n\n queue: Option<Arc<Queue>>,\n\n fence: Option<&'a Fence>,\n\n command_pool: Option<Arc<UnsafeCommandPool>>,\n\n buffer: Option<BufferDesc>,\n\n input_buffer: Option<BufferDesc>,\n\n output_buffer: Option<BufferDesc>,\n\n temp_buffer: Option<BufferDesc>,\n\n kernel: Option<BufferDesc>,\n\n normalize: bool,\n", "file_path": "src/config.rs", "rank": 76, "score": 1.302310389664346 }, { "content": " res.config.symmetricKernel = self.symmetric_kernel.into();\n\n\n\n if let Some(input_formatted) = self.input_formatted {\n\n res.config.isInputFormatted = input_formatted.into();\n\n }\n\n\n\n if let Some(output_formatted) = self.output_formatted {\n\n res.config.isOutputFormatted = output_formatted.into();\n\n }\n\n\n\n match self.precision {\n\n Precision::Double => {\n\n 
res.config.doublePrecision = true.into();\n\n }\n\n Precision::Half => res.config.halfPrecision = true.into(),\n\n Precision::HalfMemory => {\n\n res.config.halfPrecisionMemoryOnly = true.into();\n\n\n\n if let Some(false) = self.input_formatted {\n\n return Err(ConfigError::InvalidConfig);\n", "file_path": "src/config.rs", "rank": 77, "score": 1.263763854119107 }, { "content": "\n\n // Create command buffer handle\n\n let builder =\n\n unsafe { UnsafeCommandBufferBuilder::new(&primary_cmd_buffer, Kind::primary(), Flags::None)? };\n\n\n\n // Configure FFT launch parameters\n\n let mut params = LaunchParams::builder().command_buffer(&builder).build()?;\n\n\n\n // Construct FFT \"Application\"\n\n let mut app = App::new(conv_config)?;\n\n\n\n // Run forward FFT\n\n app.forward(&mut params)?;\n\n\n\n // Dispatch command buffer and wait for completion\n\n let command_buffer = builder.build()?;\n\n context.submit(command_buffer)?;\n\n\n\n println!(\"Result:\");\n\n println!(\"{}\", MatrixFormatter::new(size, &buffer));\n\n println!();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/convolution.rs", "rank": 78, "score": 1.168740826971698 } ]
Rust
examples/bgpdumper.rs
wladwm/zettabgp
85d8e21742e948907cf3fcc64552f40c9e852524
extern crate zettabgp; use std::env; use std::io::{Read, Write}; use std::net::{Shutdown, TcpStream}; use std::thread::{sleep, spawn}; use zettabgp::prelude::*; pub struct BgpDumper { pub params: BgpSessionParams, pub stream: TcpStream, } impl BgpDumper { pub fn new(bgp_params: BgpSessionParams, tcpstream: TcpStream) -> BgpDumper { BgpDumper { params: bgp_params, stream: tcpstream, } } fn recv_message_head(&mut self) -> Result<(BgpMessageType, usize), BgpError> { let mut buf = [0 as u8; 19]; self.stream.read_exact(&mut buf)?; self.params.decode_message_head(&buf) } pub fn start_active(&mut self) -> Result<(), BgpError> { let mut bom = self.params.open_message(); let mut buf = [255 as u8; 4096]; let messagelen = match bom.encode_to(&self.params, &mut buf[19..]) { Err(e) => { return Err(e); } Ok(sz) => sz, }; let blen = self .params .prepare_message_buf(&mut buf, BgpMessageType::Open, messagelen)?; self.stream.write_all(&buf[0..blen])?; let msg = match self.recv_message_head() { Err(e) => { return Err(e); } Ok(msg) => msg, }; if msg.0 != BgpMessageType::Open { return Err(BgpError::static_str("Invalid state to start_active")); } self.stream.read_exact(&mut buf[0..msg.1])?; bom.decode_from(&self.params, &buf[0..msg.1])?; self.params.hold_time = bom.hold_time; self.params.caps = bom.caps; self.params.check_caps(); Ok(()) } pub fn send_keepalive(stream: &mut TcpStream) -> Result<(), BgpError> { let mut buf = [255 as u8; 19]; buf[0..16].clone_from_slice(&[255 as u8; 16]); buf[16] = 0; buf[17] = 19; buf[18] = 4; match stream.write_all(&buf) { Ok(_) => Ok(()), Err(e) => Err(e.into()), } } pub fn start_keepalives(&self) -> Result<(), BgpError> { let mut ks = self.stream.try_clone()?; let slp = std::time::Duration::new((self.params.hold_time / 3) as u64, 0); spawn(move || loop { if BgpDumper::send_keepalive(&mut ks).is_err() { break; } sleep(slp); }); Ok(()) } pub fn lifecycle(&mut self) -> Result<(), BgpError> { self.start_keepalives()?; let mut buf = Box::new([0 as u8; 
65536]); loop { let msg = match self.recv_message_head() { Ok(m) => m, Err(e) => { return Err(e); } }; if msg.0 == BgpMessageType::Keepalive { continue; } self.stream.read_exact(&mut buf[0..msg.1])?; match msg.0 { BgpMessageType::Open => { eprintln!("Incorrect open message!"); break; } BgpMessageType::Keepalive => {} BgpMessageType::Notification => { let mut msgnotification = BgpNotificationMessage::new(); match msgnotification.decode_from(&self.params, &buf[0..msg.1]) { Err(e) => { eprintln!("BGP notification decode error: {:?}", e); } Ok(_) => { println!( "BGP notification: {:?} - {:?}", msgnotification, msgnotification.error_text() ); } }; break; } BgpMessageType::Update => { let mut msgupdate = BgpUpdateMessage::new(); if let Err(e) = msgupdate.decode_from(&self.params, &buf[0..msg.1]) { eprintln!("BGP update decode error: {:?}", e); continue; } println!("{:?}", msgupdate); } } } Ok(()) } pub fn close(&mut self) { self.stream.shutdown(Shutdown::Both).unwrap_or_default(); } } fn main() { if env::args().len() != 3 { eprintln!("Usage: bgpdumper PEER AS"); return; } let vargs: Vec<String> = env::args().map(|x| x).collect(); let targetip: std::net::IpAddr = match vargs[1].parse() { Ok(x) => x, Err(_) => { eprintln!("Invalid peer IP - {}", vargs[1]); return; } }; let targetasn: u32 = match vargs[2].parse() { Ok(x) => x, Err(_) => { eprintln!("Invalid peer ASn - {}", vargs[2]); return; } }; let target = std::net::SocketAddr::new(targetip, 179); let stream = std::net::TcpStream::connect(target).expect("Unable to connect to bgp speaker"); let mut peer = BgpDumper::new( BgpSessionParams::new( targetasn, 180, BgpTransportMode::IPv4, std::net::Ipv4Addr::new(1, 0, 0, 0), vec![ BgpCapability::SafiIPv4u, BgpCapability::SafiIPv4m, BgpCapability::SafiIPv4lu, BgpCapability::SafiIPv6lu, BgpCapability::SafiVPNv4u, BgpCapability::SafiVPNv4m, BgpCapability::SafiVPNv6u, BgpCapability::SafiVPNv6m, BgpCapability::SafiIPv4mvpn, BgpCapability::SafiVPLS, BgpCapability::CapRR, 
BgpCapability::CapASN32(targetasn), ] .into_iter() .collect(), ), stream, ); match peer.start_active() { Err(e) => { eprintln!("failed to create BGP peer; err = {:?}", e); peer.close(); return; } Ok(_) => {} }; println!("Run lifecycle"); peer.lifecycle().unwrap(); println!("Done lifecycle"); peer.close(); }
extern crate zettabgp; use std::env; use std::io::{Read, Write}; use std::net::{Shutdown, TcpStream}; use std::thread::{sleep, spawn}; use zettabgp::prelude::*; pub struct BgpDumper { pub params: BgpSessionParams, pub stream: TcpStream, } impl BgpDumper { pub fn new(bgp_params: BgpSessionParams, tcpstream: TcpStream) -> BgpDumper { BgpDumper { params: bgp_params, stream: tcpstream, } } fn recv_message_head(&mut self) -> Result<(BgpMessageType, usize), BgpError> { let mut buf = [0 as u8; 19]; self.stream.read_exact(&mut buf)?; self.params.decode_message_head(&buf) } pub fn start_active(&mut self) -> Result<(), BgpError> { let mut bom = self.params.open_message(); let mut buf = [255 as u8; 4096]; let messagelen = match bom.encode_to(&self.params, &mut buf[19..]) { Err(e) => { return Err(e); } Ok(sz) => sz, }; let blen = self .params .prepare_message_buf(&mut buf, BgpMessageType::Open, messagelen)?; self.stream.write_all(&buf[0..blen])?; let msg = match self.recv_message_head() { Err(e) => { return Err(e); } Ok(msg) => msg, }; if msg.0 != BgpMessageType::Open { return Err(BgpError::static_str("Invalid state to start_active")); } self.stream.read_exact(&mut buf[0..msg.1])?; bom.decode_from(&self.params, &buf[0..msg.1])?; self.params.hold_time = bom.hold_time; self.params.caps = bom.caps; self.params.check_caps(); Ok(()) } pub fn send_keepalive(stream: &mut TcpStream) -> Result<(), BgpError> { let mut buf = [255 as u8; 19]; buf[0..16].clone_from_slice(&[255 as u8; 16]); buf[16] = 0; buf[17] = 19; buf[18] = 4; match stream.write_all(&buf) { Ok(_) => Ok(()), Err(e) => Err(e.into()), } } pub fn start_keepalives(&self) -> Result<(), BgpError> { let mut ks = self.stream.try_clone()?; let slp = std::time::Duration::new((self.params.hold_time / 3) as u64, 0); spawn(move || loop { if BgpDumper::send_keepalive(&mut ks).is_err() { break; } sleep(slp); }); Ok(()) } pub fn lifecycle(&mut self) -> Result<(), BgpError> { self.start_keepalives()?; let mut buf = Box::new([0 as u8; 
65536]); loop { let msg = match self.recv_message_head() { Ok(m) => m, Err(e) => { return Err(e); } }; if msg.0 == BgpMessageType::Keepalive { continue; } self.stream.read_exact(&mut buf[0..msg.1])?; match msg.0 { BgpMessageType::Open => { eprintln!("Incorrect open message!"); break; } BgpMessageType::Keepalive => {} BgpMessageType::Notification => { let mut msgnotification = BgpNotificationMessage::new(); match msgnotification.decode_from(&self.params, &buf[0..msg.1]) { Err(e) => { eprintln!("BGP notification decode error: {:?}", e); } Ok(_) => { println!( "BGP notification: {:?} - {:?}", msgnotification, msgnotification.error_text() ); } }; break; } BgpMessageType::Update => { let mut msgupdate = BgpUpdateMessage::new(); if let Err(e) = msgupdate.decode_from(&self.params, &buf[0..msg.1]) { eprintln!("BGP update decode error: {:?}", e); continue; } println!("{:?}", msgupdate); } } } Ok(()) } pub fn close(&mut self) { self.stream.shutdown(Shutdown::Both).unwrap_or_default(); } }
fn main() { if env::args().len() != 3 { eprintln!("Usage: bgpdumper PEER AS"); return; } let vargs: Vec<String> = env::args().map(|x| x).collect(); let targetip: std::net::IpAddr = match vargs[1].parse() { Ok(x) => x, Err(_) => { eprintln!("Invalid peer IP - {}", vargs[1]); return; } }; let targetasn: u32 = match vargs[2].parse() { Ok(x) => x, Err(_) => { eprintln!("Invalid peer ASn - {}", vargs[2]); return; } }; let target = std::net::SocketAddr::new(targetip, 179); let stream = std::net::TcpStream::connect(target).expect("Unable to connect to bgp speaker"); let mut peer = BgpDumper::new( BgpSessionParams::new( targetasn, 180, BgpTransportMode::IPv4, std::net::Ipv4Addr::new(1, 0, 0, 0), vec![ BgpCapability::SafiIPv4u, BgpCapability::SafiIPv4m, BgpCapability::SafiIPv4lu, BgpCapability::SafiIPv6lu, BgpCapability::SafiVPNv4u, BgpCapability::SafiVPNv4m, BgpCapability::SafiVPNv6u, BgpCapability::SafiVPNv6m, BgpCapability::SafiIPv4mvpn, BgpCapability::SafiVPLS, BgpCapability::CapRR, BgpCapability::CapASN32(targetasn), ] .into_iter() .collect(), ), stream, ); match peer.start_active() { Err(e) => { eprintln!("failed to create BGP peer; err = {:?}", e); peer.close(); return; } Ok(_) => {} }; println!("Run lifecycle"); peer.lifecycle().unwrap(); println!("Done lifecycle"); peer.close(); }
function_block-full_function
[ { "content": "/// Stores ipv4 address into the buffer.\n\npub fn encode_addrv4_to(addr: &std::net::Ipv4Addr, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n if buf.len() < 4 {\n\n return Err(BgpError::static_str(\"Invalid addrv4 length\"));\n\n }\n\n buf[0..4].clone_from_slice(&addr.octets());\n\n Ok(4)\n\n}\n", "file_path": "src/util.rs", "rank": 0, "score": 246238.29913891281 }, { "content": "/// Stores ipv6 address into the buffer.\n\npub fn encode_addrv6_to(addr: &std::net::Ipv6Addr, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n if buf.len() < 16 {\n\n return Err(BgpError::static_str(\"Invalid addrv6 length\"));\n\n }\n\n buf[0..16].clone_from_slice(&addr.octets());\n\n Ok(16)\n\n}\n", "file_path": "src/util.rs", "rank": 1, "score": 246238.29913891276 }, { "content": "/// Stores ipv4/ipv6 address into the buffer.\n\npub fn encode_addr_to(addr: &std::net::IpAddr, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n match addr {\n\n std::net::IpAddr::V4(a) => encode_addrv4_to(a, buf),\n\n std::net::IpAddr::V6(a) => encode_addrv6_to(a, buf),\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 2, "score": 246238.29913891281 }, { "content": "pub fn decode_bgpitem_from<T: BgpItem<T>>(buf: &[u8]) -> Result<(T, usize), BgpError> {\n\n let bits = buf[0];\n\n let r = T::extract_bits_from(bits, &buf[1..])?;\n\n Ok((r.0, r.1 + 1))\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 3, "score": 246139.08781167673 }, { "content": "pub fn encode_bgpitems_to<T: BgpItem<T>>(v: &Vec<T>, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n let mut curpos = 0;\n\n for i in v.iter() {\n\n let r = i.set_bits_to(&mut buf[curpos + 1..])?;\n\n buf[curpos] = r.0;\n\n curpos += r.1 + 1;\n\n }\n\n return Ok(curpos);\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 4, "score": 240035.26404349087 }, { "content": "/// Gets ipv4 address from the buffer.\n\npub fn decode_addrv4_from(buf: &[u8]) -> Result<std::net::Ipv4Addr, BgpError> {\n\n if buf.len() < 4 {\n\n return 
Err(BgpError::static_str(\"Invalid addrv4 length\"));\n\n }\n\n return Ok(std::net::Ipv4Addr::new(buf[0], buf[1], buf[2], buf[3]));\n\n}\n", "file_path": "src/util.rs", "rank": 5, "score": 239393.72189925745 }, { "content": "/// Gets ipv4/ipv6 address from the buffer. Address type determined by buffer length.\n\npub fn decode_addr_from(buf: &[u8]) -> Result<std::net::IpAddr, BgpError> {\n\n match buf.len() {\n\n 16 => Ok(std::net::IpAddr::V6(decode_addrv6_from(buf)?)),\n\n 4 => Ok(std::net::IpAddr::V4(decode_addrv4_from(buf)?)),\n\n _ => Err(BgpError::static_str(\"Invalid addr length\")),\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 6, "score": 239393.72189925745 }, { "content": "/// Gets ipv6 address from the buffer.\n\npub fn decode_addrv6_from(buf: &[u8]) -> Result<std::net::Ipv6Addr, BgpError> {\n\n if buf.len() < 16 {\n\n return Err(BgpError::static_str(\"Invalid addrv6 length\"));\n\n }\n\n return Ok(std::net::Ipv6Addr::new(\n\n getn_u16(&buf[0..2]),\n\n getn_u16(&buf[2..4]),\n\n getn_u16(&buf[4..6]),\n\n getn_u16(&buf[6..8]),\n\n getn_u16(&buf[8..10]),\n\n getn_u16(&buf[10..12]),\n\n getn_u16(&buf[12..14]),\n\n getn_u16(&buf[14..16]),\n\n ));\n\n}\n", "file_path": "src/util.rs", "rank": 7, "score": 239393.72189925745 }, { "content": "pub fn decode_bgpitems_from<T: BgpItem<T>>(buf: &[u8]) -> Result<(Vec<T>, usize), BgpError> {\n\n let mut v = Vec::<T>::new();\n\n let mut curpos = 0;\n\n while curpos < buf.len() {\n\n let nlri = decode_bgpitem_from(&buf[curpos..])?;\n\n v.push(nlri.0);\n\n curpos += nlri.1;\n\n }\n\n return Ok((v, curpos));\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 8, "score": 238154.3983585789 }, { "content": "pub fn decode_bmp_addr_from(buf: &[u8]) -> Result<std::net::IpAddr, BgpError> {\n\n if buf.len() < 16 {\n\n return Err(BgpError::insufficient_buffer_size());\n\n }\n\n if buf[0] == 0\n\n && buf[1] == 0\n\n && buf[2] == 0\n\n && buf[3] == 0\n\n && buf[4] == 0\n\n && buf[5] == 0\n\n && buf[6] == 0\n\n && buf[7] == 0\n\n && 
buf[8] == 0\n\n && buf[9] == 0\n\n && buf[10] == 0\n\n && buf[11] == 0\n\n {\n\n return Ok(std::net::IpAddr::V4(decode_addrv4_from(&buf[12..])?));\n\n }\n\n return Ok(std::net::IpAddr::V6(decode_addrv6_from(buf)?));\n", "file_path": "src/bmp/bmputl.rs", "rank": 9, "score": 228710.20245958475 }, { "content": "pub fn getn_u64(a: &[u8]) -> u64 {\n\n ((getn_u32(a) as u64) << 32) | (getn_u32(&a[4..8]) as u64)\n\n}\n", "file_path": "src/util.rs", "rank": 10, "score": 177759.11078255135 }, { "content": "pub fn setn_u32(s: u32, a: &mut [u8]) {\n\n a[0] = (s >> 24) as u8;\n\n a[1] = ((s >> 16) & 0xff) as u8;\n\n a[2] = ((s >> 8) & 0xff) as u8;\n\n a[3] = (s & 0xff) as u8;\n\n}\n", "file_path": "src/util.rs", "rank": 11, "score": 155970.94338366718 }, { "content": "pub fn setn_u16(s: u16, a: &mut [u8]) {\n\n a[0] = (s >> 8) as u8;\n\n a[1] = (s & 0xff) as u8;\n\n}\n", "file_path": "src/util.rs", "rank": 12, "score": 155970.94338366718 }, { "content": "#[repr(C, packed)]\n\nstruct BgpOpenHead {\n\n // bgpver:u8,\n\n as_num: u16,\n\n hold_time: u16,\n\n routerid: [u8; 4],\n\n caplen: u8,\n\n}\n\n\n\nimpl BgpMessage for BgpOpenMessage {\n\n fn decode_from(&mut self, _peer: &BgpSessionParams, buf: &[u8]) -> Result<(), BgpError> {\n\n let ptr: *const u8 = buf[1..].as_ptr();\n\n let ptr: *const BgpOpenHead = ptr as *const BgpOpenHead;\n\n let ptr: &BgpOpenHead = unsafe { &*ptr };\n\n if buf[0] != 4 {\n\n return Err(BgpError::static_str(\"Invalid BGP version <> 4\"));\n\n }\n\n self.as_num = ntoh16(ptr.as_num) as u32;\n\n self.hold_time = ntoh16(ptr.hold_time);\n\n self.router_id = std::net::Ipv4Addr::new(\n\n ptr.routerid[0],\n", "file_path": "src/message/open.rs", "rank": 13, "score": 146395.1985827269 }, { "content": "pub fn getn_u128(a: &[u8]) -> u128 {\n\n ((getn_u64(a) as u128) << 64) | (getn_u64(&a[8..16]) as u128)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 14, "score": 121586.68067118272 }, { "content": "pub fn getn_u16(a: &[u8]) -> u16 {\n\n (a[0] as u16) << 8 | 
(a[1] as u16)\n\n}\n", "file_path": "src/util.rs", "rank": 15, "score": 121586.68067118272 }, { "content": "pub fn getn_u32(a: &[u8]) -> u32 {\n\n (a[0] as u32) << 24 | (a[1] as u32) << 16 | (a[2] as u32) << 8 | (a[3] as u32)\n\n}\n", "file_path": "src/util.rs", "rank": 16, "score": 121586.68067118272 }, { "content": "pub fn decode_bgpaddritems_from<T: BgpAddrItem<T>>(\n\n peermode: BgpTransportMode,\n\n buf: &[u8],\n\n) -> Result<(Vec<T>, usize), BgpError> {\n\n let mut v = Vec::<T>::new();\n\n let mut curpos = 0;\n\n while curpos < buf.len() {\n\n let nlri = T::decode_from(peermode, &buf[curpos..])?;\n\n v.push(nlri.0);\n\n curpos += nlri.1;\n\n }\n\n return Ok((v, curpos));\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 17, "score": 116177.25875241605 }, { "content": "pub fn decode_long_bgpitems_from<T: BgpItemLong<T>>(\n\n buf: &[u8],\n\n) -> Result<(Vec<T>, usize), BgpError> {\n\n let mut v = Vec::<T>::new();\n\n let mut curpos = 0;\n\n while curpos < buf.len() {\n\n let itemlen = getn_u16(&buf[curpos..(curpos + 2)]) as usize;\n\n v.push(T::extract_from(\n\n itemlen,\n\n &buf[curpos + 2..(curpos + itemlen + 2)],\n\n )?);\n\n curpos += itemlen + 2;\n\n }\n\n return Ok((v, curpos));\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 18, "score": 112761.00517004683 }, { "content": "/// trait BgpMessage represents BGP protocol message\n\npub trait BgpMessage {\n\n fn decode_from(\n\n &mut self,\n\n peer: &BgpSessionParams,\n\n buf: &[u8],\n\n ) -> Result<(), BgpError>;\n\n fn encode_to(\n\n &self,\n\n peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError>;\n\n}\n\n\n\n/// Bgp message type: open, update, notification or keepalive.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum BgpMessageType {\n\n Open,\n\n Update,\n\n Notification,\n\n Keepalive,\n", "file_path": "src/message/mod.rs", "rank": 19, "score": 105673.81171367383 }, { "content": "/// This trait represens BGP protocol message.\n\npub trait BgpMessage {\n\n /// Decode from 
buffer.\n\n fn decode_from(&mut self, peer: &BgpSessionParams, buf: &[u8]) -> Result<(), BgpError>;\n\n /// Encode to buffer. Returns consumed buffer length, or error.\n\n fn encode_to(&self, peer: &BgpSessionParams, buf: &mut [u8]) -> Result<usize, BgpError>;\n\n}\n\n\n\n/// BGP capability AddPath.\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct BgpCapAddPath {\n\n pub afi: u16,\n\n pub safi: u8,\n\n pub send: bool,\n\n pub receive: bool,\n\n}\n\nimpl BgpCapAddPath {\n\n pub fn response(src: &BgpCapAddPath) -> BgpCapAddPath {\n\n BgpCapAddPath {\n\n afi: src.afi,\n\n safi: src.safi,\n", "file_path": "src/lib.rs", "rank": 20, "score": 100380.7844947868 }, { "content": "pub fn decode_pathid_bgpitems_from<T: BgpItem<T> + Clone + PartialEq + Eq + PartialOrd>(\n\n buf: &[u8],\n\n) -> Result<(Vec<WithPathId<T>>, usize), BgpError> {\n\n let mut v = Vec::<WithPathId<T>>::new();\n\n let mut curpos = 0;\n\n while (curpos+4) < buf.len() {\n\n let pathid = getn_u32(&buf[curpos..]);\n\n curpos += 4;\n\n let nlri = decode_bgpitem_from(&buf[curpos..])?;\n\n v.push(WithPathId::<T>::new(pathid, nlri.0));\n\n curpos += nlri.1;\n\n }\n\n return Ok((v, curpos));\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 21, "score": 94386.80039484275 }, { "content": "pub fn encode_bgpaddritems_to<T: BgpAddrItem<T>>(\n\n v: &Vec<T>,\n\n peermode: BgpTransportMode,\n\n buf: &mut [u8],\n\n) -> Result<usize, BgpError> {\n\n let mut curpos = 0;\n\n for i in v {\n\n curpos += i.encode_to(peermode, &mut buf[curpos..])?;\n\n }\n\n Ok(curpos)\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 22, "score": 93067.46524765162 }, { "content": "pub fn encode_long_bgpitems_to<T: BgpItemLong<T>>(\n\n v: &Vec<T>,\n\n buf: &mut [u8],\n\n) -> Result<usize, BgpError> {\n\n let mut curpos = 0;\n\n for i in v {\n\n let sz = i.pack_to(&mut buf[curpos + 2..])?;\n\n setn_u16(sz as u16, &mut buf[curpos..curpos + 2]);\n\n curpos += sz;\n\n }\n\n Ok(curpos)\n\n}\n", "file_path": 
"src/afi/mod.rs", "rank": 23, "score": 90509.84478396448 }, { "content": "pub fn ntoh16(a: u16) -> u16 {\n\n (a >> 8) | ((a & 0xff) << 8)\n\n}\n", "file_path": "src/util.rs", "rank": 25, "score": 81064.86456962062 }, { "content": "#[cfg(feature = \"serialization\")]\n\nstruct BgpNetVisitor;\n\n\n\n#[cfg(feature = \"serialization\")]\n\nimpl<'de> Visitor<'de> for BgpNetVisitor {\n\n type Value = BgpNet;\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a ipv4/ipv6/mac prefix\")\n\n }\n\n fn visit_str<E>(self, value: &str) -> Result<BgpNet, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n value.parse::<BgpNet>().map_err(de::Error::custom)\n\n }\n\n}\n\n#[cfg(feature = \"serialization\")]\n\nimpl<'de> serde::Deserialize<'de> for BgpNet {\n\n fn deserialize<D>(deserializer: D) -> Result<BgpNet, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n", "file_path": "src/afi/mod.rs", "rank": 26, "score": 80877.00332319038 }, { "content": "pub fn encode_pathid_bgpitems_to<T: BgpItem<T> + Clone + PartialEq + Eq + PartialOrd>(\n\n v: &Vec<WithPathId<T>>,\n\n buf: &mut [u8],\n\n) -> Result<usize, BgpError> {\n\n let mut curpos = 0;\n\n for i in v.iter() {\n\n setn_u32(i.pathid, &mut buf[curpos..]);\n\n curpos += 4;\n\n let r = i.nlri.set_bits_to(&mut buf[curpos + 1..])?;\n\n buf[curpos] = r.0;\n\n curpos += r.1 + 1;\n\n }\n\n return Ok(curpos);\n\n}\n\n/// BGP VPN route distinguisher\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct BgpRD {\n\n /// high-order part\n\n pub rdh: u32,\n\n /// low-order part\n", "file_path": "src/afi/mod.rs", "rank": 27, "score": 75621.68382763198 }, { "content": "pub trait BgpAttr: std::fmt::Display + std::fmt::Debug {\n\n fn encode_to(\n\n &self,\n\n peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError>;\n\n fn attr(&self) -> BgpAttrParams;\n\n}\n\n\n\n/// BGP path attribute\n\n#[derive(Debug,Hash,PartialOrd,Ord,PartialEq,Eq)]\n\npub 
enum BgpAttrItem {\n\n Origin(BgpOrigin),\n\n ASPath(BgpASpath),\n\n NextHop(BgpNextHop),\n\n MED(BgpMED),\n\n LocalPref(BgpLocalpref),\n\n AtomicAggregate(BgpAtomicAggregate),\n\n AggregatorAS(BgpAggregatorAS),\n\n CommunityList(BgpCommunityList),\n", "file_path": "src/message/attributes/mod.rs", "rank": 28, "score": 71533.5952728272 }, { "content": " pos += optlen;\n\n break;\n\n }\n\n Ok(cap) => {\n\n self.caps.push(cap.0);\n\n optlen -= cap.1;\n\n pos += cap.1;\n\n }\n\n };\n\n }\n\n }\n\n Ok(())\n\n }\n\n fn encode_to(&self, _peer: &BgpSessionParams, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n let ptr: *mut u8 = buf[1..].as_mut_ptr();\n\n let ptr: *mut BgpOpenHead = ptr as *mut BgpOpenHead;\n\n let ptr: &mut BgpOpenHead = unsafe { &mut *ptr };\n\n buf[0] = 4;\n\n ptr.as_num = ntoh16(if self.as_num < 65536 {\n\n self.as_num as u16\n", "file_path": "src/message/open.rs", "rank": 29, "score": 64609.75092607744 }, { "content": " ptr.routerid[1],\n\n ptr.routerid[2],\n\n ptr.routerid[3],\n\n );\n\n self.caps.clear();\n\n let mut pos: usize = 10;\n\n while pos < buf.len() {\n\n if buf[pos] != 2 {\n\n eprintln!(\"BGP capability: {:?}\", &buf[pos..]);\n\n return Err(BgpError::from_string(format!(\n\n \"Invalid BGP capability code {:?}!\",\n\n buf[pos]\n\n )));\n\n }\n\n let mut optlen = buf[pos + 1] as usize;\n\n pos += 2;\n\n while optlen > 0 {\n\n match BgpCapability::from_buffer(&buf[pos..pos + optlen]) {\n\n Err(_) => {\n\n //eprintln!(\"Capability decode: {:?}: {:?}\", e, &buf[pos..pos + optlen]);\n", "file_path": "src/message/open.rs", "rank": 30, "score": 64600.76223463652 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse crate::{ntoh16, BgpCapability, BgpError, BgpMessage, BgpSessionParams};\n\nuse std::vec::Vec;\n\n/// BGP open message\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct BgpOpenMessage {\n\n /// Autonomous system number\n\n pub as_num: u32,\n\n /// Hold time in seconds\n\n pub hold_time: u16,\n\n /// router Id\n\n pub router_id: std::net::Ipv4Addr,\n\n /// Capability set\n\n pub caps: Vec<BgpCapability>,\n\n}\n\n\n\n#[repr(C, packed)]\n", "file_path": "src/message/open.rs", "rank": 31, "score": 64597.341636565194 }, { "content": "\n\nimpl BgpOpenMessage {\n\n pub fn new() -> BgpOpenMessage {\n\n return BgpOpenMessage {\n\n as_num: 0,\n\n hold_time: 180,\n\n router_id: std::net::Ipv4Addr::new(127, 0, 0, 1),\n\n caps: Vec::new(),\n\n };\n\n }\n\n}\n", "file_path": "src/message/open.rs", "rank": 32, "score": 64595.12466941284 }, { "content": " } else {\n\n 23456\n\n });\n\n ptr.hold_time = ntoh16(self.hold_time);\n\n ptr.routerid = self.router_id.octets();\n\n ptr.caplen = self\n\n .caps\n\n .iter()\n\n .fold(0u32, |sum, i| sum + (i.bytes_len() as u32) + 2) as u8;\n\n let mut pos: usize = 10;\n\n for cp in self.caps.iter() {\n\n let caplen = cp.bytes_len();\n\n buf[pos] = 2; //capability\n\n buf[pos + 1] = caplen as u8;\n\n cp.fill_buffer(&mut buf[(pos + 2)..(caplen + pos + 2)])?;\n\n pos += 2 + caplen;\n\n }\n\n Ok(pos)\n\n }\n\n}\n", "file_path": "src/message/open.rs", "rank": 33, "score": 64591.54302290596 }, { "content": " f,\n\n \"BgpNotificationMessage {:?} code={:?} subcode={:?} data={:?})\",\n\n self.error_text(),\n\n self.error_code,\n\n self.error_subcode,\n\n self.data\n\n )\n\n }\n\n}\n\nimpl BgpMessage for BgpNotificationMessage {\n\n fn decode_from(\n\n &mut self,\n\n _peer: &BgpSessionParams,\n\n buf: &[u8],\n\n ) -> Result<(), BgpError> {\n\n if buf.len() < 2 {\n\n return Err(BgpError::static_str(\n\n \"Invalid notification 
message length\",\n\n ));\n\n }\n", "file_path": "src/message/notification.rs", "rank": 34, "score": 64587.96247888835 }, { "content": " self.error_code = buf[0];\n\n self.error_subcode = buf[1];\n\n if buf.len() == 3 {\n\n self.data = buf[2] as u16;\n\n }\n\n if buf.len() > 3 {\n\n self.data = ((buf[2] as u16) << 8) | (buf[3] as u16);\n\n }\n\n Ok(())\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n if buf.len() < 4 {\n\n return Err(BgpError::static_str(\n\n \"Invalid notification message length\",\n\n ));\n\n }\n\n buf[0] = self.error_code;\n\n buf[1] = self.error_subcode;\n\n buf[2] = (self.data >> 8) as u8;\n\n buf[3] = (self.data & 0xff) as u8;\n\n Ok(4)\n\n }\n\n}\n", "file_path": "src/message/notification.rs", "rank": 35, "score": 64584.737948800095 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse crate::{BgpError, BgpMessage, BgpSessionParams};\n\n\n\n/// BGP notification message\n\npub struct BgpNotificationMessage {\n\n/// error code\n\n pub error_code: u8,\n\n/// error sub-code\n\n pub error_subcode: u8,\n\n/// extra data\n\n pub data: u16,\n\n}\n\nimpl BgpNotificationMessage {\n", "file_path": "src/message/notification.rs", "rank": 36, "score": 64579.7397550076 }, { "content": " String::from(\"Unknown code \")\n\n + n.to_string().as_str()\n\n + \" subcode \"\n\n + self.error_subcode.to_string().as_str()\n\n }\n\n }\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpNotificationMessage {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpNotificationMessage\")\n\n .field(\"error_code\", &self.error_code)\n\n .field(\"error_subcode\", &self.error_subcode)\n\n .field(\"data\", &self.data)\n\n .finish()\n\n }\n\n}\n\nimpl std::fmt::Display for BgpNotificationMessage {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(\n", "file_path": "src/message/notification.rs", "rank": 37, "score": 64578.15098435049 }, { "content": " /// constructs new empty message\n\n pub fn new() -> BgpNotificationMessage {\n\n BgpNotificationMessage {\n\n error_code: 0,\n\n error_subcode: 0,\n\n data: 0,\n\n }\n\n }\n\n /// returns human-friendly error interpretation.\n\n pub fn error_text(&self) -> String {\n\n match self.error_code {\n\n 1 => {\n\n String::from(\"Message Header Error: \")\n\n + (match self.error_subcode {\n\n 1 => String::from(\"Connection not synchronized\"),\n\n 2 => String::from(\"Bad Message Length\"),\n\n 3 => String::from(\"Bad Message Type\"),\n\n n => String::from(\" subcode \") + n.to_string().as_str(),\n\n })\n\n .as_str()\n", "file_path": "src/message/notification.rs", "rank": 38, "score": 64571.71496140446 }, { "content": " }\n\n 2 => {\n\n String::from(\"OPEN Message Error: \")\n\n + 
(match self.error_subcode {\n\n 1 => String::from(\"Unsupported Version Number\"),\n\n 2 => String::from(\"Bad Peer AS\"),\n\n 3 => String::from(\"Bad BGP Identifier\"),\n\n 4 => String::from(\"Unsupported Optional Parameter\"),\n\n 5 => String::from(\"Deprecated(5)\"),\n\n 6 => String::from(\"Unacceptable Hold Time\"),\n\n n => String::from(\" subcode \") + n.to_string().as_str(),\n\n })\n\n .as_str()\n\n }\n\n 3 => {\n\n String::from(\"Update Message Error: \")\n\n + (match self.error_subcode {\n\n 1 => String::from(\"Malformed Attribute List\"),\n\n 2 => String::from(\"Unrecognized Well-known Attribute\"),\n\n 3 => String::from(\"Missing Well-known Attribute\"),\n", "file_path": "src/message/notification.rs", "rank": 39, "score": 64567.14363455059 }, { "content": " }\n\n 5 => {\n\n String::from(\"Finite State Machine Error\")\n\n + (if self.error_subcode != 0 {\n\n String::from(\" subcode \") + self.error_subcode.to_string().as_str()\n\n } else {\n\n String::from(\"(0)\")\n\n })\n\n .as_str()\n\n }\n\n 6 => {\n\n String::from(\"Cease\")\n\n + (if self.error_subcode != 0 {\n\n String::from(\" subcode \") + self.error_subcode.to_string().as_str()\n\n } else {\n\n String::from(\"(0)\")\n\n })\n\n .as_str()\n\n }\n\n n => {\n", "file_path": "src/message/notification.rs", "rank": 40, "score": 64557.58447280306 }, { "content": " 4 => String::from(\"Attribute Flags Error\"),\n\n 5 => String::from(\"Attribute Length Error\"),\n\n 6 => String::from(\"Invalid ORIGIN Attribute\"),\n\n 7 => String::from(\"Deprecated(7)\"),\n\n 8 => String::from(\"Invalid NEXT_HOP Attribute\"),\n\n 9 => String::from(\"Optional Attribute Error\"),\n\n 10 => String::from(\"Invalid Network Field\"),\n\n 11 => String::from(\"Malformed AS_PATH\"),\n\n n => String::from(\" subcode \") + n.to_string().as_str(),\n\n })\n\n .as_str()\n\n }\n\n 4 => {\n\n String::from(\"Hold Timer Expired\")\n\n + (if self.error_subcode != 0 {\n\n String::from(\" subcode \") + 
self.error_subcode.to_string().as_str()\n\n } else {\n\n String::from(\"(0)\")\n\n })\n\n .as_str()\n", "file_path": "src/message/notification.rs", "rank": 41, "score": 64554.055852964666 }, { "content": " if peer.check_addpath_receive(2, 1) {\n\n let r = decode_pathid_bgpitems_from(&buf[curpos..])?;\n\n self.updates = BgpAddrs::IPV6UP(r.0);\n\n } else {\n\n let r = decode_bgpitems_from(&buf[curpos..])?;\n\n self.updates = BgpAddrs::IPV6U(r.0);\n\n }\n\n }\n\n };\n\n //println!(\"Update: {:?}\", self);\n\n Ok(())\n\n }\n\n fn encode_to(&self, peer: &BgpSessionParams, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n let mut curpos: usize = 0;\n\n //withdraws main\n\n match peer.peer_mode {\n\n BgpTransportMode::IPv4 => match self.withdraws {\n\n BgpAddrs::IPV4U(ref wdrw) => {\n\n let wlen = encode_bgpitems_to(&wdrw, &mut buf[curpos + 2..])?;\n\n if wlen > 65535 {\n", "file_path": "src/message/update/mod.rs", "rank": 42, "score": 61084.286590200056 }, { "content": " BgpAttrItem::MPWithdraws(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n\n}\n\nimpl BgpMessage for BgpUpdateMessage {\n\n fn decode_from(&mut self, peer: &BgpSessionParams, buf: &[u8]) -> Result<(), BgpError> {\n\n let mut curpos: usize = 0;\n\n let withdraws_length = getn_u16(&buf[curpos..(curpos + 2)]) as usize;\n\n curpos += 2;\n\n let withdraws_end = curpos + withdraws_length;\n\n match peer.peer_mode {\n\n BgpTransportMode::IPv4 => {\n\n if peer.check_addpath_receive(1, 1) {\n\n let r = decode_pathid_bgpitems_from(&buf[curpos..withdraws_end])?;\n\n self.withdraws = BgpAddrs::IPV4UP(r.0);\n", "file_path": "src/message/update/mod.rs", "rank": 43, "score": 61082.0672200766 }, { "content": " /// path attributes\n\n pub attrs: Vec<BgpAttrItem>,\n\n}\n\nimpl BgpUpdateMessage {\n\n /// counstructs new empty update message.\n\n pub fn new() -> BgpUpdateMessage {\n\n BgpUpdateMessage {\n\n updates: BgpAddrs::None,\n\n withdraws: BgpAddrs::None,\n\n attrs: Vec::new(),\n\n }\n\n 
}\n\n /// returns origin attribute.\n\n pub fn get_attr_origin(&self) -> Option<&BgpOrigin> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::Origin(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n", "file_path": "src/message/update/mod.rs", "rank": 44, "score": 61068.603591984 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! This module contains BGP update message - most important one, which carries roting information.\n\n\n\nuse crate::prelude::*;\n\nuse crate::*;\n\n\n\n/// BGP update message, which carries routing information.\n\n#[derive(Debug)]\n\npub struct BgpUpdateMessage {\n\n /// NLRI updates\n\n pub updates: BgpAddrs,\n\n /// NLRI withdraws\n\n pub withdraws: BgpAddrs,\n", "file_path": "src/message/update/mod.rs", "rank": 45, "score": 61068.44478813406 }, { "content": " if pathattr_end > buf.len() {\n\n return Err(BgpError::protocol_error());\n\n }\n\n while curpos < pathattr_end {\n\n //flags 0\n\n //tc 1\n\n let flags = buf[curpos];\n\n let tc = buf[curpos + 1];\n\n let attrlen = if (flags & 16) > 0 {\n\n curpos += 4;\n\n getn_u16(&buf[(curpos - 2)..curpos]) as usize\n\n } else {\n\n curpos += 3;\n\n buf[curpos - 1] as usize\n\n };\n\n if (curpos + attrlen) > pathattr_end {\n\n return Err(BgpError::protocol_error());\n\n }\n\n //println!(\"PA flags {:?} TC {:?} len {:?}\", flags, tc, attrlen);\n\n //https://www.iana.org/assignments/bgp-parameters/bgp-parameters.xhtml\n", "file_path": "src/message/update/mod.rs", "rank": 46, "score": 61067.33531460192 }, { "content": " return Err(BgpError::too_many_data());\n\n }\n\n setn_u16(wlen as u16, &mut buf[curpos..]);\n\n curpos += 2 + wlen;\n\n }\n\n 
BgpAddrs::IPV4UP(ref wdrw) => {\n\n let wlen = encode_pathid_bgpitems_to(&wdrw, &mut buf[curpos + 2..])?;\n\n if wlen > 65535 {\n\n return Err(BgpError::too_many_data());\n\n }\n\n setn_u16(wlen as u16, &mut buf[curpos..]);\n\n curpos += 2 + wlen;\n\n }\n\n _ => {\n\n setn_u16(0, buf);\n\n curpos = 2;\n\n }\n\n },\n\n BgpTransportMode::IPv6 => match self.withdraws {\n\n BgpAddrs::IPV6U(ref wdrw) => {\n", "file_path": "src/message/update/mod.rs", "rank": 47, "score": 61067.02918733685 }, { "content": " };\n\n let pathattrlen_pos = curpos;\n\n curpos += 2;\n\n for paitem in self.attrs.iter() {\n\n if (curpos - pathattrlen_pos) > 65535 {\n\n return Err(BgpError::static_str(\"Invalid path attribute length\"));\n\n }\n\n curpos += paitem.encode_to(peer, &mut buf[curpos..])?\n\n }\n\n setn_u16(\n\n (curpos - pathattrlen_pos - 2) as u16,\n\n &mut buf[pathattrlen_pos..(pathattrlen_pos + 2)],\n\n );\n\n match peer.peer_mode {\n\n BgpTransportMode::IPv4 => match self.updates {\n\n BgpAddrs::IPV4U(ref upds) => {\n\n curpos += encode_bgpitems_to(&upds, &mut buf[curpos..])?;\n\n }\n\n BgpAddrs::IPV4UP(ref upds) => {\n\n curpos += encode_pathid_bgpitems_to(&upds, &mut buf[curpos..])?;\n", "file_path": "src/message/update/mod.rs", "rank": 48, "score": 61066.974053901875 }, { "content": " }\n\n _ => {}\n\n },\n\n BgpTransportMode::IPv6 => match self.updates {\n\n BgpAddrs::IPV6U(ref upds) => {\n\n curpos += encode_bgpitems_to(&upds, &mut buf[curpos..])?;\n\n }\n\n BgpAddrs::IPV6UP(ref upds) => {\n\n curpos += encode_pathid_bgpitems_to(&upds, &mut buf[curpos..])?;\n\n }\n\n _ => {}\n\n },\n\n };\n\n Ok(curpos)\n\n }\n\n}\n", "file_path": "src/message/update/mod.rs", "rank": 49, "score": 61066.63995011426 }, { "content": " let wlen = encode_bgpitems_to(&wdrw, &mut buf[curpos + 2..])?;\n\n if wlen > 65535 {\n\n return Err(BgpError::too_many_data());\n\n }\n\n setn_u16(wlen as u16, &mut buf[curpos..]);\n\n curpos += 2 + wlen;\n\n }\n\n BgpAddrs::IPV6UP(ref wdrw) => {\n\n let wlen = 
encode_pathid_bgpitems_to(&wdrw, &mut buf[curpos + 2..])?;\n\n if wlen > 65535 {\n\n return Err(BgpError::too_many_data());\n\n }\n\n setn_u16(wlen as u16, &mut buf[curpos..]);\n\n curpos += 2 + wlen;\n\n }\n\n _ => {\n\n setn_u16(0, buf);\n\n curpos = 2;\n\n }\n\n },\n", "file_path": "src/message/update/mod.rs", "rank": 50, "score": 61065.09021954807 }, { "content": " self.attrs.push(BgpAttrItem::decode_from(\n\n peer,\n\n tc,\n\n flags,\n\n attrlen,\n\n &buf[curpos..(curpos + attrlen)],\n\n )?);\n\n curpos += attrlen;\n\n }\n\n match peer.peer_mode {\n\n BgpTransportMode::IPv4 => {\n\n if peer.check_addpath_receive(1, 1) {\n\n let r = decode_pathid_bgpitems_from(&buf[curpos..])?;\n\n self.updates = BgpAddrs::IPV4UP(r.0);\n\n } else {\n\n let r = decode_bgpitems_from(&buf[curpos..])?;\n\n self.updates = BgpAddrs::IPV4U(r.0);\n\n }\n\n }\n\n BgpTransportMode::IPv6 => {\n", "file_path": "src/message/update/mod.rs", "rank": 51, "score": 61064.66975315679 }, { "content": " }\n\n }\n\n None\n\n }\n\n /// returns MPUpdates\n\n pub fn get_mpupdates(&self) -> Option<&BgpMPUpdates> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::MPUpdates(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n\n /// returns MPWithdraws\n\n pub fn get_mpwithdraws(&self) -> Option<&BgpMPWithdraws> {\n\n for i in self.attrs.iter() {\n\n match i {\n", "file_path": "src/message/update/mod.rs", "rank": 52, "score": 61062.94501246272 }, { "content": " } else {\n\n let r = decode_bgpitems_from(&buf[curpos..withdraws_end])?;\n\n self.withdraws = BgpAddrs::IPV4U(r.0);\n\n }\n\n }\n\n BgpTransportMode::IPv6 => {\n\n if peer.check_addpath_receive(2, 1) {\n\n let r = decode_pathid_bgpitems_from(&buf[curpos..withdraws_end])?;\n\n self.withdraws = BgpAddrs::IPV6UP(r.0);\n\n } else {\n\n let r = decode_bgpitems_from(&buf[curpos..withdraws_end])?;\n\n self.withdraws = BgpAddrs::IPV6U(r.0);\n\n }\n\n }\n\n };\n\n curpos = withdraws_end;\n\n let pathattr_len = 
getn_u16(&buf[curpos..(curpos + 2)]) as usize;\n\n curpos += 2;\n\n //println!(\"Path attributes length: {:?}\", pathattr_len);\n\n let pathattr_end = curpos + pathattr_len;\n", "file_path": "src/message/update/mod.rs", "rank": 53, "score": 61062.69221249751 }, { "content": " }\n\n }\n\n None\n\n }\n\n /// returns aspath attribute.\n\n pub fn get_attr_aspath(&self) -> Option<&BgpASpath> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::ASPath(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n\n /// returns community list attribute.\n\n pub fn get_attr_communitylist(&self) -> Option<&BgpCommunityList> {\n\n for i in self.attrs.iter() {\n\n match i {\n", "file_path": "src/message/update/mod.rs", "rank": 54, "score": 61058.3507315718 }, { "content": " /// returns extended community list attribute.\n\n pub fn get_attr_extcommunitylist(&self) -> Option<&BgpExtCommunityList> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::ExtCommunityList(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n\n /// returns next hop attribute.\n\n pub fn get_attr_nexthop(&self) -> Option<&BgpNextHop> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::NextHop(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n", "file_path": "src/message/update/mod.rs", "rank": 55, "score": 61057.92435622467 }, { "content": " BgpAttrItem::CommunityList(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n\n /// returns large community list attribute.\n\n pub fn get_attr_largecommunitylist(&self) -> Option<&BgpLargeCommunityList> {\n\n for i in self.attrs.iter() {\n\n match i {\n\n BgpAttrItem::LargeCommunityList(n) => {\n\n return Some(&n);\n\n }\n\n _ => {}\n\n }\n\n }\n\n None\n\n }\n", "file_path": "src/message/update/mod.rs", "rank": 56, "score": 61056.73012679876 }, { "content": "/// NLRI with bits length\n\npub trait BgpItem<T: std::marker::Sized> {\n\n fn extract_bits_from(bits: u8, buf: 
&[u8]) -> Result<(T, usize), BgpError>;\n\n fn set_bits_to(&self, buf: &mut [u8]) -> Result<(u8, usize), BgpError>;\n\n fn prefixlen(&self) -> usize;\n\n}\n", "file_path": "src/afi/mod.rs", "rank": 57, "score": 57668.575105172014 }, { "content": "/// This trait represens NLRI which have sequental chain encoding with opaque length.\n\npub trait BgpAddrItem<T: std::marker::Sized> {\n\n /// Decode from buffer. Returns entity and consumed buffer length, or error.\n\n fn decode_from(mode: BgpTransportMode, buf: &[u8]) -> Result<(T, usize), BgpError>;\n\n /// Encode entity into the buffer. Returns consumed buffer length, or error.\n\n fn encode_to(&self, mode: BgpTransportMode, buf: &mut [u8]) -> Result<usize, BgpError>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 57668.575105172014 }, { "content": "/// NLRI with 2-byte length on each item\n\npub trait BgpItemLong<T: std::marker::Sized> {\n\n fn extract_from(size: usize, buf: &[u8]) -> Result<T, BgpError>;\n\n fn pack_to(&self, _buf: &mut [u8]) -> Result<usize, BgpError> {\n\n unimplemented!()\n\n }\n\n}\n\n\n\n#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug)]\n\npub enum BgpAddr {\n\n None,\n\n V4(std::net::Ipv4Addr),\n\n V6(std::net::Ipv6Addr),\n\n V4RD(BgpIPv4RD),\n\n V6RD(BgpIPv6RD),\n\n L2(BgpL2),\n\n MVPN(BgpMVPN),\n\n}\n\n\n\n/// Any kind of prefix - v4 or v6\n\n#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug)]\n", "file_path": "src/afi/mod.rs", "rank": 59, "score": 55827.52701578094 }, { "content": " pub fn insufficient_buffer_size() -> BgpError {\n\n BgpError::Static(\"Insufficient buffer size\")\n\n }\n\n /// Just says that we have common protocol error.\n\n pub fn protocol_error() -> BgpError {\n\n BgpError::Static(\"Protocol error\")\n\n }\n\n /// Just says that data size is too big to be encoded.\n\n pub fn too_many_data() -> BgpError {\n\n BgpError::Static(\"Too many data\")\n\n }\n\n}\n\nimpl std::fmt::Display for BgpError {\n\n fn fmt(&self, f: &mut 
std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n BgpError::Static(s) => write!(f, \"BgpError {}\", s),\n\n BgpError::DynStr(s) => write!(f, \"BgpError {}\", s),\n\n BgpError::Other(e) => write!(f, \"BgpError {}\",e)\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 69, "score": 35357.57649768384 }, { "content": " Other(Box<dyn std::error::Error>)\n\n}\n\n\n\nimpl BgpError {\n\n /// Wraps static string error message.\n\n #[inline]\n\n pub fn static_str(ms: &'static str) -> BgpError {\n\n BgpError::Static(ms)\n\n }\n\n /// Wraps std String error message.\n\n #[inline]\n\n pub fn from_string(s: std::string::String) -> BgpError {\n\n BgpError::DynStr(s)\n\n }\n\n /// Wraps any error implements std::error::Error. In Box.\n\n #[inline]\n\n pub fn from_error(e: Box<dyn std::error::Error>) -> BgpError {\n\n BgpError::Other(e)\n\n }\n\n /// Just says that buffer size is too small.\n", "file_path": "src/error.rs", "rank": 70, "score": 35351.44740638405 }, { "content": "}\n\nimpl std::error::Error for BgpError {}\n\n\n\nimpl From<std::io::Error> for BgpError {\n\n #[inline]\n\n fn from(error: std::io::Error) -> Self {\n\n BgpError::Other(Box::new(error))\n\n }\n\n}\n\n\n\nimpl From<std::net::AddrParseError> for BgpError {\n\n #[inline]\n\n fn from(error: std::net::AddrParseError) -> Self {\n\n BgpError::Other(Box::new(error))\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 71, "score": 35350.392988333704 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! 
This module contains error struct\n\n\n\n/// This is represents standard library error.\n\n///\n\n/// # Generic usage\n\n///\n\n/// All library methods that can cause errors returns Result<...,BgpError>.\n\n///\n\n#[derive(Debug)]\n\npub enum BgpError {\n\n Static(&'static str),\n\n DynStr(std::string::String),\n", "file_path": "src/error.rs", "rank": 72, "score": 35345.1760873326 }, { "content": "/// FlowSpec NLRI item trait\n\npub trait FSItem<T: std::marker::Sized> {\n\n fn decode_from_fs(buf: &[u8]) -> Result<(T, usize), BgpError>;\n\n fn encode_to_fs(&self, buf: &mut [u8]) -> Result<(), BgpError>;\n\n fn prefixlen(&self) -> usize;\n\n fn get_store_size(&self) -> usize;\n\n}\n\nimpl FSItem<BgpAddrV4> for BgpAddrV4 {\n\n fn decode_from_fs(buf: &[u8]) -> Result<(BgpAddrV4, usize), BgpError> {\n\n let r = BgpAddrV4::from_bits(buf[0], &buf[1..])?;\n\n Ok((r.0, r.1 + 1))\n\n }\n\n fn encode_to_fs(&self, buf: &mut [u8]) -> Result<(), BgpError> {\n\n buf[0] = self.prefixlen;\n\n let _r = self.to_bits(&mut buf[1..])?;\n\n Ok(())\n\n }\n\n fn prefixlen(&self) -> usize {\n\n self.prefixlen as usize\n\n }\n\n fn get_store_size(&self) -> usize {\n", "file_path": "src/afi/flowspec.rs", "rank": 73, "score": 33778.31482955388 }, { "content": "}\n\n\n\nimpl BgpMessageType {\n\n /// decodes BGP message type from byte code\n\n pub fn decode_from(code: u8) -> Result<BgpMessageType, BgpError> {\n\n match code {\n\n 1 => Ok(BgpMessageType::Open),\n\n 2 => Ok(BgpMessageType::Update),\n\n 3 => Ok(BgpMessageType::Notification),\n\n 4 => Ok(BgpMessageType::Keepalive),\n\n _ => Err(BgpError::static_str(\"Invalid message type\")),\n\n }\n\n }\n\n /// encodes BGP message type into the byte code\n\n pub fn encode(&self) -> u8 {\n\n match self {\n\n BgpMessageType::Open => 1,\n\n BgpMessageType::Update => 2,\n\n BgpMessageType::Notification => 3,\n\n BgpMessageType::Keepalive => 4,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/message/mod.rs", "rank": 74, "score": 31148.963722960845 }, { 
"content": " Ok(())\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n _buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n Ok(0)\n\n }\n\n}\n", "file_path": "src/message/keepalive.rs", "rank": 75, "score": 31138.213604426575 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse crate::*;\n\n\n\n/// BGP keepalive message\n\n#[derive(Debug)]\n\npub struct BgpKeepaliveMessage {}\n\n\n\nimpl BgpMessage for BgpKeepaliveMessage {\n\n fn decode_from(\n\n &mut self,\n\n _peer: &BgpSessionParams,\n\n _buf: &[u8],\n\n ) -> Result<(), BgpError> {\n", "file_path": "src/message/keepalive.rs", "rank": 76, "score": 31137.950449421558 }, { "content": "// Copyright 2021 Vladimir Melnikov.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! 
This module contains BGP messages\n\n\n\nuse crate::*;\n\nuse crate::error::*;\n\n\n\npub mod open;\n\npub mod update;\n\npub mod notification;\n\npub mod keepalive;\n\npub mod attributes;\n\n\n\n/// trait BgpMessage represents BGP protocol message\n", "file_path": "src/message/mod.rs", "rank": 77, "score": 31132.699690996258 }, { "content": "pub trait FSOperItem: Clone + PartialEq + Eq + PartialOrd + Ord {\n\n fn getbyteslen(&self) -> usize;\n\n fn encode_to(&self, buf: &mut [u8]) -> Result<usize, BgpError>;\n\n fn decode_from(buf: &[u8]) -> Result<(Self, usize), BgpError>;\n\n}\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct FSOperValItem {\n\n pub and_bit: bool,\n\n pub lt_cmp: bool,\n\n pub gt_cmp: bool,\n\n pub eq_cmp: bool,\n\n pub value: u32,\n\n}\n\nimpl FSOperValItem {\n\n pub fn new(v: u32, b_and: bool, b_lt: bool, b_gt: bool, b_eq: bool) -> FSOperValItem {\n\n FSOperValItem {\n\n and_bit: b_and,\n\n lt_cmp: b_lt,\n\n gt_cmp: b_gt,\n\n eq_cmp: b_eq,\n", "file_path": "src/afi/flowspec.rs", "rank": 78, "score": 29972.88903247381 }, { "content": " }\n\n }\n\n pub fn decode_from(buf: &[u8]) -> Result<BgpExtCommunity, BgpError> {\n\n match buf.len() {\n\n 8 => Ok(BgpExtCommunity {\n\n ctype: buf[0],\n\n subtype: buf[1],\n\n a: getn_u16(&buf[2..4]),\n\n b: getn_u32(&buf[4..8]),\n\n }),\n\n _ => Err(BgpError::static_str(\"Invalid BgpExtCommunity item length\")),\n\n }\n\n }\n\n pub fn encode_to(&self, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n if buf.len() < 8 {\n\n return Err(BgpError::insufficient_buffer_size());\n\n }\n\n buf[0] = self.ctype;\n\n buf[1] = self.subtype;\n\n setn_u16(self.a, &mut buf[2..4]);\n", "file_path": "src/message/attributes/extcommunity.rs", "rank": 79, "score": 29497.790291589197 }, { "content": " pub fn decode_from(buf: &[u8]) -> Result<BgpLargeCommunity, BgpError> {\n\n match buf.len() {\n\n 12 => Ok(BgpLargeCommunity {\n\n ga: getn_u32(&buf[0..4]),\n\n ldp1: getn_u32(&buf[4..8]),\n\n ldp2: 
getn_u32(&buf[8..12]),\n\n }),\n\n _ => Err(BgpError::static_str(\n\n \"Invalid BgpLargeCommunity item length\",\n\n )),\n\n }\n\n }\n\n pub fn encode_to(&self,buf: &mut [u8]) -> Result<usize, BgpError> {\n\n if buf.len()<12 {\n\n return Err(BgpError::insufficient_buffer_size());\n\n }\n\n setn_u32(self.ga,buf);\n\n setn_u32(self.ldp1,&mut buf[4..8]);\n\n setn_u32(self.ldp2,&mut buf[8..12]);\n\n Ok(12)\n", "file_path": "src/message/attributes/community.rs", "rank": 80, "score": 29497.60031864597 }, { "content": " write!(f, \"BgpMED {:?}\", self.value)\n\n }\n\n}\n\nimpl BgpAttr for BgpMED {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 4,\n\n flags: 128,\n\n }\n\n }\n\n fn encode_to(&self, _peer: &BgpSessionParams, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n if buf.len() >= 4 {\n\n setn_u32(self.value, buf);\n\n Ok(4)\n\n } else {\n\n Err(BgpError::static_str(\"Invalid MED length\"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/message/attributes/med.rs", "rank": 81, "score": 29496.721251329203 }, { "content": " return Err(BgpError::static_str(\n\n \"Invalid path attribute length\",\n\n ));\n\n }\n\n setn_u16(attrlen as u16, &mut buf[2..4]);\n\n } else {\n\n if attrlen > 255 {\n\n return Err(BgpError::static_str(\n\n \"Invalid path attribute length\",\n\n ));\n\n }\n\n buf[2] = attrlen as u8;\n\n }\n\n Ok(curpos + attrlen)\n\n }\n\n pub fn encode_to(\n\n &self,\n\n peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n", "file_path": "src/message/attributes/mod.rs", "rank": 82, "score": 29495.767358394187 }, { "content": " pub fn decode_from(peer: &BgpSessionParams, buf: &[u8]) -> Result<BgpClusterList, BgpError> {\n\n let mut pos: usize = 0;\n\n let mut v = Vec::new();\n\n let itemsize = match peer.peer_mode {\n\n BgpTransportMode::IPv4 => 4,\n\n BgpTransportMode::IPv6 => 16,\n\n };\n\n while (pos + itemsize) <= buf.len() {\n\n v.push(decode_addr_from(&buf[pos..(pos + itemsize)])?);\n\n pos += itemsize;\n\n 
}\n\n Ok(BgpClusterList { value: v })\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpClusterList {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpClusterList\")\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n", "file_path": "src/message/attributes/clusterlist.rs", "rank": 83, "score": 29495.44709086909 }, { "content": " BgpOriginatorID { value: o }\n\n }\n\n pub fn decode_from(peer: &BgpSessionParams, buf: &[u8]) -> Result<BgpOriginatorID, BgpError> {\n\n match peer.peer_mode {\n\n BgpTransportMode::IPv4 => Ok(BgpOriginatorID {\n\n value: decode_addr_from(&buf[..4])?,\n\n }),\n\n BgpTransportMode::IPv6 => Ok(BgpOriginatorID {\n\n value: if buf.len() < 16 {\n\n decode_addr_from(&buf[..4])?\n\n } else {\n\n decode_addr_from(&buf[..16])?\n\n },\n\n }),\n\n }\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpOriginatorID {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpOriginatorID\")\n", "file_path": "src/message/attributes/originatorid.rs", "rank": 84, "score": 29495.30780862841 }, { "content": "impl BgpAttr for BgpAttrSet {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 128,\n\n flags: 224,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n _buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n unimplemented!()\n\n }\n\n}\n\n#[cfg(feature = \"serialization\")]\n\nimpl serde::Serialize for BgpAttrSet {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n let mut state = serializer.serialize_struct(\"BgpAttrSet\", 2)?;\n\n state.serialize_field(\"asn\", &self.asn)?;\n\n state.serialize_field(\"attrs\", &self.attrs)?;\n\n state.end()\n\n }\n\n}\n", "file_path": "src/message/attributes/attrset.rs", "rank": 85, "score": 29495.233974720573 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"ASPath {:?}\", self.value)\n\n 
}\n\n}\n\nimpl BgpAttr for BgpASpath {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 2,\n\n flags: 0x50,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n let mut pos: usize;\n\n if self.value.len()<1 {\n\n\t return Ok(0);\n\n\t}\n", "file_path": "src/message/attributes/aspath.rs", "rank": 86, "score": 29495.232595180463 }, { "content": " Ok(pos)\n\n }\n\n}\n\nimpl BgpCommunity {\n\n pub fn new(v: u32) -> BgpCommunity {\n\n BgpCommunity {\n\n value: v\n\n }\n\n }\n\n pub fn from(h: u16, l: u16) -> BgpCommunity {\n\n BgpCommunity {\n\n value: ((h as u32) << 16) | (l as u32),\n\n }\n\n }\n\n pub fn decode_from(buf: &[u8]) -> Result<BgpCommunity, BgpError> {\n\n match buf.len() {\n\n 4 => Ok(BgpCommunity {\n\n value: getn_u32(&buf),\n\n }),\n\n _ => Err(BgpError::static_str(\n", "file_path": "src/message/attributes/community.rs", "rank": 87, "score": 29494.548908488698 }, { "content": " }\n\n pub fn decode_from(buf: &[u8]) -> Result<BgpMED, BgpError> {\n\n if buf.len() >= 4 {\n\n Ok(BgpMED {\n\n value: getn_u32(&buf),\n\n })\n\n } else {\n\n Err(BgpError::static_str(\"Invalid MED length\"))\n\n }\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpMED {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpMED\")\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\nimpl std::fmt::Display for BgpMED {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "src/message/attributes/med.rs", "rank": 88, "score": 29494.509171480888 }, { "content": "}\n\nimpl std::fmt::Display for BgpClusterList {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"BgpClusterList {:?}\", self.value)\n\n }\n\n}\n\nimpl BgpAttr for BgpClusterList {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 10,\n\n flags: 80,\n\n }\n\n }\n\n fn encode_to(&self, _peer: 
&BgpSessionParams, buf: &mut [u8]) -> Result<usize, BgpError> {\n\n let mut pos: usize = 0;\n\n for i in &self.value {\n\n pos += encode_addr_to(i, &mut buf[pos..])?;\n\n }\n\n Ok(pos)\n\n }\n", "file_path": "src/message/attributes/clusterlist.rs", "rank": 89, "score": 29494.452139215464 }, { "content": " pub fn new(v: u32) -> BgpLocalpref {\n\n BgpLocalpref {\n\n value: v\n\n }\n\n }\n\n pub fn decode_from(buf: &[u8]) -> Result<BgpLocalpref, BgpError> {\n\n if buf.len() >= 4 {\n\n Ok(BgpLocalpref {\n\n value: getn_u32(&buf),\n\n })\n\n } else {\n\n Err(BgpError::static_str(\"Invalid localpref length\"))\n\n }\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpLocalpref {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpLocalpref\")\n\n .field(\"value\", &self.value)\n\n .finish()\n", "file_path": "src/message/attributes/localpref.rs", "rank": 90, "score": 29494.386686919283 }, { "content": " addrs: BgpAddrs::VPNV6M(nlri),\n\n }\n\n }\n\n pub fn decode_from(peer: &BgpSessionParams, buf: &[u8]) -> Result<BgpMPUpdates, BgpError> {\n\n let afi = getn_u16(&buf);\n\n let safi = buf[2];\n\n let mut curpos: usize = 4;\n\n let nh: BgpAddr;\n\n let nhlen = buf[3] as usize;\n\n match afi {\n\n 1 => {\n\n //ipv4\n\n match safi {\n\n 1 | 2 | 4 | 5 | 133 => {\n\n //unicast|multicast|labeled unicast|mvpn|flow\n\n nh = BgpAddr::V4(decode_addrv4_from(&buf[curpos..(curpos + nhlen)])?);\n\n curpos += nhlen;\n\n }\n\n 128 | 129 | 134 => {\n\n //vpnv4u|vpnv4m|flow\n", "file_path": "src/message/attributes/multiproto.rs", "rank": 91, "score": 29494.36731670163 }, { "content": " BgpAttrParams {\n\n typecode: 8,\n\n flags: 192,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n if buf.len()<self.value.len()*4 {\n\n return Err(BgpError::insufficient_buffer_size());\n\n }\n\n let mut curpos: usize = 0;\n\n for c in &self.value {\n\n let lng=c.encode_to(&mut 
buf[curpos..])?;\n\n curpos+=lng;\n\n }\n\n Ok(curpos)\n\n }\n", "file_path": "src/message/attributes/community.rs", "rank": 92, "score": 29494.207288979283 }, { "content": " BgpAttrParams {\n\n typecode: 22,\n\n flags: 192,\n\n }\n\n }\n\n fn encode_to(&self, _peer: &BgpSessionParams, _buf: &mut [u8]) -> Result<usize, BgpError> {\n\n unimplemented!();\n\n }\n\n}\n\n\n\n#[cfg(feature = \"serialization\")]\n\nimpl serde::Serialize for BgpPMSITunnel {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n serializer.serialize_str(format!(\"{}\", self).as_str())\n\n /*\n\n let mut state = serializer.serialize_struct(\"BgpPMSITunnel\", 2)?;\n\n state.serialize_field(\"flags\", &self.flags)?;\n\n state.serialize_field(\"tunnel_type\", &self.tunnel_type)?;\n\n state.end()\n\n */\n\n }\n\n}\n", "file_path": "src/message/attributes/pmsitunnelattr.rs", "rank": 93, "score": 29493.992491431643 }, { "content": "}\n\nimpl std::fmt::Display for BgpAggregatorAS {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"BgpAggregatorAS {:?} {:?}\", self.asn, self.addr)\n\n }\n\n}\n\nimpl BgpAttr for BgpAggregatorAS {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 7,\n\n flags: 64,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n peer: &BgpSessionParams,\n\n _buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n Ok(if peer.has_as32bit { 4 } else { 2 })\n\n }\n", "file_path": "src/message/attributes/aggregatoras.rs", "rank": 94, "score": 29493.944223951203 }, { "content": " /// Aggregation router ID\n\n pub addr: std::net::Ipv4Addr,\n\n}\n\nimpl BgpAggregatorAS {\n\n pub fn decode_from(\n\n peer: &BgpSessionParams,\n\n buf: &[u8],\n\n ) -> Result<BgpAggregatorAS, BgpError> {\n\n if peer.has_as32bit {\n\n if buf.len() == 8 {\n\n Ok(BgpAggregatorAS {\n\n asn: getn_u32(&buf),\n\n addr: decode_addrv4_from(&buf[4..8])?,\n\n })\n\n } else {\n\n Err(BgpError::static_str(\n\n 
\"Invalid AggregatorAS 32-bit length\",\n\n ))\n\n }\n\n } else {\n", "file_path": "src/message/attributes/aggregatoras.rs", "rank": 95, "score": 29493.792532375486 }, { "content": " \"Invalid BgpCommunity item length\",\n\n )),\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n if buf.len()<4 {\n\n return Err(BgpError::insufficient_buffer_size())\n\n };\n\n setn_u32(self.value,buf);\n\n Ok(4)\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpCommunity {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Community\")\n\n .field(\"value\", &self.value)\n\n .finish()\n", "file_path": "src/message/attributes/community.rs", "rank": 96, "score": 29493.58317305459 }, { "content": "impl BgpCommunityList {\n\n pub fn decode_from(buf: &[u8]) -> Result<BgpCommunityList, BgpError> {\n\n let mut pos: usize = 0;\n\n let mut v = std::collections::BTreeSet::new();\n\n while pos < buf.len() {\n\n v.insert(BgpCommunity::decode_from(&buf[pos..(pos + 4)])?);\n\n pos += 4;\n\n }\n\n Ok(BgpCommunityList { value: v })\n\n }\n\n}\n\nimpl std::fmt::Debug for BgpCommunityList {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"BgpCommunityList\")\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\nimpl std::fmt::Display for BgpCommunityList {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "src/message/attributes/community.rs", "rank": 97, "score": 29493.07576759923 }, { "content": " }\n\n}\n\nimpl std::fmt::Display for BgpLocalpref {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"BgpLocalpref {:?}\", self.value)\n\n }\n\n}\n\nimpl BgpAttr for BgpLocalpref {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 5,\n\n flags: 64,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n if 
buf.len() >= 4 {\n", "file_path": "src/message/attributes/localpref.rs", "rank": 98, "score": 29492.87482775842 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl BgpAttr for BgpOrigin {\n\n fn attr(&self) -> BgpAttrParams {\n\n BgpAttrParams {\n\n typecode: 1,\n\n flags: 64,\n\n }\n\n }\n\n fn encode_to(\n\n &self,\n\n _peer: &BgpSessionParams,\n\n buf: &mut [u8],\n\n ) -> Result<usize, BgpError> {\n\n if buf.len() < 1 {\n\n Err(BgpError::static_str(\n\n \"Invalid PA len for BgpOrigin\",\n", "file_path": "src/message/attributes/origin.rs", "rank": 99, "score": 29492.710746706678 } ]
Rust
embassy-stm32/src/pwm/mod.rs
Liamolucko/embassy
4f4b19d920c103453b2b0f9ce7994ca830ddb2d7
use crate::gpio; use crate::rcc::RccPeripheral; use crate::time::Hertz; use core::marker::PhantomData; use embassy::util::Unborrow; use embassy_hal_common::unborrow; use stm32_metapac::timer::vals::Ocm; pub struct Pwm<'d, T: Instance> { phantom: PhantomData<&'d mut T>, } pub struct Ch1 {} pub struct Ch2 {} pub struct Ch3 {} pub struct Ch4 {} #[derive(Clone, Copy)] pub enum Channel { Ch1, Ch2, Ch3, Ch4, } impl<'d, T: Instance> Pwm<'d, T> { pub fn new<F: Into<Hertz>>( _tim: impl Unborrow<Target = T> + 'd, ch1: impl Unborrow<Target = impl PwmPin<T, Ch1>> + 'd, ch2: impl Unborrow<Target = impl PwmPin<T, Ch2>> + 'd, ch3: impl Unborrow<Target = impl PwmPin<T, Ch3>> + 'd, ch4: impl Unborrow<Target = impl PwmPin<T, Ch4>> + 'd, freq: F, ) -> Self { unborrow!(ch1, ch2, ch3, ch4); T::enable(); T::reset(); let r = T::regs(); let mut this = Pwm { phantom: PhantomData, }; unsafe { ch1.configure(); ch2.configure(); ch3.configure(); ch4.configure(); } unsafe { use stm32_metapac::timer::vals::Dir; this.set_freq(freq); r.cr1().write(|w| { w.set_cen(true); w.set_dir(Dir::UP) }); this.set_ocm(Channel::Ch1, Ocm::PWMMODE1); this.set_ocm(Channel::Ch2, Ocm::PWMMODE1); this.set_ocm(Channel::Ch3, Ocm::PWMMODE1); this.set_ocm(Channel::Ch4, Ocm::PWMMODE1); } this } unsafe fn set_ocm(&mut self, channel: Channel, mode: Ocm) { let r = T::regs(); match channel { Channel::Ch1 => r.ccmr_output(0).modify(|w| w.set_ocm(0, mode)), Channel::Ch2 => r.ccmr_output(0).modify(|w| w.set_ocm(1, mode)), Channel::Ch3 => r.ccmr_output(1).modify(|w| w.set_ocm(0, mode)), Channel::Ch4 => r.ccmr_output(1).modify(|w| w.set_ocm(1, mode)), } } unsafe fn set_enable(&mut self, channel: Channel, enable: bool) { let r = T::regs(); match channel { Channel::Ch1 => r.ccer().modify(|w| w.set_cce(0, enable)), Channel::Ch2 => r.ccer().modify(|w| w.set_cce(1, enable)), Channel::Ch3 => r.ccer().modify(|w| w.set_cce(2, enable)), Channel::Ch4 => r.ccer().modify(|w| w.set_cce(3, enable)), } } pub fn enable(&mut self, channel: 
Channel) { unsafe { self.set_enable(channel, true) } } pub fn disable(&mut self, channel: Channel) { unsafe { self.set_enable(channel, false) } } pub fn set_freq<F: Into<Hertz>>(&mut self, freq: F) { use core::convert::TryInto; let clk = T::frequency(); let r = T::regs(); let freq: Hertz = freq.into(); let ticks: u32 = clk.0 / freq.0; let psc: u16 = (ticks / (1 << 16)).try_into().unwrap(); let arr: u16 = (ticks / (u32::from(psc) + 1)).try_into().unwrap(); unsafe { r.psc().write(|w| w.set_psc(psc)); r.arr().write(|w| w.set_arr(arr)); } } pub fn get_max_duty(&self) -> u32 { let r = T::regs(); unsafe { r.arr().read().arr() as u32 } } pub fn set_duty(&mut self, channel: Channel, duty: u32) { use core::convert::TryInto; assert!(duty < self.get_max_duty()); let duty: u16 = duty.try_into().unwrap(); let r = T::regs(); unsafe { match channel { Channel::Ch1 => r.ccr(0).modify(|w| w.set_ccr(duty)), Channel::Ch2 => r.ccr(1).modify(|w| w.set_ccr(duty)), Channel::Ch3 => r.ccr(2).modify(|w| w.set_ccr(duty)), Channel::Ch4 => r.ccr(3).modify(|w| w.set_ccr(duty)), } } } } pub(crate) mod sealed { pub trait Instance { fn regs() -> crate::pac::timer::TimGp16; } } pub trait Instance: sealed::Instance + Sized + RccPeripheral + 'static {} #[allow(unused)] macro_rules! impl_timer { ($inst:ident) => { impl crate::pwm::sealed::Instance for crate::peripherals::$inst { fn regs() -> crate::pac::timer::TimGp16 { crate::pac::timer::TimGp16(crate::pac::$inst.0) } } impl crate::pwm::Instance for crate::peripherals::$inst {} }; } pub trait PwmPin<Timer, Channel>: gpio::OptionalPin { unsafe fn configure(&mut self); } impl<Timer, Channel> PwmPin<Timer, Channel> for gpio::NoPin { unsafe fn configure(&mut self) {} } #[allow(unused)] macro_rules! 
impl_pwm_pin { ($timer:ident, $channel:ident, $pin:ident, $af:expr) => { impl crate::pwm::PwmPin<crate::peripherals::$timer, crate::pwm::$channel> for crate::peripherals::$pin { unsafe fn configure(&mut self) { use crate::gpio::sealed::{AFType, Pin}; use crate::gpio::Speed; self.set_low(); self.set_speed(Speed::VeryHigh); self.set_as_af($af, AFType::OutputPushPull); } } }; } crate::pac::peripherals!( (timer, $inst:ident) => { impl_timer!($inst); }; ); crate::pac::peripheral_pins!( ($inst:ident, timer,TIM_GP16, $pin:ident, CH1, $af:expr) => { impl_pwm_pin!($inst, Ch1, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH2, $af:expr) => { impl_pwm_pin!($inst, Ch2, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH3, $af:expr) => { impl_pwm_pin!($inst, Ch3, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH4, $af:expr) => { impl_pwm_pin!($inst, Ch4, $pin, $af); }; );
use crate::gpio; use crate::rcc::RccPeripheral; use crate::time::Hertz; use core::marker::PhantomData; use embassy::util::Unborrow; use embassy_hal_common::unborrow; use stm32_metapac::timer::vals::Ocm; pub struct Pwm<'d, T: Instance> { phantom: PhantomData<&'d mut T>, } pub struct Ch1 {} pub struct Ch2 {} pub struct Ch3 {} pub struct Ch4 {} #[derive(Clone, Copy)] pub enum Channel { Ch1, Ch2, Ch3, Ch4, } impl<'d, T: Instance> Pwm<'d, T> { pub fn new<F: Into<Hertz>>( _tim: impl Unborrow<Target = T> + 'd, ch1: impl Unborrow<Target = impl PwmPin<T, Ch1>> + 'd, ch2: impl Unborrow<Target = impl PwmPin<T, Ch2>> + 'd, ch3: impl Unborrow<Target = impl PwmPin<T, Ch3>> + 'd, ch4: impl Unborrow<Target = impl PwmPin<T, Ch4>> + 'd, freq: F, ) -> Self { unborrow!(ch1, ch2, ch3, ch4); T::enable(); T::reset(); let r = T::regs(); let mut this = Pwm { phantom: PhantomData, }; unsafe { ch1.configure(); ch2.configure(); ch3.configure(); ch4.configure(); } unsafe { use stm32_metapac::timer::vals::Dir; this.set_freq(freq); r.cr1().write(|w| { w.set_cen(true); w.set_dir(Dir::UP) }); this.set_ocm(Channel::Ch1, Ocm::PWMMODE1); this.set_ocm(Channel::Ch2, Ocm::PWMMODE1); this.set_ocm(Channel::Ch3, Ocm::PWMMODE1); this.set_ocm(Channel::Ch4, Ocm::PWMMODE1); } this } unsafe fn set_ocm(&mut self, channel: Channel, mode: Ocm) { let r = T::regs(); match channel { Channel::Ch1 => r.ccmr_output(0).modify(|w| w.set_ocm(0, mode)), Channel::Ch2 => r.ccmr_output(0).modify(|w| w.set_ocm(1, mode)), Channel::Ch3 => r.ccmr_output(1).modify(|w| w.set_ocm(0, mode)), Channel::Ch4 => r.ccmr_output(1).modify(|w| w.set_ocm(1, mode)), } } unsafe fn set_enable(&mut self, channel: Channel, enable: bool) { let r = T::regs(); match channel { Channel::Ch1 => r.ccer().modify(|w| w.set_cce(0, enable)), Channel::Ch2 => r.ccer().modify(|w| w.set_cce(1, enable)), Channel::Ch3 => r.ccer().modify(|w| w.set_cce(2, enable)), Channel::Ch4 => r.ccer().modify(|w| w.set_cce(3, enable)), } } pub fn enable(&mut self, channel: 
Channel) { unsafe { self.set_enable(channel, true) } } pub fn disable(&mut self, channel: Channel) { unsafe { self.set_enable(channel, false) } }
pub fn get_max_duty(&self) -> u32 { let r = T::regs(); unsafe { r.arr().read().arr() as u32 } } pub fn set_duty(&mut self, channel: Channel, duty: u32) { use core::convert::TryInto; assert!(duty < self.get_max_duty()); let duty: u16 = duty.try_into().unwrap(); let r = T::regs(); unsafe { match channel { Channel::Ch1 => r.ccr(0).modify(|w| w.set_ccr(duty)), Channel::Ch2 => r.ccr(1).modify(|w| w.set_ccr(duty)), Channel::Ch3 => r.ccr(2).modify(|w| w.set_ccr(duty)), Channel::Ch4 => r.ccr(3).modify(|w| w.set_ccr(duty)), } } } } pub(crate) mod sealed { pub trait Instance { fn regs() -> crate::pac::timer::TimGp16; } } pub trait Instance: sealed::Instance + Sized + RccPeripheral + 'static {} #[allow(unused)] macro_rules! impl_timer { ($inst:ident) => { impl crate::pwm::sealed::Instance for crate::peripherals::$inst { fn regs() -> crate::pac::timer::TimGp16 { crate::pac::timer::TimGp16(crate::pac::$inst.0) } } impl crate::pwm::Instance for crate::peripherals::$inst {} }; } pub trait PwmPin<Timer, Channel>: gpio::OptionalPin { unsafe fn configure(&mut self); } impl<Timer, Channel> PwmPin<Timer, Channel> for gpio::NoPin { unsafe fn configure(&mut self) {} } #[allow(unused)] macro_rules! 
impl_pwm_pin { ($timer:ident, $channel:ident, $pin:ident, $af:expr) => { impl crate::pwm::PwmPin<crate::peripherals::$timer, crate::pwm::$channel> for crate::peripherals::$pin { unsafe fn configure(&mut self) { use crate::gpio::sealed::{AFType, Pin}; use crate::gpio::Speed; self.set_low(); self.set_speed(Speed::VeryHigh); self.set_as_af($af, AFType::OutputPushPull); } } }; } crate::pac::peripherals!( (timer, $inst:ident) => { impl_timer!($inst); }; ); crate::pac::peripheral_pins!( ($inst:ident, timer,TIM_GP16, $pin:ident, CH1, $af:expr) => { impl_pwm_pin!($inst, Ch1, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH2, $af:expr) => { impl_pwm_pin!($inst, Ch2, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH3, $af:expr) => { impl_pwm_pin!($inst, Ch3, $pin, $af); }; ($inst:ident, timer,TIM_GP16, $pin:ident, CH4, $af:expr) => { impl_pwm_pin!($inst, Ch4, $pin, $af); }; );
pub fn set_freq<F: Into<Hertz>>(&mut self, freq: F) { use core::convert::TryInto; let clk = T::frequency(); let r = T::regs(); let freq: Hertz = freq.into(); let ticks: u32 = clk.0 / freq.0; let psc: u16 = (ticks / (1 << 16)).try_into().unwrap(); let arr: u16 = (ticks / (u32::from(psc) + 1)).try_into().unwrap(); unsafe { r.psc().write(|w| w.set_psc(psc)); r.arr().write(|w| w.set_arr(arr)); } }
function_block-full_function
[ { "content": "/// Low power blocking wait loop using WFE/SEV.\n\npub fn low_power_wait_until(mut condition: impl FnMut() -> bool) {\n\n while !condition() {\n\n // WFE might \"eat\" an event that would have otherwise woken the executor.\n\n cortex_m::asm::wfe();\n\n }\n\n // Retrigger an event to be transparent to the executor.\n\n cortex_m::asm::sev();\n\n}\n", "file_path": "embassy-hal-common/src/lib.rs", "rank": 0, "score": 348813.2899789337 }, { "content": "/// Creates a future which copies all the bytes from one object to another.\n\n///\n\n/// The returned future will copy all the bytes read from this `AsyncBufRead` into the\n\n/// `writer` specified. This future will only complete once the `reader` has hit\n\n/// EOF and all bytes have been written to and flushed from the `writer`\n\n/// provided.\n\n///\n\n/// On success the number of bytes is returned.\n\n///\n\n/// # Examples\n\n///\n\n/// ``` ignore\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt, Cursor};\n\n///\n\n/// let reader = Cursor::new([1, 2, 3, 4]);\n\n/// let mut writer = Cursor::new(vec![0u8; 5]);\n\n///\n\n/// let bytes = io::copy_buf(reader, &mut writer).await?;\n\n/// writer.close().await?;\n\n///\n\n/// assert_eq!(bytes, 4);\n\n/// assert_eq!(writer.into_inner(), [1, 2, 3, 4, 0]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn copy_buf<R, W>(reader: R, writer: &mut W) -> CopyBuf<'_, R, W>\n\nwhere\n\n R: AsyncBufRead,\n\n W: AsyncWrite + Unpin + ?Sized,\n\n{\n\n CopyBuf {\n\n reader,\n\n writer,\n\n amt: 0,\n\n }\n\n}\n\n\n\n/// Future for the [`copy_buf()`] function.\n\n#[pin_project]\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct CopyBuf<'a, R, W: ?Sized> {\n\n #[pin]\n\n reader: R,\n\n writer: &'a mut W,\n", "file_path": "embassy/src/io/util/copy_buf.rs", "rank": 1, "score": 309122.4739412153 }, { "content": "pub fn in_thread_mode() -> bool {\n\n 
#[cfg(feature = \"std\")]\n\n return Some(\"main\") == std::thread::current().name();\n\n\n\n #[cfg(not(feature = \"std\"))]\n\n return cortex_m::peripheral::SCB::vect_active()\n\n == cortex_m::peripheral::scb::VectActive::ThreadMode;\n\n}\n\n\n\n/// A \"mutex\" that does nothing and cannot be shared between threads.\n\npub struct NoopMutex<T> {\n\n inner: T,\n\n}\n\n\n\nimpl<T> NoopMutex<T> {\n\n pub const fn new(value: T) -> Self {\n\n NoopMutex { inner: value }\n\n }\n\n}\n\n\n", "file_path": "embassy/src/blocking_mutex/mod.rs", "rank": 2, "score": 272372.7312716027 }, { "content": "pub fn is_config_up() -> bool {\n\n STACK.borrow().borrow().as_ref().unwrap().config_up\n\n}\n\n\n\npub async fn run() {\n\n futures::future::poll_fn(|cx| {\n\n Stack::with(|stack| stack.poll(cx));\n\n Poll::<()>::Pending\n\n })\n\n .await\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 3, "score": 235283.92922411312 }, { "content": "pub fn is_link_up() -> bool {\n\n STACK.borrow().borrow().as_ref().unwrap().link_up\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 4, "score": 235283.92922411312 }, { "content": "pub fn is_init() -> bool {\n\n STACK.borrow().borrow().is_some()\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 5, "score": 235283.92922411312 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n\nmacro_rules! 
impl_pwm {\n\n ($type:ident, $pac_type:ident, $irq:ident) => {\n\n impl crate::pwm::sealed::Instance for peripherals::$type {\n\n fn regs() -> &'static pac::pwm0::RegisterBlock {\n\n unsafe { &*pac::$pac_type::ptr() }\n\n }\n\n }\n\n impl crate::pwm::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n\n}\n", "file_path": "embassy-nrf/src/pwm.rs", "rank": 6, "score": 223977.19360379927 }, { "content": "pub trait Channel: sealed::Channel + Unborrow<Target = Self> + Sized {\n\n /// Returns the number of the channel\n\n fn number(&self) -> usize;\n\n}\n\n\n", "file_path": "embassy-nrf/src/ppi/mod.rs", "rank": 7, "score": 216991.5012558299 }, { "content": "pub trait Channel: sealed::Channel + Unborrow<Target = Self> + 'static {}\n\n\n\npub struct NoDma;\n\n\n\nunsafe impl Unborrow for NoDma {\n\n type Target = NoDma;\n\n\n\n unsafe fn unborrow(self) -> Self::Target {\n\n self\n\n }\n\n}\n\n\n\n// safety: must be called only once at startup\n\npub(crate) unsafe fn init() {\n\n #[cfg(bdma)]\n\n bdma::init();\n\n #[cfg(dma)]\n\n dma::init();\n\n #[cfg(dmamux)]\n\n dmamux::init();\n\n}\n", "file_path": "embassy-stm32/src/dma/mod.rs", "rank": 8, "score": 216991.5012558299 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n\nmacro_rules! 
impl_qspi {\n\n ($type:ident, $pac_type:ident, $irq:ident) => {\n\n impl crate::qspi::sealed::Instance for peripherals::$type {\n\n fn regs() -> &'static pac::qspi::RegisterBlock {\n\n unsafe { &*pac::$pac_type::ptr() }\n\n }\n\n fn state() -> &'static crate::qspi::sealed::State {\n\n static STATE: crate::qspi::sealed::State = crate::qspi::sealed::State::new();\n\n &STATE\n\n }\n\n }\n\n impl crate::qspi::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n\n}\n", "file_path": "embassy-nrf/src/qspi.rs", "rank": 10, "score": 183114.4063485955 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n\nmacro_rules! impl_twim {\n\n ($type:ident, $pac_type:ident, $irq:ident) => {\n\n impl crate::twim::sealed::Instance for peripherals::$type {\n\n fn regs() -> &'static pac::twim0::RegisterBlock {\n\n unsafe { &*pac::$pac_type::ptr() }\n\n }\n\n fn state() -> &'static crate::twim::sealed::State {\n\n static STATE: crate::twim::sealed::State = crate::twim::sealed::State::new();\n\n &STATE\n\n }\n\n }\n\n impl crate::twim::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n\n}\n", "file_path": "embassy-nrf/src/twim.rs", "rank": 11, "score": 183114.4063485955 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n\nmacro_rules! 
impl_spim {\n\n ($type:ident, $pac_type:ident, $irq:ident) => {\n\n impl crate::spim::sealed::Instance for peripherals::$type {\n\n fn regs() -> &'static pac::spim0::RegisterBlock {\n\n unsafe { &*pac::$pac_type::ptr() }\n\n }\n\n fn state() -> &'static crate::spim::sealed::State {\n\n static STATE: crate::spim::sealed::State = crate::spim::sealed::State::new();\n\n &STATE\n\n }\n\n }\n\n impl crate::spim::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n\n}\n", "file_path": "embassy-nrf/src/spim.rs", "rank": 12, "score": 183114.4063485955 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static + Send {\n\n type Interrupt: Interrupt;\n\n}\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 13, "score": 178178.21296794634 }, { "content": "pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static + Send {\n\n type Interrupt: Interrupt;\n\n}\n\n\n\nmacro_rules! impl_uarte {\n\n ($type:ident, $pac_type:ident, $irq:ident) => {\n\n impl crate::uarte::sealed::Instance for peripherals::$type {\n\n fn regs() -> &'static pac::uarte0::RegisterBlock {\n\n unsafe { &*pac::$pac_type::ptr() }\n\n }\n\n fn state() -> &'static crate::uarte::sealed::State {\n\n static STATE: crate::uarte::sealed::State = crate::uarte::sealed::State::new();\n\n &STATE\n\n }\n\n }\n\n impl crate::uarte::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n\n}\n", "file_path": "embassy-nrf/src/uarte.rs", "rank": 14, "score": 178178.21296794634 }, { "content": "enum Mode {\n\n Fast,\n\n Standard,\n\n}\n\n\n\nimpl Mode {\n\n fn f_s(&self) -> i2c::vals::FS {\n\n match self {\n\n Mode::Fast => i2c::vals::FS::FAST,\n\n Mode::Standard => i2c::vals::FS::STANDARD,\n\n }\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/i2c/v1.rs", "rank": 15, "score": 175019.14922954503 }, { "content": "/// Splits a bounded mpsc channel into a `Sender` and `Receiver`.\n\n///\n\n/// All data 
sent on `Sender` will become available on `Receiver` in the same\n\n/// order as it was sent.\n\n///\n\n/// The `Sender` can be cloned to `send` to the same channel from multiple code\n\n/// locations. Only one `Receiver` is valid.\n\n///\n\n/// If the `Receiver` is disconnected while trying to `send`, the `send` method\n\n/// will return a `SendError`. Similarly, if `Sender` is disconnected while\n\n/// trying to `recv`, the `recv` method will return a `RecvError`.\n\n///\n\n/// Note that when splitting the channel, the sender and receiver cannot outlive\n\n/// their channel. The following will therefore fail compilation:\n\n////\n\n/// ```compile_fail\n\n/// use embassy::channel::mpsc;\n\n/// use embassy::channel::mpsc::{Channel, WithThreadModeOnly};\n\n///\n\n/// let (sender, receiver) = {\n\n/// let mut channel = Channel::<WithThreadModeOnly, u32, 3>::with_thread_mode_only();\n\n/// mpsc::split(&mut channel)\n\n/// };\n\n/// ```\n\npub fn split<M, T, const N: usize>(\n\n channel: &mut Channel<M, T, N>,\n\n) -> (Sender<M, T, N>, Receiver<M, T, N>)\n\nwhere\n\n M: MutexKind,\n\n{\n\n let sender = Sender { channel };\n\n let receiver = Receiver { channel };\n\n channel.lock(|c| {\n\n c.register_receiver();\n\n c.register_sender();\n\n });\n\n (sender, receiver)\n\n}\n\n\n\nimpl<'ch, M, T, const N: usize> Receiver<'ch, M, T, N>\n\nwhere\n\n M: MutexKind,\n\n{\n\n /// Receives the next value for this receiver.\n", "file_path": "embassy/src/channel/mpsc.rs", "rank": 16, "score": 174310.78933815763 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static pac::pwm0::RegisterBlock;\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/pwm.rs", "rank": 17, "score": 173354.8482797036 }, { "content": "pub trait TxDmaChannel<T: Instance>: sealed::TxDmaChannel<T> + dma::Channel {}\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 18, "score": 173266.10108030107 }, { "content": "pub trait RxDmaChannel<T: Instance>: sealed::RxDmaChannel<T> + dma::Channel 
{}\n\n\n\ncrate::pac::peripherals!(\n\n (spi, $inst:ident) => {\n\n impl sealed::Instance for peripherals::$inst {\n\n fn regs() -> &'static crate::pac::spi::Spi {\n\n &crate::pac::$inst\n\n }\n\n }\n\n\n\n impl Instance for peripherals::$inst {}\n\n };\n\n);\n\n\n\nmacro_rules! impl_pin {\n\n ($inst:ident, $pin:ident, $signal:ident, $af:expr) => {\n\n impl $signal<peripherals::$inst> for peripherals::$pin {}\n\n\n\n impl sealed::$signal<peripherals::$inst> for peripherals::$pin {\n\n fn af_num(&self) -> u8 {\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 19, "score": 173266.10108030107 }, { "content": "pub trait Channel: sealed::Channel {}\n\n\n\npub struct AnyChannel {\n\n number: u8,\n\n}\n\n\n\nimpl Channel for AnyChannel {}\n\nimpl sealed::Channel for AnyChannel {\n\n fn number(&self) -> u8 {\n\n self.number\n\n }\n\n}\n\n\n\nmacro_rules! channel {\n\n ($type:ident, $num:expr) => {\n\n impl Channel for peripherals::$type {}\n\n impl sealed::Channel for peripherals::$type {\n\n fn number(&self) -> u8 {\n\n $num\n\n }\n", "file_path": "embassy-rp/src/dma.rs", "rank": 20, "score": 171773.49034356693 }, { "content": "pub trait Channel: sealed::Channel + Sized {\n\n fn number(&self) -> usize;\n\n fn degrade(self) -> AnyChannel {\n\n AnyChannel {\n\n number: self.number() as u8,\n\n }\n\n }\n\n}\n\n\n\npub struct AnyChannel {\n\n number: u8,\n\n}\n\nunsafe_impl_unborrow!(AnyChannel);\n\nimpl sealed::Channel for AnyChannel {}\n\nimpl Channel for AnyChannel {\n\n fn number(&self) -> usize {\n\n self.number as usize\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/gpiote.rs", "rank": 22, "score": 166477.07866138383 }, { "content": "pub trait Channel: sealed::Channel + Sized {\n\n fn number(&self) -> usize;\n\n fn degrade(self) -> AnyChannel {\n\n AnyChannel {\n\n number: self.number() as u8,\n\n }\n\n }\n\n}\n\n\n\npub struct AnyChannel {\n\n number: u8,\n\n}\n\nunsafe_impl_unborrow!(AnyChannel);\n\nimpl sealed::Channel for AnyChannel {}\n\nimpl Channel for 
AnyChannel {\n\n fn number(&self) -> usize {\n\n self.number as usize\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/exti.rs", "rank": 23, "score": 166477.07866138383 }, { "content": "#[inline]\n\nfn rfbusys() -> bool {\n\n // safety: atmoic read with no side-effects\n\n //unsafe { (*pac::PWR::ptr()).sr2.read().rfbusys().is_busy() }\n\n let pwr = pac::PWR;\n\n unsafe { pwr.sr2().read().rfbusys() == pac::pwr::vals::Rfbusys::BUSY }\n\n}\n\n\n\n/*\n\n/// Returns `true` if the radio is busy or NSS is low.\n\n///\n\n/// See RM0461 Rev 4 section 5.3 page 181 \"Radio busy management\" for more\n\n/// details.\n", "file_path": "embassy-stm32/src/subghz/mod.rs", "rank": 24, "score": 164833.6663057346 }, { "content": "#[inline]\n\nfn rfbusyms() -> bool {\n\n let pwr = pac::PWR;\n\n unsafe { pwr.sr2().read().rfbusyms() == pac::pwr::vals::Rfbusyms::BUSY }\n\n}\n\n*/\n\n\n\n/// Sub-GHz radio peripheral\n\npub struct SubGhz<'d, Tx, Rx> {\n\n spi: Spi<'d, SUBGHZSPI, Tx, Rx>,\n\n}\n\n\n\nimpl<'d, Tx, Rx> SubGhz<'d, Tx, Rx> {\n\n fn pulse_radio_reset() {\n\n let rcc = pac::RCC;\n\n unsafe {\n\n rcc.csr().modify(|w| w.set_rfrst(true));\n\n rcc.csr().modify(|w| w.set_rfrst(false));\n\n }\n\n }\n\n\n", "file_path": "embassy-stm32/src/subghz/mod.rs", "rank": 25, "score": 164833.6663057346 }, { "content": "pub trait StaticChannel: Channel {\n\n fn degrade(self) -> AnyStaticChannel;\n\n}\n\n\n", "file_path": "embassy-nrf/src/ppi/mod.rs", "rank": 26, "score": 163793.1714425927 }, { "content": "pub trait ConfigurableChannel: Channel {\n\n fn degrade(self) -> AnyConfigurableChannel;\n\n}\n\n\n", "file_path": "embassy-nrf/src/ppi/mod.rs", "rank": 27, "score": 163793.1714425927 }, { "content": " pub trait Channel {}\n\n}\n\n\n", "file_path": "embassy-stm32/src/exti.rs", "rank": 28, "score": 162950.14344210323 }, { "content": " pub trait Channel {}\n\n}\n\n\n", "file_path": "embassy-nrf/src/gpiote.rs", "rank": 29, "score": 162950.14344210323 }, { "content": " pub trait Channel {\n\n fn 
number(&self) -> u8;\n\n\n\n fn regs(&self) -> pac::dma::Channel {\n\n pac::DMA.ch(self.number() as _)\n\n }\n\n }\n\n}\n\n\n", "file_path": "embassy-rp/src/dma.rs", "rank": 30, "score": 162950.14344210323 }, { "content": "fn make_dma_channel_counts(out: &mut String, data: &BTreeMap<String, u8>) {\n\n if data.len() == 0 {\n\n return;\n\n }\n\n write!(\n\n out,\n\n \"#[macro_export]\n\nmacro_rules! dma_channels_count {{\n\n \"\n\n )\n\n .unwrap();\n\n for (name, count) in data {\n\n write!(out, \"({}) => ({});\\n\", name, count,).unwrap();\n\n }\n\n write!(out, \" }}\\n\").unwrap();\n\n}\n\n\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 31, "score": 160542.21943820137 }, { "content": " pub trait Channel {}\n", "file_path": "embassy-nrf/src/ppi/mod.rs", "rank": 32, "score": 160470.6112862526 }, { "content": " pub trait Channel {\n\n /// Starts this channel for writing a stream of words.\n\n ///\n\n /// Safety:\n\n /// - `buf` must be alive for the entire duration of the DMA transfer.\n\n /// - `reg_addr` must be a valid peripheral register address to write to.\n\n unsafe fn start_write<W: super::Word>(\n\n &mut self,\n\n request: Request,\n\n buf: &[W],\n\n reg_addr: *mut W,\n\n );\n\n\n\n /// Starts this channel for writing a word repeatedly.\n\n ///\n\n /// Safety:\n\n /// - `reg_addr` must be a valid peripheral register address to write to.\n\n unsafe fn start_write_repeated<W: super::Word>(\n\n &mut self,\n\n request: Request,\n", "file_path": "embassy-stm32/src/dma/mod.rs", "rank": 33, "score": 160470.6112862526 }, { "content": "#[cfg(usart_v2)]\n\nfn rdr(r: crate::pac::usart::Usart) -> *mut u8 {\n\n r.rdr().ptr() as _\n\n}\n\n\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 34, "score": 158525.01637652863 }, { "content": "#[cfg(usart_v2)]\n\nfn tdr(r: crate::pac::usart::Usart) -> *mut u8 {\n\n r.tdr().ptr() as _\n\n}\n\n\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 35, "score": 158525.01637652863 }, { "content": "pub trait 
RxDma<T: Instance>: sealed::RxDma<T> + dma::Channel {}\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 36, "score": 157404.36875368503 }, { "content": "pub trait TxDma<T: Instance>: sealed::TxDma<T> + dma::Channel {}\n\n\n\nmacro_rules! i2c_state {\n\n (I2C1) => {\n\n 0\n\n };\n\n (I2C2) => {\n\n 1\n\n };\n\n (I2C3) => {\n\n 2\n\n };\n\n (I2C4) => {\n\n 3\n\n };\n\n (I2C5) => {\n\n 4\n\n };\n\n}\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 37, "score": 157404.36875368503 }, { "content": "pub trait RxDma<T: Instance>: sealed::RxDma<T> + dma::Channel {}\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 38, "score": 157404.36875368503 }, { "content": "pub trait TxDma<T: Instance>: sealed::TxDma<T> + dma::Channel {}\n\n\n\ncrate::pac::interrupts!(\n\n ($inst:ident, usart, $block:ident, $signal_name:ident, $irq:ident) => {\n\n impl sealed::Instance for peripherals::$inst {\n\n fn regs(&self) -> crate::pac::usart::Usart {\n\n crate::pac::$inst\n\n }\n\n }\n\n\n\n impl Instance for peripherals::$inst {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n\n\n };\n\n);\n\n\n\nmacro_rules! 
impl_pin {\n\n ($inst:ident, $pin:ident, $signal:ident, $af:expr) => {\n\n impl sealed::$signal<peripherals::$inst> for peripherals::$pin {\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 39, "score": 157404.36875368503 }, { "content": "pub fn gen_chip(\n\n options: &Options,\n\n chip_core_name: &str,\n\n chip: &Chip,\n\n core: &Core,\n\n core_index: usize,\n\n all_peripheral_versions: &mut HashSet<(String, String)>,\n\n) {\n\n let mut ir = ir::IR::new();\n\n\n\n let mut dev = ir::Device {\n\n interrupts: Vec::new(),\n\n peripherals: Vec::new(),\n\n };\n\n\n\n // Load DBGMCU register for chip\n\n let mut dbgmcu: Option<ir::IR> = core.peripherals.iter().find_map(|(name, p)| {\n\n if name == \"DBGMCU\" {\n\n p.block.as_ref().map(|block| {\n\n let bi = BlockInfo::parse(block);\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 40, "score": 156407.03582143242 }, { "content": "fn is_primary_source(path: &Path) -> bool {\n\n let mut current = path;\n\n\n\n loop {\n\n let current_file_name = current.file_name().unwrap().to_str().unwrap();\n\n if current_file_name == \"target\"\n\n || current_file_name == \"stm32-metapac-gen\"\n\n || current_file_name == \"stm32-data\"\n\n {\n\n return false;\n\n }\n\n\n\n if let Some(path) = current.parent() {\n\n current = path.into();\n\n if current == root_dir() {\n\n return true;\n\n }\n\n } else {\n\n return false;\n\n }\n\n }\n\n}\n\n\n", "file_path": "xtask/src/main.rs", "rank": 42, "score": 153476.0397727384 }, { "content": "fn rand(buf: &mut [u8]) {\n\n unsafe { _embassy_rand(buf) }\n\n}\n", "file_path": "embassy-net/src/stack.rs", "rank": 43, "score": 153448.34004536085 }, { "content": " pub trait RxDmaChannel<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 44, "score": 152528.028860041 }, { "content": " pub trait TxDmaChannel<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n\n", "file_path": 
"embassy-stm32/src/spi/mod.rs", "rank": 45, "score": 152528.028860041 }, { "content": "pub trait MuxChannel: sealed::MuxChannel + super::Channel {\n\n type Mux;\n\n}\n\n\n\npac::dma_channels! {\n\n ($channel_peri:ident, $dma_peri:ident, $version:ident, $channel_num:expr, {dmamux: $dmamux:ident, dmamux_channel: $dmamux_channel:expr}) => {\n\n impl sealed::MuxChannel for peripherals::$channel_peri {\n\n const DMAMUX_CH_NUM: u8 = $dmamux_channel;\n\n const DMAMUX_REGS: pac::dmamux::Dmamux = pac::$dmamux;\n\n }\n\n impl MuxChannel for peripherals::$channel_peri {\n\n type Mux = $dmamux;\n\n }\n\n };\n\n}\n\n\n\n/// safety: must be called only once\n\npub(crate) unsafe fn init() {\n\n crate::pac::peripheral_rcc! {\n\n ($name:ident, dmamux, DMAMUX, $clock:ident, ($reg:ident, $field:ident, $set_field:ident), $rst:tt) => {\n\n crate::pac::RCC.$reg().modify(|reg| {\n\n reg.$set_field(true);\n\n });\n\n };\n\n }\n\n}\n", "file_path": "embassy-stm32/src/dma/dmamux.rs", "rank": 46, "score": 152321.97963589494 }, { "content": "pub fn config() -> Config {\n\n #[allow(unused_mut)]\n\n let mut config = Config::default();\n\n\n\n #[cfg(feature = \"stm32h755zi\")]\n\n {\n\n config.rcc.sys_ck = Some(Hertz(400_000_000));\n\n config.rcc.pll1.q_ck = Some(Hertz(100_000_000));\n\n }\n\n\n\n config\n\n}\n", "file_path": "tests/stm32/src/example_common.rs", "rank": 47, "score": 151973.8369654298 }, { "content": "#[allow(unused)]\n\npub fn config() -> Config {\n\n let mut config = Config::default();\n\n config.rcc.sys_ck = Some(400.mhz().into());\n\n config.rcc.pll1.q_ck = Some(100.mhz().into());\n\n config.rcc.enable_dma1 = true;\n\n config\n\n}\n", "file_path": "examples/stm32h7/src/example_common.rs", "rank": 48, "score": 151973.8369654298 }, { "content": "#[allow(unused)]\n\npub fn config() -> Config {\n\n let mut config = Config::default();\n\n config.rcc.sys_ck = Some(200.mhz().into());\n\n config\n\n}\n", "file_path": "examples/stm32f7/src/example_common.rs", "rank": 49, "score": 
151973.8369654298 }, { "content": "/// True if the chip named `name` is supported else false\n\nfn is_supported(name: &str) -> bool {\n\n SUPPORTED_FAMILIES\n\n .iter()\n\n .any(|family| name.starts_with(family))\n\n}\n\n\n", "file_path": "stm32-gen-features/src/lib.rs", "rank": 50, "score": 151234.04720071622 }, { "content": "/// Blocks for at least `duration`.\n\npub fn block_for(duration: Duration) {\n\n let expires_at = Instant::now() + duration;\n\n while Instant::now() < expires_at {}\n\n}\n", "file_path": "embassy/src/time/delay.rs", "rank": 51, "score": 148090.74595537307 }, { "content": "pub fn gen(options: Options) {\n\n fs::create_dir_all(options.out_dir.join(\"src/peripherals\")).unwrap();\n\n fs::create_dir_all(options.out_dir.join(\"src/chips\")).unwrap();\n\n\n\n let mut all_peripheral_versions: HashSet<(String, String)> = HashSet::new();\n\n let mut chip_core_names: Vec<String> = Vec::new();\n\n\n\n for chip_name in &options.chips {\n\n println!(\"Generating {}...\", chip_name);\n\n\n\n let chip = load_chip(&options, chip_name);\n\n for (core_index, core) in chip.cores.iter().enumerate() {\n\n let chip_core_name = match chip.cores.len() {\n\n 1 => chip_name.clone(),\n\n _ => format!(\"{}-{}\", chip_name, core.name),\n\n };\n\n\n\n chip_core_names.push(chip_core_name.clone());\n\n gen_chip(\n\n &options,\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 52, "score": 145858.70065724064 }, { "content": "fn calc_prescs(freq: u32) -> (u8, u8) {\n\n let clk_peri = crate::clocks::clk_peri_freq();\n\n\n\n // final SPI frequency: spi_freq = clk_peri / presc / postdiv\n\n // presc must be in 2..=254, and must be even\n\n // postdiv must be in 1..=256\n\n\n\n // divide extra by 2, so we get rid of the \"presc must be even\" requirement\n\n let ratio = div_roundup(clk_peri, freq * 2);\n\n if ratio > 127 * 256 {\n\n panic!(\"Requested too low SPI frequency\");\n\n }\n\n\n\n let presc = div_roundup(ratio, 256);\n\n let postdiv = if presc == 1 {\n\n 
ratio\n\n } else {\n\n div_roundup(ratio, presc)\n\n };\n\n\n", "file_path": "embassy-rp/src/spi.rs", "rank": 53, "score": 145489.29030862192 }, { "content": "/// Initialize embassy.\n\npub fn init(config: Config) -> Peripherals {\n\n let p = Peripherals::take();\n\n\n\n unsafe {\n\n if config.enable_debug_during_sleep {\n\n dbgmcu::Dbgmcu::enable_all();\n\n }\n\n\n\n gpio::init();\n\n dma::init();\n\n #[cfg(exti)]\n\n exti::init();\n\n\n\n rcc::init(config.rcc);\n\n\n\n // must be after rcc init\n\n #[cfg(feature = \"_time-driver\")]\n\n time_driver::init();\n\n }\n\n\n\n p\n\n}\n", "file_path": "embassy-stm32/src/lib.rs", "rank": 54, "score": 142429.30636472057 }, { "content": "pub fn init(_config: config::Config) -> Peripherals {\n\n // Do this first, so that it panics if user is calling `init` a second time\n\n // before doing anything important.\n\n let peripherals = Peripherals::take();\n\n\n\n unsafe {\n\n clocks::init();\n\n timer::init();\n\n }\n\n\n\n peripherals\n\n}\n", "file_path": "embassy-rp/src/lib.rs", "rank": 55, "score": 137255.1922512891 }, { "content": "pub fn init(config: config::Config) -> Peripherals {\n\n // Do this first, so that it panics if user is calling `init` a second time\n\n // before doing anything important.\n\n let peripherals = Peripherals::take();\n\n\n\n let r = unsafe { &*pac::CLOCK::ptr() };\n\n\n\n // Start HFCLK.\n\n match config.hfclk_source {\n\n config::HfclkSource::Internal => {}\n\n config::HfclkSource::ExternalXtal => {\n\n // Datasheet says this is likely to take 0.36ms\n\n r.events_hfclkstarted.write(|w| unsafe { w.bits(0) });\n\n r.tasks_hfclkstart.write(|w| unsafe { w.bits(1) });\n\n while r.events_hfclkstarted.read().bits() == 0 {}\n\n }\n\n }\n\n\n\n // Configure LFCLK.\n\n #[cfg(not(any(feature = \"_nrf5340\", feature = \"_nrf9160\")))]\n", "file_path": "embassy-nrf/src/lib.rs", "rank": 56, "score": 137255.1922512891 }, { "content": "#[proc_macro]\n\npub fn interrupt_take(item: TokenStream) -> TokenStream 
{\n\n let name = syn::parse_macro_input!(item as syn::Ident);\n\n let name = format!(\"{}\", name);\n\n let name_interrupt = format_ident!(\"{}\", name);\n\n let name_handler = format!(\"__EMBASSY_{}_HANDLER\", name);\n\n\n\n let result = quote! {\n\n {\n\n #[allow(non_snake_case)]\n\n #[export_name = #name]\n\n pub unsafe extern \"C\" fn trampoline() {\n\n extern \"C\" {\n\n #[link_name = #name_handler]\n\n static HANDLER: ::embassy::interrupt::Handler;\n\n }\n\n\n\n let func = HANDLER.func.load(::embassy::export::atomic::Ordering::Relaxed);\n\n let ctx = HANDLER.ctx.load(::embassy::export::atomic::Ordering::Relaxed);\n\n let func: fn(*mut ()) = ::core::mem::transmute(func);\n\n func(ctx)\n", "file_path": "embassy-macros/src/lib.rs", "rank": 57, "score": 136355.37924981696 }, { "content": "#[proc_macro]\n\npub fn interrupt_declare(item: TokenStream) -> TokenStream {\n\n let name = syn::parse_macro_input!(item as syn::Ident);\n\n let name = format_ident!(\"{}\", name);\n\n let name_interrupt = format_ident!(\"{}\", name);\n\n let name_handler = format!(\"__EMBASSY_{}_HANDLER\", name);\n\n\n\n let result = quote! 
{\n\n #[allow(non_camel_case_types)]\n\n pub struct #name_interrupt(());\n\n unsafe impl ::embassy::interrupt::Interrupt for #name_interrupt {\n\n type Priority = crate::interrupt::Priority;\n\n fn number(&self) -> u16 {\n\n use cortex_m::interrupt::InterruptNumber;\n\n let irq = InterruptEnum::#name;\n\n irq.number() as u16\n\n }\n\n unsafe fn steal() -> Self {\n\n Self(())\n\n }\n\n unsafe fn __handler(&self) -> &'static ::embassy::interrupt::Handler {\n", "file_path": "embassy-macros/src/lib.rs", "rank": 58, "score": 136355.37924981696 }, { "content": "fn set_ipv4_addr(iface: &mut Interface, cidr: Ipv4Cidr) {\n\n iface.update_ip_addrs(|addrs| {\n\n let dest = addrs.iter_mut().next().unwrap();\n\n *dest = IpCidr::Ipv4(cidr);\n\n });\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 59, "score": 136228.0348229737 }, { "content": "/// An input that can be used as either or negative end of a ADC differential in the SAADC periperhal.\n\npub trait Input: sealed::Input + Unborrow<Target = Self> {}\n\n\n\nmacro_rules! 
impl_saadc_input {\n\n ($pin:ident, $ch:ident) => {\n\n impl crate::saadc::sealed::Input for crate::peripherals::$pin {\n\n fn channel(&self) -> crate::saadc::InputChannel {\n\n crate::saadc::InputChannel::$ch\n\n }\n\n }\n\n impl crate::saadc::Input for crate::peripherals::$pin {}\n\n };\n\n}\n", "file_path": "embassy-nrf/src/saadc.rs", "rank": 60, "score": 133975.79324837055 }, { "content": "pub trait Pin: Unborrow<Target = Self> + sealed::Pin {\n\n /// Degrade to a generic pin struct\n\n fn degrade(self) -> AnyPin {\n\n AnyPin {\n\n pin_bank: self.pin_bank(),\n\n }\n\n }\n\n}\n\n\n\npub struct AnyPin {\n\n pin_bank: u8,\n\n}\n\nunsafe_impl_unborrow!(AnyPin);\n\nimpl Pin for AnyPin {}\n\nimpl sealed::Pin for AnyPin {\n\n fn pin_bank(&self) -> u8 {\n\n self.pin_bank\n\n }\n\n}\n\n\n\n// ==========================\n\n\n", "file_path": "embassy-rp/src/gpio.rs", "rank": 61, "score": 133970.7050043666 }, { "content": "enum State<T> {\n\n None,\n\n Waiting(Waker),\n\n Signaled(T),\n\n}\n\n\n\nunsafe impl<T: Send> Send for Signal<T> {}\n\nunsafe impl<T: Send> Sync for Signal<T> {}\n\n\n\nimpl<T: Send> Signal<T> {\n\n pub const fn new() -> Self {\n\n Self {\n\n state: UnsafeCell::new(State::None),\n\n }\n\n }\n\n\n\n /// Mark this Signal as completed.\n\n pub fn signal(&self, val: T) {\n\n critical_section::with(|_| unsafe {\n\n let state = &mut *self.state.get();\n", "file_path": "embassy/src/channel/signal.rs", "rank": 62, "score": 132027.73978916896 }, { "content": "/// Get the list of all the chips and their supported cores\n\n///\n\n/// Print errors to `stderr` when something is returned by the glob but is not in the returned\n\n/// [`Vec`]\n\n///\n\n/// This function is slow because all the yaml files are parsed.\n\npub fn chip_names_and_cores() -> Vec<(String, Vec<String>)> {\n\n glob::glob(\"../stm32-data/data/chips/*.yaml\")\n\n .unwrap()\n\n .filter_map(|entry| entry.map_err(|e| eprintln!(\"{:?}\", e)).ok())\n\n .filter_map(|entry| {\n\n if let Some(name) = 
entry.file_stem().and_then(|stem| stem.to_str()) {\n\n Some((name.to_lowercase(), chip_cores(&entry)))\n\n } else {\n\n eprintln!(\"{:?} is not a regular file\", entry);\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "stm32-gen-features/src/lib.rs", "rank": 63, "score": 131463.77368521562 }, { "content": "#[repr(C)]\n\nstruct RDes {\n\n rdes0: VolatileCell<u32>,\n\n rdes1: VolatileCell<u32>,\n\n rdes2: VolatileCell<u32>,\n\n rdes3: VolatileCell<u32>,\n\n}\n\n\n\nimpl RDes {\n\n pub const fn new() -> Self {\n\n Self {\n\n rdes0: VolatileCell::new(0),\n\n rdes1: VolatileCell::new(0),\n\n rdes2: VolatileCell::new(0),\n\n rdes3: VolatileCell::new(0),\n\n }\n\n }\n\n\n\n /// Return true if this RDes is acceptable to us\n\n #[inline(always)]\n\n pub fn valid(&self) -> bool {\n", "file_path": "embassy-stm32/src/eth/v2/descriptors.rs", "rank": 64, "score": 129721.60497701261 }, { "content": "pub trait Instance: sealed::Instance {}\n\n\n\nmacro_rules! impl_instance {\n\n ($type:ident, $irq:ident) => {\n\n impl sealed::Instance for peripherals::$type {\n\n fn regs(&self) -> pac::uart::Uart {\n\n pac::$type\n\n }\n\n }\n\n impl Instance for peripherals::$type {}\n\n };\n\n}\n\n\n\nimpl_instance!(UART0, UART0);\n\nimpl_instance!(UART1, UART1);\n\n\n", "file_path": "embassy-rp/src/uart.rs", "rank": 65, "score": 129331.15966412514 }, { "content": "pub trait Instance: sealed::Instance {}\n\n\n\nmacro_rules! 
impl_instance {\n\n ($type:ident, $irq:ident) => {\n\n impl sealed::Instance for peripherals::$type {\n\n fn regs(&self) -> pac::spi::Spi {\n\n pac::$type\n\n }\n\n }\n\n impl Instance for peripherals::$type {}\n\n };\n\n}\n\n\n\nimpl_instance!(SPI0, Spi0);\n\nimpl_instance!(SPI1, Spi1);\n\n\n", "file_path": "embassy-rp/src/spi.rs", "rank": 66, "score": 129331.15966412514 }, { "content": "struct ChannelState<T, const N: usize> {\n\n queue: Deque<T, N>,\n\n closed: bool,\n\n receiver_registered: bool,\n\n senders_registered: u32,\n\n receiver_waker: WakerRegistration,\n\n senders_waker: WakerRegistration,\n\n}\n\n\n\nimpl<T, const N: usize> ChannelState<T, N> {\n\n const fn new() -> Self {\n\n ChannelState {\n\n queue: Deque::new(),\n\n closed: false,\n\n receiver_registered: false,\n\n senders_registered: 0,\n\n receiver_waker: WakerRegistration::new(),\n\n senders_waker: WakerRegistration::new(),\n\n }\n\n }\n", "file_path": "embassy/src/channel/mpsc.rs", "rank": 67, "score": 128586.38354606429 }, { "content": "pub trait ExtendedInstance: Instance + sealed::ExtendedInstance {}\n\n\n\nmacro_rules! 
impl_timer {\n\n ($type:ident, $pac_type:ident, $irq:ident, $ccs:literal) => {\n\n impl crate::timer::sealed::Instance for peripherals::$type {\n\n const CCS: usize = $ccs;\n\n fn regs() -> &'static pac::timer0::RegisterBlock {\n\n unsafe { &*(pac::$pac_type::ptr() as *const pac::timer0::RegisterBlock) }\n\n }\n\n fn waker(n: usize) -> &'static ::embassy::waitqueue::AtomicWaker {\n\n use ::embassy::waitqueue::AtomicWaker;\n\n const NEW_AW: AtomicWaker = AtomicWaker::new();\n\n static WAKERS: [AtomicWaker; $ccs] = [NEW_AW; $ccs];\n\n &WAKERS[n]\n\n }\n\n }\n\n impl crate::timer::Instance for peripherals::$type {\n\n type Interrupt = crate::interrupt::$irq;\n\n }\n\n };\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 68, "score": 128058.0510204857 }, { "content": "#[repr(C)]\n\nstruct RDes {\n\n rdes0: VolatileCell<u32>,\n\n rdes1: VolatileCell<u32>,\n\n rdes2: VolatileCell<u32>,\n\n rdes3: VolatileCell<u32>,\n\n}\n\n\n\nimpl RDes {\n\n pub const fn new() -> Self {\n\n Self {\n\n rdes0: VolatileCell::new(0),\n\n rdes1: VolatileCell::new(0),\n\n rdes2: VolatileCell::new(0),\n\n rdes3: VolatileCell::new(0),\n\n }\n\n }\n\n\n\n /// Return true if this RDes is acceptable to us\n\n #[inline(always)]\n\n pub fn valid(&self) -> bool {\n", "file_path": "embassy-stm32/src/eth/v1c/rx_desc.rs", "rank": 69, "score": 127469.18464834368 }, { "content": "#[cfg(feature = \"wasm\")]\n\n#[proc_macro_attribute]\n\npub fn main(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let macro_args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let task_fn = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n let macro_args = match MainArgs::from_list(&macro_args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let embassy_path = macro_args.embassy_prefix.append(\"embassy\");\n\n\n\n let mut fail = false;\n\n if task_fn.sig.asyncness.is_none() {\n\n task_fn\n\n .sig\n\n .span()\n\n .unwrap()\n\n .error(\"task 
functions must be async\")\n", "file_path": "embassy-macros/src/lib.rs", "rank": 70, "score": 126973.05881123553 }, { "content": "#[proc_macro_attribute]\n\npub fn interrupt(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut f: ItemFn = syn::parse(input).expect(\"`#[interrupt]` must be applied to a function\");\n\n\n\n if !args.is_empty() {\n\n return parse::Error::new(Span::call_site(), \"This attribute accepts no arguments\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n let fspan = f.span();\n\n let ident = f.sig.ident.clone();\n\n let ident_s = ident.to_string();\n\n\n\n // XXX should we blacklist other attributes?\n\n\n\n let valid_signature = f.sig.constness.is_none()\n\n && f.vis == Visibility::Inherited\n\n && f.sig.abi.is_none()\n\n && f.sig.inputs.is_empty()\n\n && f.sig.generics.params.is_empty()\n", "file_path": "embassy-macros/src/lib.rs", "rank": 71, "score": 126973.05881123553 }, { "content": "#[proc_macro_attribute]\n\npub fn task(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let macro_args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let mut task_fn = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n let macro_args = match TaskArgs::from_list(&macro_args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let embassy_prefix = macro_args.embassy_prefix.append(\"embassy\");\n\n let embassy_path = embassy_prefix.path();\n\n\n\n let pool_size: usize = macro_args.pool_size.unwrap_or(1);\n\n\n\n let mut fail = false;\n\n if task_fn.sig.asyncness.is_none() {\n\n task_fn\n\n .sig\n", "file_path": "embassy-macros/src/lib.rs", "rank": 72, "score": 126973.05881123553 }, { "content": "pub trait OptionalPin: Unborrow<Target = Self> + sealed::OptionalPin + Sized {\n\n type Pin: Pin;\n\n fn pin(&self) -> Option<&Self::Pin>;\n\n fn pin_mut(&mut self) -> Option<&mut Self::Pin>;\n\n\n\n #[inline]\n\n fn psel_bits(&self) -> u32 {\n\n self.pin().map_or(1u32 << 31, 
Pin::psel_bits)\n\n }\n\n\n\n /// Convert from concrete pin type PX_XX to type erased `Option<AnyPin>`.\n\n #[inline]\n\n fn degrade_optional(mut self) -> Option<AnyPin> {\n\n self.pin_mut()\n\n .map(|pin| unsafe { core::ptr::read(pin) }.degrade())\n\n }\n\n}\n\n\n\nimpl<T: Pin> sealed::OptionalPin for T {}\n\nimpl<T: Pin> OptionalPin for T {\n", "file_path": "embassy-nrf/src/gpio.rs", "rank": 73, "score": 125905.59615157482 }, { "content": "pub trait OptionalPin: Unborrow<Target = Self> + sealed::OptionalPin + Sized {\n\n type Pin: Pin;\n\n fn pin(&self) -> Option<&Self::Pin>;\n\n fn pin_mut(&mut self) -> Option<&mut Self::Pin>;\n\n\n\n /// Convert from concrete pin type PIN_XX to type erased `Option<AnyPin>`.\n\n #[inline]\n\n fn degrade_optional(mut self) -> Option<AnyPin> {\n\n self.pin_mut()\n\n .map(|pin| unsafe { core::ptr::read(pin) }.degrade())\n\n }\n\n}\n\n\n\nimpl<T: Pin> sealed::OptionalPin for T {}\n\nimpl<T: Pin> OptionalPin for T {\n\n type Pin = T;\n\n\n\n #[inline]\n\n fn pin(&self) -> Option<&T> {\n\n Some(self)\n", "file_path": "embassy-rp/src/gpio.rs", "rank": 74, "score": 125905.59615157482 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static pac::uarte0::RegisterBlock;\n\n fn state() -> &'static State;\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/uarte.rs", "rank": 75, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs() -> pac::rng::Rng;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/rng.rs", "rank": 76, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static pac::qspi::RegisterBlock;\n\n fn state() -> &'static State;\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/qspi.rs", "rank": 77, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs(&self) -> pac::spi::Spi;\n\n }\n", "file_path": "embassy-rp/src/spi.rs", "rank": 78, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs(&self) -> pac::uart::Uart;\n\n 
}\n", "file_path": "embassy-rp/src/uart.rs", "rank": 79, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n /// The number of CC registers this instance has.\n\n const CCS: usize;\n\n fn regs() -> &'static pac::timer0::RegisterBlock;\n\n /// Storage for the waker for CC register `n`.\n\n fn waker(n: usize) -> &'static AtomicWaker;\n\n }\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 80, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static pac::spim0::RegisterBlock;\n\n fn state() -> &'static State;\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/spim.rs", "rank": 81, "score": 125874.16224537061 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static pac::twim0::RegisterBlock;\n\n fn state() -> &'static State;\n\n }\n\n}\n\n\n", "file_path": "embassy-nrf/src/twim.rs", "rank": 82, "score": 125874.16224537061 }, { "content": "fn make_peripheral_counts(out: &mut String, data: &BTreeMap<String, u8>) {\n\n write!(\n\n out,\n\n \"#[macro_export]\n\nmacro_rules! 
peripheral_count {{\n\n \"\n\n )\n\n .unwrap();\n\n for (name, count) in data {\n\n write!(out, \"({}) => ({});\\n\", name, count,).unwrap();\n\n }\n\n write!(out, \" }}\\n\").unwrap();\n\n}\n\n\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 83, "score": 125529.10878679075 }, { "content": "pub trait Pin: Unborrow<Target = Self> + sealed::Pin + Sized + 'static {\n\n /// Number of the pin within the port (0..31)\n\n #[inline]\n\n fn pin(&self) -> u8 {\n\n self._pin()\n\n }\n\n\n\n /// Port of the pin\n\n #[inline]\n\n fn port(&self) -> Port {\n\n match self.pin_port() / 32 {\n\n 0 => Port::Port0,\n\n #[cfg(feature = \"_gpio-p1\")]\n\n 1 => Port::Port1,\n\n _ => unsafe { unreachable_unchecked() },\n\n }\n\n }\n\n\n\n #[inline]\n\n fn psel_bits(&self) -> u32 {\n", "file_path": "embassy-nrf/src/gpio.rs", "rank": 84, "score": 125386.30927945465 }, { "content": "pub trait Instance: sealed::Instance + 'static {}\n\n\n", "file_path": "embassy-stm32/src/dac/mod.rs", "rank": 85, "score": 123741.2897451272 }, { "content": "pub trait Instance: sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 86, "score": 123741.2897451272 }, { "content": "pub trait Instance: sealed::Instance + 'static {}\n", "file_path": "embassy-stm32/src/sdmmc/v2.rs", "rank": 87, "score": 123741.2897451272 }, { "content": "pub trait Instance: sealed::Instance + 'static {}\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 88, "score": 123741.2897451272 }, { "content": "/// Generate data needed in `../stm32-metapac/Cargo.toml`\n\n///\n\n/// Print errors to `stderr` when something is returned by the glob but is not in the returned\n\n/// [`Vec`]\n\n///\n\n/// # Panic\n\n/// Panics if a file contains yaml syntax errors or if a value does not have a consistent type\n\npub fn stm32_metapac_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String {\n\n let mut result = String::new();\n\n for (chip_name, cores) in 
names_and_cores {\n\n if cores.len() > 1 {\n\n for core_name in cores {\n\n result += &format!(\"{}-{} = []\\n\", chip_name, core_name);\n\n }\n\n } else {\n\n result += &format!(\"{} = []\\n\", chip_name);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "stm32-gen-features/src/lib.rs", "rank": 89, "score": 123679.2798290377 }, { "content": "/// Generate data needed in `../embassy-stm32/Cargo.toml`\n\n///\n\n/// Print errors to `stderr` when something is returned by the glob but is not in the returned\n\n/// [`Vec`]\n\n///\n\n/// # Panic\n\n/// Panics if a file contains yaml syntax errors or if a value does not have a consistent type\n\npub fn embassy_stm32_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String {\n\n let mut result = String::new();\n\n for (chip_name, cores) in names_and_cores.supported() {\n\n if cores.len() > 1 {\n\n for core_name in cores.iter() {\n\n result += &format!(\n\n \"{chip}-{core} = [ \\\"stm32-metapac/{chip}-{core}\\\" ]\\n\",\n\n chip = chip_name,\n\n core = core_name\n\n );\n\n }\n\n } else {\n\n result += &format!(\"{chip} = [ \\\"stm32-metapac/{chip}\\\" ]\\n\", chip = chip_name);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "stm32-gen-features/src/lib.rs", "rank": 90, "score": 123679.2798290377 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static crate::pac::adc::Adc;\n\n fn common_regs() -> &'static crate::pac::adccommon::AdcCommon;\n\n }\n\n\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 91, "score": 123397.83277701025 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static crate::pac::can::Can;\n\n }\n\n\n", "file_path": "embassy-stm32/src/can/bxcan.rs", "rank": 92, "score": 123397.83277701025 }, { "content": " pub trait Instance {\n\n type Interrupt: Interrupt;\n\n\n\n fn inner() -> SdmmcInner;\n\n fn state() -> &'static AtomicWaker;\n\n }\n", "file_path": "embassy-stm32/src/sdmmc/v2.rs", "rank": 93, "score": 123397.83277701025 }, { "content": " pub trait Instance {\n\n fn regs() 
-> &'static crate::pac::spi::Spi;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 94, "score": 123397.83277701025 }, { "content": " pub trait ExtendedInstance {}\n\n\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 95, "score": 123397.83277701025 }, { "content": " T::regs().cr().modify(|reg| {\n\n reg.set_en2(true);\n\n });\n\n }\n\n }\n\n\n\n Self {\n\n ch1,\n\n ch2,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n pub fn enable_channel(&mut self, ch: Channel) -> Result<(), Error> {\n\n match ch {\n\n Channel::Ch1 => {\n\n if self.ch1.is_none() {\n\n Err(Error::UnconfiguredChannel)\n\n } else {\n\n unsafe {\n", "file_path": "embassy-stm32/src/dac/v2.rs", "rank": 99, "score": 50.74795175170573 } ]
Rust
src/lib.rs
JerTH/elfy
2a761bcc4de002b72a538eae3e5ec1d5cc0f3ba8
#![warn(missing_docs)] use std::error::Error; use std::fmt::{ Display, Formatter }; use std::io::{ Read, Seek, SeekFrom }; use std::collections::HashMap; #[macro_use] mod macros; pub mod types; pub mod numeric; pub mod constants; use crate::types::*; pub type ParseElfResult<T> = std::result::Result<T, ParseElfError>; #[derive(Debug)] pub enum ParseElfError { IoError{ inner: std::io::Error }, #[allow(missing_docs)] InvalidSectionType(u32), #[allow(missing_docs)] InvalidProgramFlags(u32), #[allow(missing_docs)] InvalidProgramHeader(u32), #[allow(missing_docs)] InvalidVersion(u32), #[allow(missing_docs)] InvalidMachine(u16), #[allow(missing_docs)] InvalidElfType(u16), #[allow(missing_docs)] InvalidOsAbi(u8), #[allow(missing_docs)] InvalidIdentVersion(u8), #[allow(missing_docs)] InvalidDataFormat(u8), #[allow(missing_docs)] InvalidDataClass(u8), InvalidParsingDescriptor, } impl Error for ParseElfError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { ParseElfError::IoError{ inner } => Some(inner), _ => None, } } } impl Display for ParseElfError { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { write!(f, "{:?}", self) } } impl From<std::io::Error> for ParseElfError { fn from(err: std::io::Error) -> ParseElfError { ParseElfError::IoError{ inner: err } } } trait Parslet { fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> where Self: Sized; } enum Descriptor { None, Data{ format: DataFormat, class: DataClass }, } impl Descriptor { fn data_class(&self) -> ParseElfResult<DataClass> { match self { Descriptor::Data{ class, .. } => Ok(*class), Descriptor::None => Err(ParseElfError::InvalidParsingDescriptor), } } fn data_format(&self) -> ParseElfResult<DataFormat> { match self { Descriptor::Data{ format, .. 
} => Ok(*format), Descriptor::None => Err(ParseElfError::InvalidParsingDescriptor) } } } #[derive(Debug)] pub struct Elf { header: ElfHeader, sections: Vec<Section>, segments: Vec<Segment>, section_map: HashMap<String, usize>, } impl Elf { pub fn load<P: AsRef<std::path::Path>>(path: P) -> ParseElfResult<Elf> { let file = std::fs::File::open(path)?; let mut buf = std::io::BufReader::new(file); Ok(Elf::parse(&mut buf)?) } pub fn parse<R: Read + Seek>(reader: &mut R) -> ParseElfResult<Elf> { let mut descriptor = Descriptor::None; let header = ElfHeader::parse(reader, &mut descriptor)?; let sections = parse_sections(reader, &mut descriptor, &header)?; let segments = parse_segments(reader, &mut descriptor, &header)?; let mut section_map = HashMap::new(); associate_string_table(&mut section_map, &sections, &header); Ok(Elf{ header, sections, segments, section_map }) } pub fn try_get_section(&self, section_name: &str) -> Option<&Section> { self.sections.get(*self.section_map.get(section_name)?) } pub fn header(&self) -> &ElfHeader { &self.header } pub fn sections(&self) -> SectionIter { SectionIter { elf: &self, idx: 0 } } pub fn segments(&self) -> SegmentIter { SegmentIter { elf: &self, idx: 0 } } } fn parse_sections<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor, header: &ElfHeader) -> ParseElfResult<Vec<Section>> { reader.seek(SeekFrom::Start(header.section_headers_offset()))?; let mut sections = Vec::new(); for _ in 0..header.section_header_count() { sections.push(Section::parse(reader, descriptor)?) } Ok(sections) } fn parse_segments<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor, header: &ElfHeader) -> ParseElfResult<Vec<Segment>> { reader.seek(SeekFrom::Start(header.program_headers_offset()))?; let mut segments = Vec::new(); for _ in 0..header.program_header_count() { segments.push(Segment::parse(reader, descriptor)?) 
} Ok(segments) } fn associate_string_table(section_map: &mut HashMap<String, usize>, sections: &[Section], header: &ElfHeader) { if let Some(idx) = header.section_name_table_index() { if let SectionData::Strings(table) = &sections[idx].data() { for (i, _section) in sections.iter().enumerate() { let name = table[i].clone(); section_map.insert(name, i); } } } } pub struct SectionIter<'a> { elf: &'a Elf, idx: usize, } impl<'a> Iterator for SectionIter<'a> { type Item = &'a Section; fn next(&mut self) -> Option<Self::Item> { let item = self.elf.sections.get(self.idx)?; self.idx += 1; Some(item) } } pub struct SegmentIter<'a> { elf: &'a Elf, idx: usize, } impl<'a> Iterator for SegmentIter<'a> { type Item = &'a Segment; fn next(&mut self) -> Option<Self::Item> { let item = self.elf.segments.get(self.idx)?; self.idx += 1; Some(item) } } pub mod prelude { pub use crate::numeric::*; pub use crate::types::*; pub use crate::Elf; } #[cfg(test)] mod test { use super::*; fn _load_example_binary() -> Elf { let elf = Elf::load("examples/example-binary").unwrap(); elf } #[test] fn get_section_bytes() { let elf = _load_example_binary(); let text = elf.try_get_section(".text").unwrap(); if let SectionData::Bytes(_bytes) = text.data() { } } #[test] fn section_iters() { let elf = _load_example_binary(); for (i, s) in elf.sections().enumerate() { match i { 0 => assert_eq!(s.header().section_type(), SectionType::Null), 2 => assert_eq!(s.header().section_type(), SectionType::ProgramData), 5 => assert_eq!(s.header().section_type(), SectionType::SymbolTable), 6 => assert_eq!(s.header().section_type(), SectionType::StringTable), _ => continue } } } #[test] fn segment_iters() { let elf = _load_example_binary(); println!("{:#?}", elf); for (i, h) in elf.segments().enumerate() { match i { 0 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::Phdr), 1 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::Loadable), 2 => assert_eq!(h.header().program_header_type(), 
ProgramHeaderType::Loadable), 3 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::GnuStack), 4 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::ArmExidx), _ => continue } } } }
#![warn(missing_docs)] use std::error::Error; use std::fmt::{ Display, Formatter }; use std::io::{ Read, Seek, SeekFrom }; use std::collections::HashMap; #[macro_use] mod macros; pub mod types; pub mod numeric; pub mod constants; use crate::types::*; pub type ParseElfResult<T> = std::result::Result<T, ParseElfError>; #[derive(Debug)] pub enum ParseElfError { IoError{ inner: std::io::Error }, #[allow(missing_docs)] InvalidSectionType(u32), #[allow(missing_docs)] InvalidProgramFlags(u32), #[allow(missing_docs)] InvalidProgramHeader(u32), #[allow(missing_docs)] InvalidVersion(u32), #[allow(missing_docs)] InvalidMachine(u16), #[allow(missing_docs)] InvalidElfType(u16), #[allow(missing_docs)] InvalidOsAbi(u8), #[allow(missing_docs)] InvalidIdentVersion(u8), #[allow(missing_docs)] InvalidDataFormat(u8), #[allow(missing_docs)] InvalidDataClass(u8), InvalidParsingDescriptor, } impl Error for ParseElfError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { ParseElfError::IoError{ inner } => Some(inner), _ => None, } } } impl Display for ParseElfError { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { write!(f, "{:?}", self) } } impl From<std::io::Error> for ParseElfError { fn from(err: std::io::Error) -> ParseElfError { ParseElfError::IoError{ inner: err } } } trait Parslet { fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> where Self: Sized; } enum Descriptor { None, Data{ format: DataFormat, class: DataClass }, } impl Descriptor { fn data_class(&self) -> ParseElfResult<DataClass> { match self { Descriptor::Data{ class, .. } => Ok(*class), Descriptor::None => Err(ParseElfError::InvalidParsingDescriptor), } } fn data_format(&self) -> ParseElfResult<DataFormat> { match self { Descriptor::Data{ format, .. 
} => Ok(*format), Descriptor::None => Err(ParseElfError::InvalidParsingDescriptor) } } } #[derive(Debug)] pub struct Elf { header: ElfHeader, sections: Vec<Section>, segments: Vec<Segment>, section_map: HashMap<String, usize>, } impl Elf { pub fn load<P: AsRef<std::path::Path>>(path: P) -> ParseElfResult<Elf> { let file = std::fs::File::open(path)?; let mut buf = std::io::BufReader::new(file); Ok(Elf::parse(&mut buf)?) } pub fn parse<R: Read + Seek>(reader: &mut R) -> ParseElfResult<Elf> { let mut descriptor = Descriptor::None; let header = ElfHeader::parse(reader, &mut descriptor)?; let sections = parse_sections(reader, &mut descriptor, &header)?; let segments = parse_segments(reader, &mut descriptor, &header)?; let mut section_map = HashMap::new(); associate_string_table(&mut section_map, &sections, &header); Ok(Elf{ header, sections, segments, section_map }) } pub fn try_get_section(&self, section_name: &str) -> Option<&Section> { self.sections.get(*self.section_map.get(section_name)?) } pub fn header(&self) -> &ElfHeader { &self.header } pub fn sections(&self) -> SectionIter { SectionIter { elf: &self, idx: 0 } } pub fn segments(&self) -> SegmentIter { SegmentIter { elf: &self, idx: 0 } } } fn parse_sections<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor, header: &ElfHeader) -> ParseElfResult<Vec<Section>> { reader.seek(SeekFrom::Start(header.section_headers_offset()))?; let mut sections = Vec::new(); for _ in 0..header.section_header_count() { sections.push(Section::pa
0 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::Phdr), 1 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::Loadable), 2 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::Loadable), 3 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::GnuStack), 4 => assert_eq!(h.header().program_header_type(), ProgramHeaderType::ArmExidx), _ => continue } } } }
rse(reader, descriptor)?) } Ok(sections) } fn parse_segments<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor, header: &ElfHeader) -> ParseElfResult<Vec<Segment>> { reader.seek(SeekFrom::Start(header.program_headers_offset()))?; let mut segments = Vec::new(); for _ in 0..header.program_header_count() { segments.push(Segment::parse(reader, descriptor)?) } Ok(segments) } fn associate_string_table(section_map: &mut HashMap<String, usize>, sections: &[Section], header: &ElfHeader) { if let Some(idx) = header.section_name_table_index() { if let SectionData::Strings(table) = &sections[idx].data() { for (i, _section) in sections.iter().enumerate() { let name = table[i].clone(); section_map.insert(name, i); } } } } pub struct SectionIter<'a> { elf: &'a Elf, idx: usize, } impl<'a> Iterator for SectionIter<'a> { type Item = &'a Section; fn next(&mut self) -> Option<Self::Item> { let item = self.elf.sections.get(self.idx)?; self.idx += 1; Some(item) } } pub struct SegmentIter<'a> { elf: &'a Elf, idx: usize, } impl<'a> Iterator for SegmentIter<'a> { type Item = &'a Segment; fn next(&mut self) -> Option<Self::Item> { let item = self.elf.segments.get(self.idx)?; self.idx += 1; Some(item) } } pub mod prelude { pub use crate::numeric::*; pub use crate::types::*; pub use crate::Elf; } #[cfg(test)] mod test { use super::*; fn _load_example_binary() -> Elf { let elf = Elf::load("examples/example-binary").unwrap(); elf } #[test] fn get_section_bytes() { let elf = _load_example_binary(); let text = elf.try_get_section(".text").unwrap(); if let SectionData::Bytes(_bytes) = text.data() { } } #[test] fn section_iters() { let elf = _load_example_binary(); for (i, s) in elf.sections().enumerate() { match i { 0 => assert_eq!(s.header().section_type(), SectionType::Null), 2 => assert_eq!(s.header().section_type(), SectionType::ProgramData), 5 => assert_eq!(s.header().section_type(), SectionType::SymbolTable), 6 => assert_eq!(s.header().section_type(), SectionType::StringTable), _ 
=> continue } } } #[test] fn segment_iters() { let elf = _load_example_binary(); println!("{:#?}", elf); for (i, h) in elf.segments().enumerate() { match i {
random
[ { "content": "}\n\n\n\nmacro_rules! read_u64 {\n\n ($reader:expr, $descriptor:expr) => {\n\n {\n\n let mut __bytes: [u8; 8] = [0; 8];\n\n let mut __temp: u64 = 0;\n\n $reader.read_exact(&mut __bytes)?;\n\n unsafe {\n\n __temp = std::mem::transmute::<[u8; 8], u64>(__bytes);\n\n\n\n if $descriptor.data_format()? == DataFormat::BigEndian {\n\n __temp = __temp.to_le();\n\n }\n\n }\n\n __temp\n\n }\n\n };\n\n}\n", "file_path": "src/macros.rs", "rank": 5, "score": 23643.246668882588 }, { "content": " }\n\n };\n\n}\n\n\n\nmacro_rules! read_u32 {\n\n ($reader:expr, $descriptor:expr) => {\n\n {\n\n let mut __bytes: [u8; 4] = [0; 4];\n\n let mut __temp: u32 = 0;\n\n $reader.read_exact(&mut __bytes)?;\n\n unsafe {\n\n __temp = std::mem::transmute::<[u8; 4], u32>(__bytes);\n\n\n\n if $descriptor.data_format()? == DataFormat::BigEndian {\n\n __temp = __temp.to_le();\n\n }\n\n }\n\n __temp\n\n }\n\n };\n", "file_path": "src/macros.rs", "rank": 6, "score": 23643.246668882588 }, { "content": " $reader.read_exact(&mut __bytes)?;\n\n __bytes[0]\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! read_u16 {\n\n ($reader:expr, $descriptor:expr) => {\n\n {\n\n let mut __bytes: [u8; 2] = [0; 2];\n\n let mut __temp: u16 = 0;\n\n $reader.read_exact(&mut __bytes)?;\n\n unsafe {\n\n __temp = std::mem::transmute::<[u8; 2], u16>(__bytes);\n\n\n\n if $descriptor.data_format()? == DataFormat::BigEndian {\n\n __temp = __temp.to_le();\n\n }\n\n }\n\n __temp\n", "file_path": "src/macros.rs", "rank": 7, "score": 23643.183292879003 }, { "content": "/**\n\n * Helper macros\n\n */\n\n\n\nmacro_rules! read_n_bytes {\n\n ($reader:expr, $num:expr) => {\n\n {\n\n let mut __bytes: Vec<u8> = Vec::with_capacity($num as usize);\n\n for byte in $reader.bytes().take($num) {\n\n __bytes.push(byte.unwrap());\n\n }\n\n __bytes\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! 
read_byte {\n\n ($reader:expr) => {\n\n {\n\n let mut __bytes: [u8; 1] = [0; 1];\n", "file_path": "src/macros.rs", "rank": 8, "score": 23638.571378870623 }, { "content": " pub const NONE: u16 = 0x0000;\n\n pub const ARM: u16 = 0x0028;\n\n pub const ATMELAVR: u16 = 0x0054;\n\n pub const AMD64: u16 = 0x003E;\n\n pub const ST200: u16 = 0x0064;\n\n pub const RISCV: u16 = 0x00F3;\n\n}\n\n\n\npub mod data_formats {\n\n pub const LITTLE_ENDIAN: u8 = 0x01;\n\n pub const BIG_ENDIAN: u8 = 0x02;\n\n}\n\n\n\npub mod data_classes {\n\n pub const ELF32: u8 = 0x01;\n\n pub const ELF64: u8 = 0x02;\n\n}\n\n\n\npub mod section_types {\n\n pub const NULL: u32 = 0x00;\n", "file_path": "src/constants.rs", "rank": 9, "score": 23579.138504581057 }, { "content": "//! Constants defined by the ELF standard and vendors\n\n//! \n\n//! The ELF standard uses many standard defined and vendor defined constants\n\n//! to describe the contents of an ELF file. These constants are listed\n\n//! here grouped in modules for clarity and correctness\n\n//! \n\n//! These constants should not be modified, as they are critical to the correct\n\n//! 
interpretation of ELF file data.\n\n\n\n#![allow(missing_docs)]\n\n\n\nuse crate::numeric::*;\n\n\n\npub const CURRENT_IDENT_VERSION: u8 = 0x01;\n\npub const CURRENT_ELF_VERSION: u32 = 0x01;\n\n\n\npub const MAGIC_BYTES: [u8; 4] = [0x7F, 0x45, 0x4C, 0x46];\n\npub const SHN_UNDEF: Short = Short(0);\n\n\n\npub mod machines {\n", "file_path": "src/constants.rs", "rank": 10, "score": 23577.90205036532 }, { "content": " pub const WRITE: u32 = 0b010;\n\n pub const EXEC: u32 = 0b001;\n\n pub const READ_WRITE: u32 = 0b110;\n\n pub const READ_EXEC: u32 = 0b101;\n\n pub const READ_WRITE_EXEC: u32 = 0b111;\n\n}\n\n\n\npub mod section_flags {\n\n pub const NONE: u64 = 0b000;\n\n pub const WRITE: u64 = 0b001;\n\n pub const ALLOC: u64 = 0b010;\n\n pub const EXEC: u64 = 0b100;\n\n pub const WRITE_ALLOC: u64 = 0b011;\n\n pub const WRITE_EXEC: u64 = 0b101;\n\n pub const ALLOC_EXEC: u64 = 0b110;\n\n pub const WRITE_ALLOC_EXEC: u64 = 0b111;\n\n}\n\n\n\npub mod elf_types {\n\n pub const NONE: u16 = 0x000;\n", "file_path": "src/constants.rs", "rank": 11, "score": 23576.853239108903 }, { "content": " pub const PROG_DATA: u32 = 0x01;\n\n pub const SYM_TABLE: u32 = 0x02;\n\n pub const STR_TABLE: u32 = 0x03;\n\n pub const REL_A: u32 = 0x04;\n\n pub const SYM_HASH: u32 = 0x05;\n\n pub const DYN_INFO: u32 = 0x06;\n\n pub const NOTE: u32 = 0x07;\n\n pub const NO_BITS: u32 = 0x08;\n\n pub const RELOCATION: u32 = 0x09;\n\n pub const SHLIB: u32 = 0x0A;\n\n pub const DYN_SYM_TAB: u32 = 0x0B;\n\n pub const INIT: u32 = 0x0E;\n\n pub const FINI: u32 = 0x0F;\n\n pub const PRE_INIT: u32 = 0x10;\n\n pub const GROUP: u32 = 0x11;\n\n pub const EXT_IDX: u32 = 0x12;\n\n}\n\n\n\npub mod program_flags {\n\n pub const READ: u32 = 0b100;\n", "file_path": "src/constants.rs", "rank": 12, "score": 23574.37820437256 }, { "content": " pub const RELOCATABLE: u16 = 0x001;\n\n pub const EXECUTABLE: u16 = 0x002;\n\n pub const SHARED: u16 = 0x003;\n\n pub const CORE: u16 = 0x004;\n\n pub const LO_PROC: u16 = 0xFF00;\n\n 
pub const HI_PROC: u16 = 0xFFFF;\n\n}\n\n\n\npub mod abi_versions {\n\n pub const UNSPECIFIED: u8 = 0x00;\n\n}\n\n\n\npub mod os_abis {\n\n pub const UNIX_SYSTEM_V: u8 = 0x00;\n\n}\n\n\n\npub mod processor_specific_header_types {\n\n pub const ARM_EXIDX: u32 = 0x7000_0001;\n\n}\n\n\n\npub mod os_specific_header_types {\n\n pub const GNU_STACK: u32 = 0x6474_E551;\n\n}\n", "file_path": "src/constants.rs", "rank": 13, "score": 23573.79284279037 }, { "content": " }\n\n\n\n /// Returns the contained value as a `u64`, zero extending it if necessary\n\n pub fn as_u64(&self) -> u64 {\n\n self.as_usize() as u64\n\n }\n\n}\n\n\n\nimpl Parslet for Size {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n match descriptor.data_class()? {\n\n DataClass::Elf32 => Ok(Size::Elf32Size(read_u32!(reader, descriptor))),\n\n DataClass::Elf64 => Ok(Size::Elf64Size(read_u64!(reader, descriptor))),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Size {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n", "file_path": "src/numeric.rs", "rank": 14, "score": 23396.06616253283 }, { "content": "//! 
Types describing various simple value types that may be found in an ELF file\n\n\n\nuse std::io::{ Read, Seek };\n\nuse std::convert::TryInto;\n\n\n\nuse crate::{ Parslet, ParseElfResult, Descriptor, DataClass, DataFormat };\n\n\n\n/// Represents a 16 bit half word in an ELF file\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub struct Short(pub u16);\n\n\n\nimpl Short {\n\n /// Returns the contained `u16` as a `usize`, zero extending it\n\n pub fn as_usize(self) -> usize {\n\n self.0 as usize\n\n }\n\n}\n\n\n\nimpl Parslet for Short {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n", "file_path": "src/numeric.rs", "rank": 15, "score": 23395.726162748437 }, { "content": " /// Returns the contained value as `usize`\n\n /// \n\n /// # Panics\n\n /// \n\n /// This method panics if the contained value would not fit into a `usize` without truncation\n\n pub fn as_usize(&self) -> usize {\n\n match self {\n\n Address::Elf32Addr(v) => (*v).try_into().expect(\"Unable to convert `Elf32Addr` to `usize` without truncating\"),\n\n Address::Elf64Addr(v) => (*v).try_into().expect(\"Unable to convert `Elf64Addr` to `usize` without truncating\")\n\n }\n\n }\n\n}\n\n\n\nimpl Parslet for Address {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n match descriptor.data_class()? 
{\n\n DataClass::Elf32 => Ok(Address::Elf32Addr(read_u32!(reader, descriptor))),\n\n DataClass::Elf64 => Ok(Address::Elf64Addr(read_u64!(reader, descriptor))),\n\n }\n\n }\n", "file_path": "src/numeric.rs", "rank": 16, "score": 23390.50220883916 }, { "content": " /// Returns the contained 'u32' as a `u64`, zero extending it\n\n pub fn as_u64(self) -> u64 {\n\n self.0 as u64\n\n }\n\n}\n\n\n\nimpl Parslet for Word {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n Ok(Word(read_u32!(reader, descriptor)))\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Word {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n\n\n/// Used to represent both 32 and 64 bit sizes and offsets within an ELF file\n", "file_path": "src/numeric.rs", "rank": 17, "score": 23389.265867910963 }, { "content": " Ok(Short(read_u16!(reader, descriptor)))\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Short {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n/// Represents a 32 bit word in an ELF file\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub struct Word(pub u32);\n\n\n\nimpl Word {\n\n /// Returns the contained `u32` as a `usize`, zero extending it if necessary\n\n pub fn as_usize(self) -> usize {\n\n self.0 as usize\n\n }\n\n\n", "file_path": "src/numeric.rs", "rank": 18, "score": 23380.869637157015 }, { "content": "#[derive(PartialEq, Eq, Clone, Copy)]\n\npub enum Size {\n\n /// The `Size` type for ELF32\n\n Elf32Size(u32),\n\n\n\n /// The `Size` type for ELF64\n\n Elf64Size(u64)\n\n}\n\n\n\nimpl Size {\n\n /// Returns the contained value as `usize`\n\n /// \n\n /// # Panics\n\n /// \n\n /// This method panics if the contained value would not fit into a `usize` without truncation\n\n pub fn as_usize(&self) -> usize {\n\n match self {\n\n Size::Elf32Size(v) => (*v).try_into().expect(\"Unable to convert 
`Elf32Size` to `usize` without truncating\"),\n\n Size::Elf64Size(v) => (*v).try_into().expect(\"Unable to convert `Elf64Size` to `usize` without truncating\")\n\n }\n", "file_path": "src/numeric.rs", "rank": 19, "score": 23377.327108246867 }, { "content": " Size::Elf32Size(v) => {\n\n write!(f, \"{}\", v)\n\n },\n\n Size::Elf64Size(v) => {\n\n write!(f, \"{}\", v)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n/// This struct is used to represent both 32 and 64 bit virtual or physical addresses in ELF files and process images\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub enum Address {\n\n /// The `Address` type for ELF32\n\n Elf32Addr(u32),\n\n /// The `Address` type for ELF64\n\n Elf64Addr(u64)\n\n}\n\nimpl Address {\n", "file_path": "src/numeric.rs", "rank": 20, "score": 23375.288565773306 }, { "content": "}\n\n\n\nimpl std::fmt::Debug for Address {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Address::Elf32Addr(v) => {\n\n write!(f, \"{:#X}\", v)\n\n },\n\n Address::Elf64Addr(v) => {\n\n write!(f, \"{:#X}\", v)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/numeric.rs", "rank": 21, "score": 23374.130863458628 }, { "content": "/// \n\n/// This has implications on how the file is read and parsed, as it changes the size and position of certain items within the file\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum DataClass {\n\n Elf32,\n\n Elf64,\n\n}\n\n\n\nimpl Parslet for DataClass {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::data_classes::*;\n\n \n\n match read_byte!(reader) {\n\n ELF32 => Ok(DataClass::Elf32),\n\n ELF64 => Ok(DataClass::Elf64),\n\n v => Err(ParseElfError::InvalidDataClass(v))\n\n }\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 22, "score": 19696.524752887406 }, { "content": "#[derive(Debug)]\n\npub struct Section {\n\n header: SectionHeader,\n\n data: SectionData,\n\n}\n\n\n\nimpl Section {\n\n /// 
Returns a reference to a 'SectionData' instance which contains the parsed data contained by the section \n\n pub fn data(&self) -> &SectionData {\n\n &self.data\n\n }\n\n\n\n /// Returns a reference to the sections header\n\n pub fn header(&self) -> &SectionHeader {\n\n &self.header\n\n }\n\n}\n\n\n\nimpl Parslet for Section {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n", "file_path": "src/types.rs", "rank": 23, "score": 19695.53743260306 }, { "content": " pub fn data(&self) -> &Vec<u8> {\n\n &self.data\n\n }\n\n\n\n /// Returns a reference to the segments program header\n\n pub fn header(&self) -> &ProgramHeader {\n\n &self.header\n\n }\n\n}\n\n\n\nimpl Parslet for Segment {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n let header = ProgramHeader::parse(reader, descriptor)?;\n\n \n\n /* Read segment bytes */\n\n let position = reader.seek(SeekFrom::Current(0)).unwrap(); // Save our position\n\n \n\n let segment_offs = header.offset.as_usize() as u64;\n\n\n\n let _ = reader.seek(SeekFrom::Start(segment_offs))?; // Move the readers position to the beginning of the segment\n", "file_path": "src/types.rs", "rank": 24, "score": 19695.526040230714 }, { "content": "\n\n /// Returns the segments physical address\n\n pub fn physical_address(&self) -> usize {\n\n self.physical_address.as_usize()\n\n }\n\n\n\n /// Returns the number of bytes the segment occupies in the file image\n\n pub fn file_size(&self) -> usize {\n\n self.file_size.as_usize()\n\n }\n\n\n\n /// Returns the number of bytes the segment occupies in the memory image\n\n pub fn memory_size(&self) -> usize {\n\n self.mem_size.as_usize()\n\n }\n\n}\n\n\n\nimpl Parslet for ProgramHeader {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n\n", "file_path": "src/types.rs", "rank": 25, "score": 19695.116694610366 }, { "content": "impl Parslet for 
Flags {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n match read_u32!(reader, descriptor) {\n\n v => Ok(Flags(v)),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Flags {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:#b}\", self.0)\n\n }\n\n}\n\n\n\n\n\n\n\n/// Represents one section in a loaded Elf binary\n\n/// \n\n/// This structure contains both a section header and the parsed\n\n/// data to which that header describes\n", "file_path": "src/types.rs", "rank": 26, "score": 19694.797191717065 }, { "content": "\n\n /// Returns the number of section headers in the section header table\n\n pub (in crate) fn section_header_count(&self) -> usize {\n\n self.shnum.as_usize()\n\n }\n\n\n\n /// Returns the index into the section header table of the section name string table\n\n /// \n\n /// Not all ELF files contain a section name string table, in this case, `None` is returned\n\n pub (in crate) fn section_name_table_index(&self) -> Option<usize> {\n\n if self.shstrndx != constants::SHN_UNDEF {\n\n Some(self.shstrndx.as_usize())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Parslet for ElfHeader {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n", "file_path": "src/types.rs", "rank": 27, "score": 19694.79005610235 }, { "content": "\n\n/// Describes the format of data within an ELF file, either 2's complement little endian or big endian\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum DataFormat {\n\n LittleEndian,\n\n BigEndian,\n\n}\n\n\n\nimpl Parslet for DataFormat {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::data_formats::*;\n\n\n\n match read_byte!(reader) {\n\n LITTLE_ENDIAN => Ok(DataFormat::LittleEndian),\n\n BIG_ENDIAN => Ok(DataFormat::BigEndian),\n\n v => Err(ParseElfError::InvalidDataFormat(v))\n\n 
}\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 28, "score": 19693.98350014721 }, { "content": " }\n\n }\n\n \n\n /// Returns the extra info field of the section. The interpretation of this field is dependent on the sections type\n\n pub fn info(&self) -> usize {\n\n self.info.as_usize()\n\n }\n\n}\n\n\n\nimpl Parslet for SectionHeader {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n let section_header = SectionHeader {\n\n name_index: Size::parse(reader, descriptor)?,\n\n ty: SectionType::parse(reader, descriptor)?,\n\n flags: SectionFlags::parse(reader, descriptor)?,\n\n virtual_address: Address::parse(reader, descriptor)?,\n\n offset: Address::parse(reader, descriptor)?,\n\n section_size: Size::parse(reader, descriptor)?,\n\n link: Word::parse(reader, descriptor)?,\n\n info: Word::parse(reader, descriptor)?,\n", "file_path": "src/types.rs", "rank": 29, "score": 19693.196746482838 }, { "content": "\n\n /// Section contains binary data, such as executable code\n\n Bytes(Vec<u8>),\n\n\n\n /// Section contains null-terminated Utf8 strings\n\n Strings(Vec<String>),\n\n}\n\n\n\nimpl SectionData {\n\n fn parse_as<R: Read + Seek>(reader: &mut R, _descriptor: &Descriptor, header: &SectionHeader) -> ParseElfResult<SectionData> {\n\n let position = reader.seek(SeekFrom::Current(0)).unwrap(); // Save our position as it may change to read a section\n\n \n\n let section_offs = header.offset.as_usize() as u64;\n\n let _ = reader.seek(SeekFrom::Start(section_offs))?; // Move the readers position to the beginning of the section\n\n \n\n // Read the raw bytes of the section\n\n let bytes = read_n_bytes!(reader, header.section_size.as_usize());\n\n \n\n let data = match header.ty {\n\n SectionType::Null => {\n", "file_path": "src/types.rs", "rank": 30, "score": 19693.172925149443 }, { "content": " #[allow(missing_docs)]\n\n Group,\n\n #[allow(missing_docs)]\n\n ExtendedSectionIndices,\n\n\n\n /// Section contains 
information defined by and specific to the operating system\n\n OSSpecific(u32),\n\n\n\n #[allow(missing_docs)]\n\n Unknown(u32),\n\n}\n\n\n\n/// Describes the contents of an individual section which is used to determine how a section should be processed\n\nimpl Parslet for SectionType {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::section_types::*;\n\n\n\n match read_u32!(reader, descriptor) {\n\n NULL => Ok(SectionType::Null),\n\n PROG_DATA => Ok(SectionType::ProgramData),\n", "file_path": "src/types.rs", "rank": 31, "score": 19692.823187538503 }, { "content": "}\n\n\n\nimpl Parslet for ProgramHeaderFlags {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n \n\n use ProgramHeaderFlags::*;\n\n use constants::program_flags::*;\n\n\n\n match read_u32!(reader, descriptor) {\n\n READ => Ok(Read),\n\n WRITE => Ok(Write),\n\n EXEC => Ok(Execute),\n\n READ_WRITE => Ok(ReadWrite),\n\n READ_EXEC => Ok(ReadExecute),\n\n READ_WRITE_EXEC => Ok(ReadWriteExecute),\n\n v => Err(ParseElfError::InvalidProgramFlags(v))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 32, "score": 19692.75281039554 }, { "content": " Specified(u8),\n\n}\n\n\n\nimpl Parslet for AbiVersion {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::abi_versions::*;\n\n\n\n match read_byte!(reader) {\n\n UNSPECIFIED => Ok(AbiVersion::Unspecified),\n\n v => Ok(AbiVersion::Specified(v))\n\n }\n\n }\n\n}\n\n\n\n/// The type of ELF file\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum ElfType {\n\n #[allow(missing_docs)]\n\n None,\n\n\n", "file_path": "src/types.rs", "rank": 33, "score": 19691.88317659079 }, { "content": " Arm,\n\n St200,\n\n RiscV,\n\n}\n\n\n\nimpl Parslet for Machine {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n use 
constants::machines::*;\n\n\n\n match read_u16!(reader, descriptor) {\n\n NONE => Ok(Machine::None),\n\n ARM => Ok(Machine::Arm),\n\n ATMELAVR => Ok(Machine::AtmelAvr),\n\n AMD64 => Ok(Machine::Amd64),\n\n ST200 => Ok(Machine::St200),\n\n RISCV => Ok(Machine::RiscV),\n\n v => Err(ParseElfError::InvalidMachine(v))\n\n }\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 34, "score": 19691.71270769157 }, { "content": "//! Types which describe the decoded contents of an ELF file\n\n\n\nuse std::io::{ Read, Seek, SeekFrom };\n\n\n\nuse crate::{ Parslet, ParseElfResult, ParseElfError, Descriptor };\n\nuse crate::numeric::*;\n\nuse crate::constants;\n\n\n\n/// Represents an ELF file header\n\n/// \n\n/// This header is used to identify and process the rest of an Elf file, it includes offsets to\n\n/// the program header table and the section header table\n\n#[derive(Debug)]\n\npub struct ElfHeader {\n\n ident: Identifier,\n\n ty: ElfType,\n\n machine: Machine,\n\n version: Version,\n\n entry: Address,\n\n phoff: Size,\n", "file_path": "src/types.rs", "rank": 35, "score": 19690.937412386185 }, { "content": "\n\n/// The ELF version number. ELF only has one version, version one.\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Version {\n\n /// The current ELF version\n\n Current,\n\n}\n\n\n\nimpl Parslet for Version {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n match read_u32!(reader, descriptor) {\n\n constants::CURRENT_ELF_VERSION => Ok(Version::Current),\n\n v => Err(ParseElfError::InvalidVersion(v))\n\n }\n\n }\n\n}\n\n\n\n/// ELF file flags\n\npub struct Flags(u32);\n\n\n", "file_path": "src/types.rs", "rank": 36, "score": 19690.8089204476 }, { "content": " } \n\n}\n\n\n\n/// The ELF file identifier\n\n/// \n\n/// This is the first piece of information decoded when reading an ELF file. 
It contains critical information\n\n/// necessary for the successful parsing of the rest of the file\n\n#[derive(Debug)]\n\npub struct Identifier {\n\n magic: Magic,\n\n class: DataClass,\n\n data: DataFormat,\n\n version: IdentVersion,\n\n os_abi: OsAbi,\n\n abi_ver: AbiVersion,\n\n}\n\n\n\nimpl Parslet for Identifier {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n let parsed = Identifier {\n", "file_path": "src/types.rs", "rank": 37, "score": 19690.612131545648 }, { "content": "impl Parslet for ProgramHeaderType {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n \n\n use constants::os_specific_header_types::*;\n\n use constants::processor_specific_header_types::*;\n\n\n\n use ProgramHeaderType::*;\n\n match read_u32!(reader, descriptor) {\n\n 0x00 => Ok(Null),\n\n 0x01 => Ok(Loadable),\n\n 0x02 => Ok(DynamicInfo),\n\n 0x03 => Ok(InterpreterInfo),\n\n 0x04 => Ok(AuxiliaryInfo),\n\n 0x05 => Ok(ShLib),\n\n 0x06 => Ok(Phdr),\n\n\n\n // Known OS specific\n\n GNU_STACK => Ok(GnuStack),\n\n\n\n // Known processor specific\n", "file_path": "src/types.rs", "rank": 38, "score": 19690.24936362126 }, { "content": " let data = read_n_bytes!(reader, header.file_size.as_usize()); // Read the segment\n\n let _ = reader.seek(SeekFrom::Start(position))?; // Reset our position\n\n\n\n let segment = Segment {\n\n header,\n\n data\n\n };\n\n\n\n Ok(segment)\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Segment {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n writeln!(f, \"{:#?},\", self.header)?;\n\n write!(f, \"data: [raw]\")\n\n }\n\n}\n\n\n\n\n", "file_path": "src/types.rs", "rank": 39, "score": 19689.97624706752 }, { "content": " /// The file is used for linking\n\n Relocatable,\n\n\n\n /// The file is an executable binary\n\n Executable,\n\n\n\n /// The file is used for dynamic linking\n\n Shared,\n\n\n\n /// A core file\n\n Core,\n\n\n\n 
/// The files purpose is defined by the host processor\n\n ProcessorSpecific(u16)\n\n}\n\n\n\nimpl Parslet for ElfType {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::elf_types::*;\n\n\n", "file_path": "src/types.rs", "rank": 40, "score": 19689.846381325584 }, { "content": " #[allow(missing_docs)] \n\n WriteExecute,\n\n #[allow(missing_docs)] \n\n AllocExecute,\n\n #[allow(missing_docs)] \n\n WriteAllocExecute,\n\n\n\n /// Flags with meaning defined by the target processor\n\n ProcessorSpecific(Size),\n\n}\n\n\n\nimpl Parslet for SectionFlags {\n\n fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::section_flags::*;\n\n \n\n // We capture flags first as a `Size` and then interpret it as\n\n // a u64 in order to prevent data loss, while still being able\n\n // to retain the `Size` variant of the original data in case\n\n // the flags are `ProcessorSpecific`\n\n let flags = Size::parse(reader, descriptor)?;\n", "file_path": "src/types.rs", "rank": 41, "score": 19689.112099189988 }, { "content": "pub enum OsAbi {\n\n UnixSystemV,\n\n}\n\n\n\nimpl Parslet for OsAbi {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n use constants::os_abis::*;\n\n \n\n match read_byte!(reader) {\n\n UNIX_SYSTEM_V => Ok(OsAbi::UnixSystemV),\n\n v => Err(ParseElfError::InvalidOsAbi(v))\n\n }\n\n }\n\n}\n\n\n\n/// If a specific OS ABI version is required, it will be indicated as `Specified(u8)`\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum AbiVersion {\n\n Unspecified,\n", "file_path": "src/types.rs", "rank": 42, "score": 19689.015907105244 }, { "content": "\n\n/// The ELF identifier version. 
There is only one version, version one\n\n#[derive(Debug)]\n\npub enum IdentVersion {\n\n /// The current ELF version\n\n Current,\n\n}\n\n\n\nimpl Parslet for IdentVersion {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n match read_byte!(reader) {\n\n constants::CURRENT_IDENT_VERSION => Ok(IdentVersion::Current), // Elf only has one version, version one. Nonetheless we parse it as \"current\"\n\n v => Err(ParseElfError::InvalidIdentVersion(v))\n\n }\n\n }\n\n}\n\n\n\n/// The OS ABI\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n", "file_path": "src/types.rs", "rank": 43, "score": 19687.473798150604 }, { "content": "/// Indicates whether the file contains valid magic bytes\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Magic {\n\n Valid,\n\n Invalid,\n\n}\n\n\n\nimpl Parslet for Magic {\n\n fn parse<R: Read + Seek>(reader: &mut R, _: &mut Descriptor) -> ParseElfResult<Self> {\n\n let bytes = read_n_bytes!(reader, 4);\n\n if bytes.as_slice() == &constants::MAGIC_BYTES[..] 
{\n\n Ok(Magic::Valid)\n\n } else {\n\n Ok(Magic::Invalid)\n\n }\n\n }\n\n}\n\n\n\n/// Describes the data class of an ELF file\n", "file_path": "src/types.rs", "rank": 44, "score": 19686.072216154833 }, { "content": " let header = SectionHeader::parse(reader, descriptor)?;\n\n let data = SectionData::parse_as(reader, &descriptor, &header)?;\n\n\n\n let section = Section {\n\n header,\n\n data\n\n };\n\n\n\n Ok(section)\n\n }\n\n}\n\n\n\n\n\n \n\n/// Describes the location and the contents of an Elf section\n\n#[derive(Debug)]\n\npub struct SectionHeader {\n\n name_index: Size,\n\n ty: SectionType,\n\n flags: SectionFlags,\n", "file_path": "src/types.rs", "rank": 45, "score": 19685.96555187748 }, { "content": " let ty = ProgramHeaderType::parse(reader, descriptor)?;\n\n let mut flags = ProgramHeaderFlags::None;\n\n\n\n let data_class = descriptor.data_class()?;\n\n\n\n // If this is an Elf64 file, the program flags appear before the 'offset' value\n\n if data_class == DataClass::Elf64 {\n\n flags = ProgramHeaderFlags::parse(reader, descriptor)?;\n\n }\n\n \n\n let offset = Address::parse(reader, descriptor)?;\n\n let virtual_address = Address::parse(reader, descriptor)?;\n\n let physical_address = Address::parse(reader, descriptor)?;\n\n let file_size = Size::parse(reader, descriptor)?;\n\n let mem_size = Size::parse(reader, descriptor)?;\n\n\n\n // If this is an Elf32 file, the program flags actually appear after the 'mem_size' value\n\n if data_class == DataClass::Elf32 {\n\n flags = ProgramHeaderFlags::parse(reader, descriptor)?;\n\n }\n", "file_path": "src/types.rs", "rank": 46, "score": 19685.20961098758 }, { "content": " _ => {\n\n SectionData::Bytes(bytes)\n\n },\n\n };\n\n\n\n let _ = reader.seek(SeekFrom::Start(position))?; // Reset the readers position\n\n Ok(data)\n\n }\n\n}\n\n\n\n/// Represents one segment in a loaded Elf binary\n\n/// \n\n/// This structure contains the program header associate with the segment, as well as a copy of the raw bytes the 
header describes\n\npub struct Segment {\n\n header: ProgramHeader,\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl Segment {\n\n /// Returns a reference to a vector containing the raw data of the segment\n", "file_path": "src/types.rs", "rank": 47, "score": 19684.63623606993 }, { "content": " match read_u16!(reader, descriptor) {\n\n NONE => Ok(ElfType::None),\n\n RELOCATABLE => Ok(ElfType::Relocatable),\n\n EXECUTABLE => Ok(ElfType::Executable),\n\n SHARED => Ok(ElfType::Shared),\n\n CORE => Ok(ElfType::Core),\n\n\n\n v @ LO_PROC ..= HI_PROC => Ok(ElfType::ProcessorSpecific(v)),\n\n v => Err(ParseElfError::InvalidElfType(v))\n\n }\n\n }\n\n}\n\n\n\n/// The machine which this ELF file targets\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Machine {\n\n None,\n\n AtmelAvr,\n\n Amd64,\n", "file_path": "src/types.rs", "rank": 48, "score": 19684.201286565447 }, { "content": "/// Program headers describe segments comprised of zero or more sections which are\n\n/// loaded into memory in order to construct a process image\n\n#[derive(Debug)]\n\npub struct ProgramHeader {\n\n ty: ProgramHeaderType,\n\n flags: ProgramHeaderFlags,\n\n offset: Address,\n\n virtual_address: Address,\n\n physical_address: Address,\n\n file_size: Size,\n\n mem_size: Size,\n\n align: Size\n\n}\n\n\n\nimpl ProgramHeader {\n\n /// Returns a `SectionType` describing the purpose of the section\n\n pub fn program_header_type(&self) -> ProgramHeaderType {\n\n self.ty\n\n }\n\n\n", "file_path": "src/types.rs", "rank": 49, "score": 19683.91586807638 }, { "content": " ARM_EXIDX => Ok(ArmExidx),\n\n\n\n v @ 0x6000_0000 ..= 0x6FFF_FFFF => Ok(ProgramHeaderType::OSSpecific(v)),\n\n v @ 0x7000_0000 ..= 0x7FFF_FFFF => Ok(ProgramHeaderType::ProcessorSpecific(v)),\n\n v => Err(ParseElfError::InvalidProgramHeader(v))\n\n }\n\n }\n\n}\n\n\n\n/// Flags which describe the allowable access patterns of a given section described by a `ProgramHeader`\n\n#[allow(missing_docs)]\n\n#[derive(Debug, 
Clone, Copy, PartialEq, Eq)]\n\npub enum ProgramHeaderFlags {\n\n None,\n\n Read,\n\n Write,\n\n Execute,\n\n ReadWrite,\n\n ReadExecute,\n\n ReadWriteExecute,\n", "file_path": "src/types.rs", "rank": 50, "score": 19682.871411242046 }, { "content": " align: Size::parse(reader, descriptor)?,\n\n entry_size: Size::parse(reader, descriptor)?,\n\n };\n\n\n\n Ok(section_header)\n\n }\n\n}\n\n\n\n/// Describes the type of information contained within a section\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum SectionType {\n\n /// Marks a section as inactive\n\n /// \n\n /// Section headers with the type 'Null' do not have a corresponding section in the file\n\n Null,\n\n\n\n /// Marks a section as containing data whose meaning is defined entirely by the program\n\n ProgramData,\n\n\n\n /// Marks a section as containing a symbol table\n", "file_path": "src/types.rs", "rank": 51, "score": 19682.743552952266 }, { "content": " virtual_address: Address,\n\n offset: Address,\n\n section_size: Size,\n\n link: Word,\n\n info: Word,\n\n align: Size,\n\n entry_size: Size,\n\n}\n\n\n\nimpl SectionHeader {\n\n /// Returns a `SectionType` describing the purpose of the section\n\n pub fn section_type(&self) -> SectionType {\n\n self.ty\n\n }\n\n\n\n /// Returns the sections flags\n\n pub fn flags(&self) -> SectionFlags {\n\n self.flags\n\n }\n\n\n", "file_path": "src/types.rs", "rank": 52, "score": 19682.6788345687 }, { "content": " shoff: Size,\n\n flags: Flags,\n\n ehsize: Short,\n\n phentsize: Short,\n\n phnum: Short,\n\n shentsize: Short,\n\n shnum: Short,\n\n shstrndx: Short,\n\n}\n\n\n\nimpl ElfHeader {\n\n /// Returns the address of the programs entry point. 
This is the point where execution begins\n\n pub fn entry(&self) -> usize {\n\n self.entry.as_usize()\n\n }\n\n\n\n /// Returns the type of the ELF file\n\n pub fn elf_type(&self) -> ElfType {\n\n self.ty.clone()\n\n }\n", "file_path": "src/types.rs", "rank": 53, "score": 19682.199325612877 }, { "content": " /// Returns the sections flags\n\n pub fn flags(&self) -> ProgramHeaderFlags {\n\n self.flags\n\n }\n\n\n\n /// Returns the sections alignment\n\n /// \n\n /// If no alignment is required, returns `None`\n\n pub fn alignment(&self) -> Option<usize> {\n\n if self.align.as_u64() <= 1u64 {\n\n None\n\n } else {\n\n Some(self.align.as_usize())\n\n }\n\n }\n\n\n\n /// Returns the virtual address at which the first byte of the segment resides in memory\n\n pub fn virtual_address(&self) -> usize {\n\n self.virtual_address.as_usize()\n\n }\n", "file_path": "src/types.rs", "rank": 54, "score": 19681.84983007329 }, { "content": " /// Returns the address at which the first byte of the section will appear in a memory image, or zero if it does not\n\n pub fn address(&self) -> usize {\n\n self.virtual_address.as_usize()\n\n }\n\n\n\n /// Returns the sections address alignment, or `None` if it does not require alignment\n\n pub fn alignment(&self) -> Option<usize> {\n\n if self.align.as_u64() <= 1u64 {\n\n None\n\n } else {\n\n Some(self.align.as_usize())\n\n }\n\n }\n\n\n\n /// If the section contains a table of fixed sized entries, this returns the size in bytes of each entry, or `None` otherwise\n\n pub fn entry_size(&self) -> Option<usize> {\n\n if self.entry_size.as_u64() == 0 {\n\n None\n\n } else {\n\n Some(self.entry_size.as_usize())\n", "file_path": "src/types.rs", "rank": 55, "score": 19681.7152719337 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use crate::*;\n\n use super::*;\n\n\n\n fn _load_example_binary() -> Elf {\n\n let elf = Elf::load(\"examples/example-binary\").unwrap();\n\n elf\n\n }\n\n\n\n #[test]\n\n fn parse_elf_header() {\n\n let elf = 
_load_example_binary();\n\n\n\n let header = &elf.header;\n\n let ident = &header.ident;\n\n\n\n // Assert some known values in the test binary were parsed correctly\n\n assert_eq!(Magic::Valid, ident.magic);\n\n assert_eq!(DataClass::Elf32, ident.class);\n", "file_path": "src/types.rs", "rank": 56, "score": 19680.05737433958 }, { "content": "\n\n /// Returns the machine target of the ELF file\n\n pub fn machine(&self) -> Machine {\n\n self.machine.clone()\n\n }\n\n\n\n /// Returns the direct offset of the program header table within an ELF file\n\n pub (in crate) fn program_headers_offset(&self) -> u64 {\n\n self.phoff.as_u64()\n\n }\n\n\n\n /// Returns the number of program headers in the program header table\n\n pub (in crate) fn program_header_count(&self) -> usize {\n\n self.phnum.as_usize()\n\n }\n\n\n\n /// Returns the direct offset of the section header table within an ELF file\n\n pub (in crate) fn section_headers_offset(&self) -> u64 {\n\n self.shoff.as_u64()\n\n }\n", "file_path": "src/types.rs", "rank": 57, "score": 19679.557528788933 }, { "content": " SYM_TABLE => Ok(SectionType::SymbolTable),\n\n STR_TABLE => Ok(SectionType::StringTable),\n\n REL_A => Ok(SectionType::RelocationWithAddends),\n\n SYM_HASH => Ok(SectionType::SymbolHashTable),\n\n DYN_INFO => Ok(SectionType::DynamicInfo),\n\n NOTE => Ok(SectionType::Note),\n\n NO_BITS => Ok(SectionType::NoBits),\n\n RELOCATION => Ok(SectionType::Relocation),\n\n SHLIB => Ok(SectionType::ShLib),\n\n DYN_SYM_TAB => Ok(SectionType::DynamicSymbolTable),\n\n INIT => Ok(SectionType::InitArray),\n\n FINI => Ok(SectionType::FiniArray),\n\n PRE_INIT => Ok(SectionType::PreInitArray),\n\n GROUP => Ok(SectionType::Group),\n\n EXT_IDX => Ok(SectionType::ExtendedSectionIndices),\n\n\n\n v @ 0x6000_0000 ..= 0xFFFF_FFFF => Ok(SectionType::OSSpecific(v)),\n\n v => Ok(SectionType::Unknown(v)),\n\n //v => Err(ParseElfError::InvalidSectionType(v))\n\n }\n", "file_path": "src/types.rs", "rank": 58, "score": 19679.245356034706 
}, { "content": " magic: Magic::parse(reader, descriptor)?,\n\n class: DataClass::parse(reader, descriptor)?,\n\n data: DataFormat::parse(reader, descriptor)?,\n\n version: IdentVersion::parse(reader, descriptor)?,\n\n os_abi: OsAbi::parse(reader, descriptor)?,\n\n abi_ver: AbiVersion::parse(reader, descriptor)?,\n\n };\n\n\n\n *descriptor = Descriptor::Data {\n\n class: parsed.class,\n\n format: parsed.data\n\n };\n\n\n\n // The end of the ident is composed of empty padding bytes, skip over them\n\n read_n_bytes!(reader, 7);\n\n\n\n Ok(parsed)\n\n } \n\n}\n\n\n", "file_path": "src/types.rs", "rank": 59, "score": 19678.50479310995 }, { "content": " Ok(match flags.as_u64() {\n\n NONE => SectionFlags::None,\n\n WRITE => SectionFlags::Write,\n\n ALLOC => SectionFlags::Alloc,\n\n EXEC => SectionFlags::Execute,\n\n WRITE_ALLOC => SectionFlags::WriteAlloc,\n\n WRITE_EXEC => SectionFlags::WriteExecute,\n\n ALLOC_EXEC => SectionFlags::AllocExecute,\n\n WRITE_ALLOC_EXEC => SectionFlags::WriteAllocExecute,\n\n\n\n _ => SectionFlags::ProcessorSpecific(flags)\n\n })\n\n }\n\n}\n\n\n\n/// Represents the parsed data contained in one section\n\n#[derive(Debug)]\n\npub enum SectionData {\n\n /// Section contains no data\n\n Null,\n", "file_path": "src/types.rs", "rank": 60, "score": 19678.16891694914 }, { "content": " let ident = Identifier::parse(reader, descriptor)?;\n\n\n\n let header = ElfHeader {\n\n ident,\n\n ty: ElfType::parse(reader, descriptor)?,\n\n machine: Machine::parse(reader, descriptor)?,\n\n version: Version::parse(reader, descriptor)?,\n\n entry: Address::parse(reader, descriptor)?,\n\n phoff: Size::parse(reader, descriptor)?,\n\n shoff: Size::parse(reader, descriptor)?,\n\n flags: Flags::parse(reader, descriptor)?,\n\n ehsize: Short::parse(reader, descriptor)?,\n\n phentsize: Short::parse(reader, descriptor)?,\n\n phnum: Short::parse(reader, descriptor)?,\n\n shentsize: Short::parse(reader, descriptor)?,\n\n shnum: Short::parse(reader, descriptor)?,\n\n 
shstrndx: Short::parse(reader, descriptor)?,\n\n };\n\n\n\n Ok(header)\n", "file_path": "src/types.rs", "rank": 61, "score": 19677.95286758846 }, { "content": " assert_eq!(DataFormat::LittleEndian, ident.data);\n\n assert_eq!(OsAbi::UnixSystemV, ident.os_abi);\n\n assert_eq!(AbiVersion::Unspecified, ident.abi_ver);\n\n\n\n assert_eq!(ElfType::Executable, header.ty);\n\n assert_eq!(Machine::Arm, header.machine);\n\n assert_eq!(Version::Current, header.version);\n\n assert_eq!(Address::Elf32Addr(0x11001), header.entry);\n\n assert_eq!(Size::Elf32Size(52), header.phoff);\n\n assert_eq!(Size::Elf32Size(8428), header.shoff);\n\n // TODO: Test flags (Flags type should be rewritten as a more descriptive enum)\n\n assert_eq!(Short(52), header.ehsize);\n\n assert_eq!(Short(32), header.phentsize);\n\n assert_eq!(Short(5), header.phnum);\n\n assert_eq!(Short(40), header.shentsize);\n\n assert_eq!(Short(8), header.shnum);\n\n assert_eq!(Short(6), header.shstrndx);\n\n }\n\n\n\n #[test]\n", "file_path": "src/types.rs", "rank": 62, "score": 19677.765208939476 }, { "content": " fn try_get_section() {\n\n let elf = _load_example_binary();\n\n let text = elf.try_get_section(\".text\").unwrap();\n\n\n\n assert_eq!(SectionFlags::AllocExecute, text.header.flags);\n\n }\n\n\n\n #[test]\n\n fn try_get_fake_section() {\n\n let elf = _load_example_binary(); \n\n\n\n // We know before hand that this section does not exist in the example binary\n\n assert!(elf.try_get_section(\".aaaabbbbcccc\").is_none());\n\n assert!(elf.try_get_section(\"_aaaabbbbcccc\").is_none());\n\n assert!(elf.try_get_section(\"aaaabbbbcccc\").is_none());\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 63, "score": 19677.579781828772 }, { "content": "\n\n let align = Size::parse(reader, descriptor)?;\n\n\n\n let program_header = ProgramHeader {\n\n ty,\n\n flags,\n\n offset,\n\n virtual_address,\n\n physical_address,\n\n file_size,\n\n mem_size,\n\n align,\n\n };\n\n\n\n Ok(program_header)\n\n }\n\n}\n\n\n\n/// 
Describes the way in which the section pointed to by a program header is to be processed\n\n#[allow(missing_docs)]\n", "file_path": "src/types.rs", "rank": 64, "score": 19677.30093457363 }, { "content": " SectionData::Null\n\n },\n\n\n\n // Program data is preserved as raw binary data, its meaning is defined by the consuming system\n\n SectionType::ProgramData => {\n\n SectionData::Bytes(bytes)\n\n },\n\n \n\n // Parse string tables as actual vectors of String\n\n SectionType::StringTable => {\n\n let splits = bytes.split(|c| *c == (b'\\0') ); \n\n \n\n let mut strings: Vec<String> = Vec::new();\n\n for slice in splits {\n\n let result = String::from_utf8(slice.to_vec());\n\n strings.push(result.unwrap());\n\n }\n\n SectionData::Strings(strings)\n\n },\n\n\n", "file_path": "src/types.rs", "rank": 65, "score": 19677.27119614713 }, { "content": " SymbolTable,\n\n\n\n /// Marks a section as containing a string table, there may be multiple string tables in a given ELF file\n\n StringTable,\n\n\n\n /// Marks a section as containing relocation data with explicit addends\n\n RelocationWithAddends,\n\n\n\n /// Marks a section as containing a symbol hash table\n\n SymbolHashTable,\n\n\n\n /// Marks a section as containing information for dynamic linking\n\n DynamicInfo,\n\n\n\n /// Marks a section as containing arbitrary information used to mark the section in some way\n\n /// \n\n /// This information is usually generated by some part of the toolchain used to produce the ELF file\n\n Note,\n\n\n\n /// Marks a section as containing no data, but otherwise resembles a 'ProgramData' section\n", "file_path": "src/types.rs", "rank": 66, "score": 19676.665172726276 }, { "content": " NoBits,\n\n\n\n /// Marks a section as containing relocation data without explicit addends\n\n Relocation,\n\n\n\n /// This section type is reserved and should not be used. 
ELF files which contain a section of this type do not conform to the ABI\n\n ShLib,\n\n\n\n /// Marks a section as containing a minimal symbol table used for dynamic linking\n\n DynamicSymbolTable,\n\n\n\n /// Marks a section as containing constructors\n\n InitArray,\n\n\n\n /// Marks a section as containing destructors\n\n FiniArray,\n\n\n\n /// Marks a section as containing pre-constructors\n\n PreInitArray,\n\n\n", "file_path": "src/types.rs", "rank": 67, "score": 19675.7681478366 }, { "content": " }\n\n}\n\n\n\n/// Section flags describe the allowable access patterns of an Elf section\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum SectionFlags {\n\n /// No section flags\n\n None,\n\n\n\n /// Section is writable at runtime\n\n Write,\n\n \n\n /// Section occupies space in memory at runtime\n\n Alloc,\n\n \n\n /// Section contains executable code\n\n Execute,\n\n\n\n #[allow(missing_docs)] \n\n WriteAlloc,\n", "file_path": "src/types.rs", "rank": 68, "score": 19675.681355688354 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ProgramHeaderType {\n\n Null,\n\n Loadable,\n\n DynamicInfo,\n\n InterpreterInfo,\n\n AuxiliaryInfo,\n\n ShLib,\n\n Phdr,\n\n\n\n // Known OS specific\n\n GnuStack,\n\n\n\n // Known processor specific\n\n ArmExidx,\n\n\n\n OSSpecific(u32),\n\n ProcessorSpecific(u32),\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 69, "score": 19672.79141902375 }, { "content": "# Elfy\n\n![Build Status](https://travis-ci.org/JerTH/elfy.svg?branch=master)\n\n![Docs](https://docs.rs/elfy/badge.svg)\n\n\n\n[Documentation](https://docs.rs/elfy)\n\n\n\n[Crates.io](https://crates.io/crates/elfy)\n\n\n\n#### Description\n\nElfy is for loading and parsing [ELF](https://en.wikipedia.org/wiki/Executable_and_Linkable_Format) ([2](https://wiki.osdev.org/ELF)) files. The project began as a simple binary loader for an ARMv7-M virtual machine, but quickly evolved into its own standalone crate. 
The goal of Elfy is to provide a simple and ergonomic interface for working with ELF files of all types.\n\n\n\nElfy is currently focused on reading data important to statically compiled ARM executables, in the future it will support more architectures and ELF features.\n\n\n\n#### Usage\n\nTo use Elfy, first add it as a dependency in your projects `Cargo.toml`\n\n```toml\n\n[dependencies]\n\nelfy = \"0.2.2\"\n\n```\n\n\n\nTo load an ELF file, include Elfy as an external crate. Loading an ELF file from disk and parsing it is now as simple as calling `Elf::load(path) -> ParseElfResult<Elf>` where `path` is any valid `std::path::Path` to an ELF file. If the file doesn't exist, the file isn't valid ELF, or there is a problem parsing the file then a `Err(ParseElfError)` will be returned with a description of what went wrong.\n\n```rust\n\nextern crate elfy;\n\nuse elfy::Elf;\n\n\n\nfn main() {\n\n let elf = Elf::load(\"examples/example-binary\").expect(\"Something went wrong!\");\n\n\n\n // ...\n\n}\n\n```\n\n\n\nData inside of a loaded ELF file can be accessed using `Elf::try_get_section(&self, section_name) -> Option<&Section>`. If the section exists `Some(&Section)` will be returned, otherwise `None`.\n\n\n\nThe parsed data within a section can be accessed using `Section::data(&self) -> &SectionData`. The `SectionData` type is an enum representing the different formats of data that may be contained within an ELF file.\n\n```rust\n", "file_path": "README.md", "rank": 75, "score": 20.094463316042503 }, { "content": "use elfy::{ Section, SectionData, SectionType, SectionFlags };\n\n\n\nfn main() {\n\n // ...\n\n \n\n let text_section = elf.try_get_section(\".text\").expect(\"The section doesn't exist!\");\n\n let header = text_section.header();\n\n\n\n\n\n // The .text section usually contains executable machine code and as such will be\n\n // parsed as raw binary data. 
Here we retrieve a vector of that data in `bytes` \n\n if let SectionData::Bytes(bytes) = text_section.data() {\n\n // ...\n\n }\n\n\n\n // Sections containing executable code are of type `ProgramData`, and\n\n // are flagged as Alloc and Execute, meaning they take up space in a program\n\n // image and have execution permissions\n\n assert_eq!(SectionType::ProgramData, header.ty);\n\n assert_eq!(SectionFlags::AllocExecute, header.flags);\n\n\n\n // ...\n\n}\n\n```\n\n\n\nAlternatively data can be accessed through the provided `Section` and `Segment` iterators\n\n\n\n```rust\n\nfn main() {\n\n // ...\n\n\n\n for section in elf.sections() {\n\n // do something with each section\n\n }\n\n\n\n for segment in elf.segments() {\n\n // do something with each segment\n\n }\n\n\n\n // ...\n\n}\n\n```\n", "file_path": "README.md", "rank": 77, "score": 18.471052163766178 } ]
Rust
rs/2021/day20/src/main.rs
cs-cordero/advent-of-code
614b8f78b43c54ef180a7dc411a0d1366a62944f
use advent_of_code::read_input_as_string; use std::collections::HashSet; use std::fmt::{Display, Formatter}; type Point = (isize, isize); #[derive(Clone, Debug)] struct Rect { lo_row: isize, lo_col: isize, hi_row: isize, hi_col: isize, } impl Rect { fn contains(&self, point: &Point) -> bool { let (row, col) = *point; self.lo_row <= row && row <= self.hi_row && self.lo_col <= col && col <= self.hi_col } fn on_edge_or_outside(&self, point: &Point) -> bool { let (row, col) = *point; let on_horizontal_edge = (self.lo_col..=self.hi_col).contains(&col) && (row == self.lo_row || row == self.hi_row); let on_vertical_edge = (self.lo_row..=self.hi_row).contains(&row) && (col == self.lo_col || col == self.hi_col); on_horizontal_edge || on_vertical_edge || !self.contains(point) } fn expand(&self, increase: isize) -> Self { Self { lo_row: self.lo_row - increase, lo_col: self.lo_col - increase, hi_row: self.hi_row + increase, hi_col: self.hi_col + increase, } } fn iter(&self) -> impl Iterator<Item = (isize, isize)> { let lo_row = self.lo_row; let hi_row = self.hi_row; let lo_col = self.lo_col; let hi_col = self.hi_col; (lo_row..=hi_row).flat_map(move |row| (lo_col..=hi_col).map(move |col| (row, col))) } fn rows(&self) -> impl Iterator<Item = impl Iterator<Item = (isize, isize)>> { let lo_row = self.lo_row; let hi_row = self.hi_row; let lo_col = self.lo_col; let hi_col = self.hi_col; (lo_row..=hi_row).map(move |row| (lo_col..=hi_col).map(move |col| (row, col))) } } #[derive(Clone, Debug)] struct Image { pixels_on: HashSet<Point>, dimensions: Rect, enhance_iteration_toggle: bool, enhance_algorithm: Vec<char>, } impl Display for Image { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let s = self .dimensions .rows() .map(|row| { row.map(|point| { if self.pixels_on.contains(&point) { '#' } else { '.' 
} }) .collect::<String>() }) .collect::<Vec<_>>() .join("\n"); write!(f, "{}", s) } } impl Image { fn create_enhanced_image(self) -> Image { let enhanced_rect = self.dimensions.expand(1); let new_pixels = enhanced_rect .iter() .filter(|point| { let algorithm_index = self.get_encoded_enhanced_pixel(*point, &enhanced_rect); *self.enhance_algorithm.get(algorithm_index).unwrap() == '#' }) .collect::<HashSet<Point>>(); Image { pixels_on: new_pixels, dimensions: enhanced_rect, enhance_iteration_toggle: !self.enhance_iteration_toggle, enhance_algorithm: self.enhance_algorithm, } } #[inline] fn get_encoded_enhanced_pixel(&self, center: Point, enhanced_rect: &Rect) -> usize { let edge_or_beyond_value = if self.distance_points_will_toggle() { if self.enhance_iteration_toggle { 1 } else { 0 } } else { 0 }; let (row, col) = center; [ (row - 1, col - 1), (row - 1, col), (row - 1, col + 1), (row, col - 1), (row, col), (row, col + 1), (row + 1, col - 1), (row + 1, col), (row + 1, col + 1), ] .iter() .map(|point| { if enhanced_rect.on_edge_or_outside(point) { edge_or_beyond_value } else if self.pixels_on.contains(point) { 1 } else { 0 } }) .fold(0usize, |acc, bit| (acc << 1) + bit) } #[inline] fn distance_points_will_toggle(&self) -> bool { self.enhance_algorithm .first() .zip(self.enhance_algorithm.last()) .map(|(first, last)| *first == '#' && *last == '.') .unwrap_or(false) } } fn main() { let raw = read_input_as_string("2021/day20/src/input.txt"); let image = { let (raw_algorithm, raw_image) = raw.split_once("\n\n").unwrap(); let algorithm = raw_algorithm.chars().collect::<Vec<_>>(); let image = raw_image .lines() .enumerate() .flat_map(|(row, line)| { line.chars() .enumerate() .filter(|(_, pixel)| *pixel == '#') .map(move |(col, _)| (row as isize, col as isize)) }) .collect::<HashSet<Point>>(); let dimensions = Rect { lo_row: 0, lo_col: 0, hi_row: *image.iter().map(|(row, _)| row).max().unwrap(), hi_col: *image.iter().map(|(_, col)| col).max().unwrap(), }; Image { pixels_on: 
image, dimensions, enhance_iteration_toggle: false, enhance_algorithm: algorithm, } }; let answer1 = image .clone() .create_enhanced_image() .create_enhanced_image() .pixels_on .len(); let answer2 = (0..50) .fold(image, |acc, _| acc.create_enhanced_image()) .pixels_on .len(); println!("Part 1: {:?}", answer1); println!("Part 2: {:?}", answer2); }
use advent_of_code::read_input_as_string; use std::collections::HashSet; use std::fmt::{Display, Formatter}; type Point = (isize, isize); #[derive(Clone, Debug)] struct Rect { lo_row: isize, lo_col: isize, hi_row: isize, hi_col: isize, } impl Rect { fn contains(&self, point: &Point) -> bool { let (row, col) = *point; self.lo_row <= row && row <= self.hi_row && self.lo_col <= col && col <= self.hi_col } fn on_edge_or_outside(&self, point: &Point) -> bool { let (row, col) = *point; let on_horizontal_edge = (self.lo_col..=self.hi_col).contains(&col) && (row == self.lo_row || row == self.hi_row); let on_vertical_edge = (self.lo_row..=self.hi_row).contains(&row) && (col == self.lo_col || col == self.hi_col); on_horizontal_edge || on_vertical_edge || !self.contains(point) } fn expand(&self, increase: isize) -> Self { Self { lo_row: self.lo_row - increase, lo_col: self.lo_col - increase, hi_row: self.hi_row + increase, hi_col: self.hi_col + increase, } } fn iter(&self) -> impl Iterator<Item = (isize, isize)> { let lo_row = self.lo_row; let hi_row = self.hi_row; let lo_col = self.lo_col; let hi_col = self.hi_col; (lo_row..=hi_row).flat_map(move |row| (lo_col..=hi_col).map(move |col| (row, col))) } fn rows(&self) -> impl Iterator<Item = impl Iterator<Item = (isize, isize)>> { let lo_row = self.lo_row; let hi_row = self.hi_row; let lo_col = self.lo_col; let hi_col = self.hi_col; (lo_row..=hi_row).map(move |row| (lo_col..=hi_col).map(move |col| (row, col))) } } #[derive(Clone, Debug)] struct Image { pixels_on: HashSet<Point>, dimensions: Rect, enhance_iteration_toggle: bool, enhance_algorithm: Vec<char>, } impl Display for Image { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let s = self .dimensions .rows() .map(|row| { row.map(|point| { if self.pixels_on.contains(&point) { '#' } else { '.' 
} }) .collect::<String>() }) .collect::<Vec<_>>() .join("\n"); write!(f, "{}", s) } } impl Image { fn create_enhanced_image(self) -> Image { let enhanced_rect = self.dimensions.expand(1); let new_pixels = enhanced_rect .iter() .filter(|point| { let algorithm_index = self.get_encoded_enhanced_pixel(*point, &enhanced_rect); *self.enhance_algorithm.get(algorithm_index).unwrap() == '#' }) .collect::<HashSet<Point>>(); Image { pixels_on: new_pixels, dimensions: enhanced_rect, enhance_iteration_toggle: !self.enhance_iteration_toggle, enhance_algorithm: self.enhance_algorithm, } } #[inline] fn get_encoded_enhanced_pixel(&self, center: Point, enhanced_rect: &Rect) -> usize { let edge_or_beyond_value = if self.distance_points_will_toggle() { if self.enhance_iteration_toggle { 1 } else { 0 } } else { 0 }; let (row, col) = center; [ (row - 1, col - 1), (row - 1, col), (row - 1, col + 1), (row, col - 1), (row, col), (row, col + 1), (row + 1, col - 1), (row + 1, col), (row + 1, col + 1), ] .iter() .map(|point| { if enhanced_rect.on_edge_or_outside(point) { edge_or_beyond_value } else if self.pixels_on.contains(point) { 1 } else { 0 } }) .fold(0usize, |acc, bit| (acc << 1) + bit) } #[inline] fn distance_points_will_toggle(&self) -> bool { self.enhance_algorithm .first() .zip(self.enhance_algorithm.last()) .map(|(first, last)| *first == '#' && *last == '.') .unwrap_or(false) } }
fn main() { let raw = read_input_as_string("2021/day20/src/input.txt"); let image = { let (raw_algorithm, raw_image) = raw.split_once("\n\n").unwrap(); let algorithm = raw_algorithm.chars().collect::<Vec<_>>(); let image = raw_image .lines() .enumerate() .flat_map(|(row, line)| { line.chars() .enumerate() .filter(|(_, pixel)| *pixel == '#') .map(move |(col, _)| (row as isize, col as isize)) }) .collect::<HashSet<Point>>(); let dimensions = Rect { lo_row: 0, lo_col: 0, hi_row: *image.iter().map(|(row, _)| row).max().unwrap(), hi_col: *image.iter().map(|(_, col)| col).max().unwrap(), }; Image { pixels_on: image, dimensions, enhance_iteration_toggle: false, enhance_algorithm: algorithm, } }; let answer1 = image .clone() .create_enhanced_image() .create_enhanced_image() .pixels_on .len(); let answer2 = (0..50) .fold(image, |acc, _| acc.create_enhanced_image()) .pixels_on .len(); println!("Part 1: {:?}", answer1); println!("Part 2: {:?}", answer2); }
function_block-full_function
[ { "content": "fn find_monster(image: &[Vec<char>], row: usize, col: usize) -> bool {\n\n MONSTER_OFFSETS\n\n .iter()\n\n .copied()\n\n .all(|(offset_row, offset_col)| {\n\n let operator = match offset_row.cmp(&0) {\n\n Ordering::Less => usize::checked_sub,\n\n _ => usize::checked_add,\n\n };\n\n\n\n operator(row, offset_row.abs() as usize)\n\n .zip(col.checked_add(offset_col.abs() as usize))\n\n .and_then(|(r, c)| image.get(r).and_then(|r| r.get(c)))\n\n .filter(|&&character| character == '#')\n\n .is_some()\n\n })\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 0, "score": 305356.0065176045 }, { "content": "fn find_orientation_matching_condition<F: FnMut(&Image) -> bool>(\n\n image: &[Vec<char>],\n\n mut func: F,\n\n) -> Option<Image> {\n\n let mut image = image.to_owned();\n\n for _ in 0..4 {\n\n if func(&image) {\n\n return Some(image);\n\n }\n\n image = rotate_image(image);\n\n }\n\n image = flip_image(image);\n\n for _ in 0..4 {\n\n if func(&image) {\n\n return Some(image);\n\n }\n\n image = rotate_image(image);\n\n }\n\n None\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 1, "score": 273325.10939510126 }, { "content": "#[inline]\n\nfn consume_bits(bit_stream: &mut VecDeque<usize>, packet_size: &mut usize, bits: usize) -> usize {\n\n *packet_size += bits;\n\n (0..bits)\n\n .into_iter()\n\n .map(|_| bit_stream.pop_front().unwrap_or(0))\n\n .fold(0usize, |mut acc, bit| {\n\n assert!(bit == 0 || bit == 1);\n\n acc <<= 1;\n\n acc += bit;\n\n acc\n\n })\n\n}\n\n\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 2, "score": 226883.4685231977 }, { "content": "#[inline]\n\nfn get_range(highest: bool) -> impl Iterator<Item = i64> {\n\n if highest {\n\n (1..10).rev().collect::<Vec<_>>().into_iter()\n\n } else {\n\n (1..10).collect::<Vec<_>>().into_iter()\n\n }\n\n}\n", "file_path": "rs/2021/day24/src/main.rs", "rank": 3, "score": 218067.58894138224 }, { "content": "type Point = (usize, usize);\n\n\n", "file_path": 
"rs/2021/day25/src/main.rs", "rank": 5, "score": 202938.65132956914 }, { "content": "type Point = (usize, usize);\n\nstatic TEMP_STORAGE_COORDINATES: [Point; 7] =\n\n [(1, 1), (1, 2), (1, 4), (1, 6), (1, 8), (1, 10), (1, 11)];\n\nstatic PART_1_TARGET_DESTINATION_A: [Point; 2] = [(2, 3), (3, 3)];\n\nstatic PART_1_TARGET_DESTINATION_B: [Point; 2] = [(2, 5), (3, 5)];\n\nstatic PART_1_TARGET_DESTINATION_C: [Point; 2] = [(2, 7), (3, 7)];\n\nstatic PART_1_TARGET_DESTINATION_D: [Point; 2] = [(2, 9), (3, 9)];\n\nstatic PART_2_TARGET_DESTINATION_A: [Point; 4] = [(2, 3), (3, 3), (4, 3), (5, 3)];\n\nstatic PART_2_TARGET_DESTINATION_B: [Point; 4] = [(2, 5), (3, 5), (4, 5), (5, 5)];\n\nstatic PART_2_TARGET_DESTINATION_C: [Point; 4] = [(2, 7), (3, 7), (4, 7), (5, 7)];\n\nstatic PART_2_TARGET_DESTINATION_D: [Point; 4] = [(2, 9), (3, 9), (4, 9), (5, 9)];\n\n\n", "file_path": "rs/2021/day23/src/main.rs", "rank": 6, "score": 202938.65132956914 }, { "content": "fn draw_line(point_to_frequency: &mut HashMap<Point, i32>, begin: Point, end: Point) {\n\n let (mut x, mut y) = begin;\n\n let direction_x = get_direction(x, end.0);\n\n let direction_y = get_direction(y, end.1);\n\n\n\n loop {\n\n *point_to_frequency.entry((x, y)).or_insert(0) += 1;\n\n\n\n if (x, y) == end {\n\n break;\n\n }\n\n\n\n x += direction_x;\n\n y += direction_y;\n\n }\n\n}\n\n\n", "file_path": "rs/2021/day05/src/main.rs", "rank": 7, "score": 198403.04306702106 }, { "content": "fn combine_image_row(row: Vec<Image>) -> Image {\n\n let image_count = row.len();\n\n let inner_row_count = row.get(0).unwrap().len();\n\n let inner_col_count = row.get(0).unwrap().get(0).unwrap().len();\n\n\n\n (0..inner_row_count)\n\n .map(|i| {\n\n let mut combined_row = Vec::with_capacity(image_count * inner_col_count);\n\n row.iter()\n\n .map(|image| &image[i])\n\n .for_each(|row_from_each_image| {\n\n combined_row.extend(row_from_each_image);\n\n });\n\n combined_row\n\n })\n\n .collect::<Vec<Vec<char>>>()\n\n}\n\n\n\nstatic 
MONSTER_OFFSETS: [(i8, i8); 15] = [\n\n (0, 0),\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 8, "score": 197091.02544465638 }, { "content": "#[inline]\n\nfn get_bits(edge: impl Iterator<Item = char>) -> u32 {\n\n let mut result = 0;\n\n for element in edge {\n\n result <<= 1;\n\n match element {\n\n '.' => result += 0,\n\n '#' => result += 1,\n\n _ => panic!(\"Invalid {}\", element),\n\n }\n\n }\n\n result\n\n}\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 9, "score": 193309.9604208467 }, { "content": "fn consume_packet(bit_stream: &mut VecDeque<usize>) -> Packet {\n\n let mut packet_size = 0;\n\n let packet_version = consume_bits(bit_stream, &mut packet_size, 3);\n\n let packet_type_id = consume_bits(bit_stream, &mut packet_size, 3);\n\n\n\n match packet_type_id {\n\n 4 => {\n\n let mut result = 0;\n\n\n\n loop {\n\n let read_bit = consume_bits(bit_stream, &mut packet_size, 1);\n\n let digit = consume_bits(bit_stream, &mut packet_size, 4);\n\n result <<= 4;\n\n result |= digit;\n\n\n\n if read_bit == 0 {\n\n break;\n\n }\n\n }\n\n\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 10, "score": 193190.0164249387 }, { "content": "fn get_overlap_count(data: &[(Point, Point)]) -> usize {\n\n let mut freq = HashMap::new();\n\n for (begin, end) in data {\n\n draw_line(&mut freq, *begin, *end);\n\n }\n\n\n\n freq.values()\n\n .into_iter()\n\n .filter(|count| **count > 1)\n\n .count()\n\n}\n", "file_path": "rs/2021/day05/src/main.rs", "rank": 11, "score": 191179.88961911472 }, { "content": "/// Simulate probe trajectory\n\n///\n\n/// Returns Some(highest_y_position_ever_reached) if the probe ever touches the target.\n\n/// Returns None if it never hits the target.\n\nfn launch_probe(mut velocity_x: isize, mut velocity_y: isize) -> Option<isize> {\n\n let mut position_x = 0;\n\n let mut position_y = 0;\n\n let mut highest_y = 0;\n\n loop {\n\n let will_never_reach_x_target = (&position_x < TARGET_X_RANGE.start() && velocity_x <= 0)\n\n || (&position_x > 
TARGET_X_RANGE.end() && velocity_x >= 0);\n\n let will_never_reach_y_target = &position_y < TARGET_Y_RANGE.start();\n\n\n\n if will_never_reach_x_target || will_never_reach_y_target {\n\n return None;\n\n } else if TARGET_X_RANGE.contains(&position_x) && TARGET_Y_RANGE.contains(&position_y) {\n\n return Some(highest_y);\n\n }\n\n\n\n position_x += velocity_x;\n\n position_y += velocity_y;\n\n highest_y = max(highest_y, position_y);\n\n\n\n velocity_x = velocity_x.signum() * (velocity_x.abs() - 1);\n\n velocity_y -= 1;\n\n }\n\n}\n", "file_path": "rs/2021/day17/src/main.rs", "rank": 12, "score": 188037.6859131539 }, { "content": "fn binary_search(mut low: usize, mut high: usize, slice: &str) -> usize {\n\n let mut mid = (low + high) / 2;\n\n for char in slice.chars() {\n\n match char {\n\n 'F' | 'L' => high = mid,\n\n 'B' | 'R' => low = mid + 1,\n\n _ => panic!(\"Invalid character {}\", char),\n\n }\n\n mid = (low + high) / 2;\n\n }\n\n mid\n\n}\n\n\n", "file_path": "rs/2020/day05/src/main.rs", "rank": 13, "score": 183111.33972715418 }, { "content": "fn calculate_seat_id(row: usize, column: usize) -> usize {\n\n row * 8 + column\n\n}\n", "file_path": "rs/2020/day05/src/main.rs", "rank": 14, "score": 178483.40739158358 }, { "content": "fn collapse_stack(mut stack: Vec<Term>, remove_open: bool, addition_precedence: bool) -> Vec<Term> {\n\n if stack.is_empty() {\n\n return stack;\n\n }\n\n\n\n let mut value = get_from_scalar(stack.pop().unwrap());\n\n while stack\n\n .last()\n\n .filter(|&term| *term != Term::Open)\n\n .filter(|&term| (!addition_precedence || (remove_open || *term != Term::Multiply)))\n\n .is_some()\n\n {\n\n let operation = stack.pop().unwrap();\n\n let lhs = get_from_scalar(stack.pop().unwrap());\n\n match operation {\n\n Term::Add => value += lhs,\n\n Term::Multiply => value *= lhs,\n\n _ => panic!(\"Invalid operation {:?}\", operation),\n\n };\n\n }\n\n if remove_open && stack.last().filter(|&term| *term == Term::Open).is_some() {\n\n 
stack.pop().unwrap();\n\n }\n\n stack.push(Term::Scalar(value));\n\n stack\n\n}\n\n\n", "file_path": "rs/2020/day18/src/main.rs", "rank": 15, "score": 175395.305652252 }, { "content": "/// Optionally returns the cost of traveling to the target point.\n\n/// Returns None if the point cannot be reached.\n\nfn bfs(graph: &[Vec<Amphipod>], start: Point, target: Point) -> Option<usize> {\n\n let limits = get_limits(graph);\n\n let cost_per_move = match graph.get(start.0).unwrap().get(start.1).unwrap() {\n\n Amphipod::A => 1,\n\n Amphipod::B => 10,\n\n Amphipod::C => 100,\n\n Amphipod::D => 1000,\n\n _ => panic!(\"Only Amphipods may move. Tried to move from {:?}\", start),\n\n };\n\n\n\n let mut queue = VecDeque::new();\n\n let mut visited = HashSet::new();\n\n for point in get_adjacent_points_manhattan(start, limits) {\n\n queue.push_back((point, 1));\n\n visited.insert(point);\n\n }\n\n\n\n while let Some((coordinate, steps)) = queue.pop_front() {\n\n let current = graph.get(coordinate.0).unwrap().get(coordinate.1).unwrap();\n\n\n", "file_path": "rs/2021/day23/src/main.rs", "rank": 17, "score": 166871.71684944152 }, { "content": "fn step(octopuses: &mut [Vec<u8>]) -> usize {\n\n let mut queue = VecDeque::new();\n\n let mut flashed = HashSet::new();\n\n\n\n // increase energy\n\n for (r, row) in octopuses.iter_mut().enumerate() {\n\n for (c, octopus) in row.iter_mut().enumerate() {\n\n *octopus += 1; // could overflow\n\n if *octopus > 9 {\n\n // octopus flashes\n\n queue.push_back((r, c));\n\n flashed.insert((r, c));\n\n }\n\n }\n\n }\n\n\n\n // flash the octopuses\n\n let row_limit = octopuses.len();\n\n let col_limit = octopuses.iter().next().unwrap().len();\n\n while !queue.is_empty() {\n", "file_path": "rs/2021/day11/src/main.rs", "rank": 18, "score": 166065.81135527213 }, { "content": "#[inline]\n\nfn flip_image(mut image: Vec<Vec<char>>) -> Vec<Vec<char>> {\n\n image.reverse();\n\n image\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 19, "score": 
158165.47733429357 }, { "content": "fn find_scores_for_first_and_last_winner(\n\n drawn_numbers: &[u32],\n\n mut boards: Vec<Board>,\n\n) -> (u32, u32) {\n\n let mut first_winner_score: Option<u32> = None;\n\n\n\n for drawn_number in drawn_numbers {\n\n boards\n\n .iter_mut()\n\n .for_each(|board| board.mark_number(*drawn_number));\n\n\n\n // swap-remove winners from the boards Vec, saving the first winner and returning once\n\n // the last winner is found.\n\n // assumes there is an unambiguous first-winner and an unambiguous last-winner.\n\n let mut i = 0;\n\n while i < boards.len() {\n\n if boards[i].is_winning() {\n\n let board = boards.swap_remove(i);\n\n let unmarked_numbers = board\n\n .number_to_coordinates\n", "file_path": "rs/2021/day04/src/main.rs", "rank": 20, "score": 157595.65604891055 }, { "content": "type Point = (i32, i32);\n\n\n", "file_path": "rs/2021/day05/src/main.rs", "rank": 21, "score": 150500.05146249552 }, { "content": "fn to_bits(s: &str) -> VecDeque<usize> {\n\n s.chars()\n\n .flat_map(|char| match char {\n\n '0' => vec![0, 0, 0, 0],\n\n '1' => vec![0, 0, 0, 1],\n\n '2' => vec![0, 0, 1, 0],\n\n '3' => vec![0, 0, 1, 1],\n\n '4' => vec![0, 1, 0, 0],\n\n '5' => vec![0, 1, 0, 1],\n\n '6' => vec![0, 1, 1, 0],\n\n '7' => vec![0, 1, 1, 1],\n\n '8' => vec![1, 0, 0, 0],\n\n '9' => vec![1, 0, 0, 1],\n\n 'A' => vec![1, 0, 1, 0],\n\n 'B' => vec![1, 0, 1, 1],\n\n 'C' => vec![1, 1, 0, 0],\n\n 'D' => vec![1, 1, 0, 1],\n\n 'E' => vec![1, 1, 1, 0],\n\n 'F' => vec![1, 1, 1, 1],\n\n _ => panic!(\"Invalid hexadecimal value\"),\n\n })\n\n .collect::<VecDeque<usize>>()\n\n}\n\n\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 22, "score": 144662.4399060028 }, { "content": "#[inline]\n\nfn adjacent_points(data: &[Vec<u8>], location: (usize, usize)) -> Vec<(usize, usize)> {\n\n let mut result = Vec::with_capacity(4);\n\n let max_row_size = data.len();\n\n let max_col_size = data.iter().next().unwrap().len();\n\n let (row, col) = location;\n\n\n\n if let 
Some((prev_row, col)) = row.checked_sub(1).map(|prev_row| (prev_row, col)) {\n\n result.push((prev_row, col));\n\n }\n\n if let Some((row, prev_col)) = col.checked_sub(1).map(|prev_col| (row, prev_col)) {\n\n result.push((row, prev_col));\n\n }\n\n if row + 1 < max_row_size {\n\n result.push((row + 1, col));\n\n }\n\n if col + 1 < max_col_size {\n\n result.push((row, col + 1));\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "rs/2021/day09/src/main.rs", "rank": 23, "score": 142905.29088048195 }, { "content": "fn get_possible_moves(graph: &[Vec<Amphipod>]) -> Vec<(Point, Point)> {\n\n let source_movers = std::iter::once((PART_1_TARGET_DESTINATION_A, Amphipod::A))\n\n .chain(std::iter::once((PART_1_TARGET_DESTINATION_B, Amphipod::B)))\n\n .chain(std::iter::once((PART_1_TARGET_DESTINATION_C, Amphipod::C)))\n\n .chain(std::iter::once((PART_1_TARGET_DESTINATION_D, Amphipod::D)))\n\n .filter_map(|(locations, target_amphipod)| {\n\n let [first, second] = locations;\n\n let first_amphipod = get(graph, first).unwrap();\n\n let second_amphipod = get(graph, second).unwrap();\n\n if [first_amphipod, second_amphipod]\n\n .iter()\n\n .all(|a| *a == Amphipod::Empty || *a == target_amphipod)\n\n {\n\n // already solved or completely empty\n\n None\n\n } else if first_amphipod == Amphipod::Empty {\n\n if second_amphipod != target_amphipod {\n\n Some(second)\n\n } else {\n\n None\n", "file_path": "rs/2021/day23/src/main.rs", "rank": 24, "score": 142883.64896694757 }, { "content": "fn count_monsters(image: &[Vec<char>]) -> usize {\n\n let row_size = image.len();\n\n let col_size = image.iter().next().unwrap().len();\n\n\n\n (0..row_size)\n\n .flat_map(|r| (0..col_size).map(move |c| (r, c)))\n\n .filter(|(r, c)| find_monster(image, *r, *c))\n\n .count()\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 25, "score": 141599.25268800822 }, { "content": "#[inline]\n\nfn prune(map: &mut HashMap<usize, HashSet<Rule>>, rule_to_remove: Rule) {\n\n for (_, possible_rules) in 
map.iter_mut() {\n\n possible_rules.remove(&rule_to_remove);\n\n }\n\n map.retain(|_, possible_rules| !possible_rules.is_empty());\n\n}\n\n\n", "file_path": "rs/2020/day16/src/main.rs", "rank": 26, "score": 141308.98401078832 }, { "content": "fn get_possible_moves_part2(graph: &[Vec<Amphipod>]) -> Vec<(Point, Point)> {\n\n let source_movers = std::iter::once((PART_2_TARGET_DESTINATION_A, Amphipod::A))\n\n .chain(std::iter::once((PART_2_TARGET_DESTINATION_B, Amphipod::B)))\n\n .chain(std::iter::once((PART_2_TARGET_DESTINATION_C, Amphipod::C)))\n\n .chain(std::iter::once((PART_2_TARGET_DESTINATION_D, Amphipod::D)))\n\n .filter_map(|(locations, target_amphipod)| {\n\n let [first, second, third, fourth] = locations;\n\n let first_amphipod = get(graph, first).unwrap();\n\n let second_amphipod = get(graph, second).unwrap();\n\n let third_amphipod = get(graph, third).unwrap();\n\n let fourth_amphipod = get(graph, fourth).unwrap();\n\n\n\n let already_solved_or_all_empty = [\n\n first_amphipod,\n\n second_amphipod,\n\n third_amphipod,\n\n fourth_amphipod,\n\n ]\n\n .iter()\n\n .all(|a| *a == target_amphipod || *a == Amphipod::Empty);\n", "file_path": "rs/2021/day23/src/main.rs", "rank": 27, "score": 141308.8186661115 }, { "content": "#[inline]\n\nfn get(graph: &[Vec<Amphipod>], point: Point) -> Option<Amphipod> {\n\n graph.get(point.0).and_then(|r| r.get(point.1).copied())\n\n}\n\n\n", "file_path": "rs/2021/day23/src/main.rs", "rank": 28, "score": 141241.868942266 }, { "content": "fn brute_force_player_turn(dice: &mut u32, player_score: &mut u32, player_position: &mut u32) {\n\n *dice += 1;\n\n *player_position += *dice;\n\n *dice += 1;\n\n *player_position += *dice;\n\n *dice += 1;\n\n *player_position += *dice;\n\n *player_position %= BOARD_SIZE;\n\n *player_score += *player_position + 1;\n\n}\n\n\n", "file_path": "rs/2021/day21/src/main.rs", "rank": 29, "score": 139196.37010026298 }, { "content": "#[inline]\n\nfn transpose_image(image: Image) -> Image {\n\n let row_size 
= image.len();\n\n let col_size = image.get(0).unwrap().len();\n\n (0..col_size)\n\n .map(|c| (0..row_size).map(|r| image[r][c]).collect::<Vec<_>>())\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 31, "score": 128391.8298469997 }, { "content": "#[inline]\n\nfn rotate_image(image: Image) -> Image {\n\n let mut result = transpose_image(image);\n\n for row in result.iter_mut() {\n\n row.reverse();\n\n }\n\n result\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 32, "score": 128391.8298469997 }, { "content": "fn compare_one_bits_to_zero_bits_at_index(\n\n ratings: &[impl AsRef<[char]>],\n\n index: usize,\n\n) -> Ordering {\n\n let one_count = ratings\n\n .iter()\n\n .filter(|rating| rating.as_ref()[index] == '1')\n\n .count();\n\n let zero_count = ratings.len() - one_count;\n\n one_count.cmp(&zero_count)\n\n}\n\n\n", "file_path": "rs/2021/day03/src/main.rs", "rank": 33, "score": 128161.02771473935 }, { "content": "fn recursive_game(player_1: &mut VecDeque<u32>, player_2: &mut VecDeque<u32>) -> Player {\n\n let mut seen_p1 = HashSet::new();\n\n let mut seen_p2 = HashSet::new();\n\n\n\n while !player_1.is_empty() && !player_2.is_empty() {\n\n let p1_hash = hash_vecdeque(player_1);\n\n let p2_hash = hash_vecdeque(player_2);\n\n\n\n if seen_p1.contains(&p1_hash) || seen_p2.contains(&p2_hash) {\n\n return Player::One;\n\n } else {\n\n seen_p1.insert(p1_hash);\n\n seen_p2.insert(p2_hash);\n\n }\n\n\n\n let p1 = player_1.pop_front().unwrap();\n\n let p2 = player_2.pop_front().unwrap();\n\n\n\n let winner = if player_1.len() >= p1 as usize && player_2.len() >= p2 as usize {\n\n let mut p1_copy = player_1.clone();\n", "file_path": "rs/2020/day22/src/main.rs", "rank": 34, "score": 128100.34476556652 }, { "content": "fn non_recursive_game(player_1: &mut VecDeque<u32>, player_2: &mut VecDeque<u32>) -> Player {\n\n while !player_1.is_empty() && !player_2.is_empty() {\n\n let p1 = player_1.pop_front().unwrap();\n\n let p2 = 
player_2.pop_front().unwrap();\n\n if p1 > p2 {\n\n player_1.push_back(p1);\n\n player_1.push_back(p2);\n\n } else {\n\n player_2.push_back(p2);\n\n player_2.push_back(p1);\n\n }\n\n }\n\n\n\n if player_1.is_empty() {\n\n Player::Two\n\n } else {\n\n Player::One\n\n }\n\n}\n\n\n", "file_path": "rs/2020/day22/src/main.rs", "rank": 35, "score": 126750.03904250124 }, { "content": "fn run_instructions(instructions: &[(String, i64)]) -> (bool, i64) {\n\n let mut accumulator: i64 = 0;\n\n let mut ip = 0;\n\n let mut seen_ip = HashSet::new();\n\n\n\n while ip < instructions.len() {\n\n if seen_ip.contains(&ip) {\n\n return (false, accumulator);\n\n }\n\n seen_ip.insert(ip);\n\n let (command, value) = instructions.get(ip).unwrap();\n\n match command.as_str() {\n\n \"acc\" => {\n\n accumulator += value;\n\n ip += 1;\n\n }\n\n \"jmp\" => {\n\n ip = (ip as i64 + value) as usize;\n\n }\n\n _ => {\n\n ip += 1;\n\n }\n\n }\n\n }\n\n (true, accumulator)\n\n}\n\n\n", "file_path": "rs/2020/day08/src/main.rs", "rank": 36, "score": 126193.12711434002 }, { "content": "fn is_valid(sorted_nums: &[u64], target: u64) -> bool {\n\n let mut left = 0;\n\n let mut right = sorted_nums.len() - 1;\n\n while left < right {\n\n let sum = sorted_nums[left] + sorted_nums[right];\n\n match sum.cmp(&target) {\n\n Ordering::Greater => right -= 1,\n\n Ordering::Less => left += 1,\n\n Ordering::Equal => return true,\n\n }\n\n }\n\n false\n\n}\n", "file_path": "rs/2020/day09/src/main.rs", "rank": 37, "score": 126193.12711434002 }, { "content": "fn evaluate_math(s: &str, add_has_precedence_over_mul: bool) -> u64 {\n\n if s.is_empty() {\n\n return 0;\n\n }\n\n\n\n let mut stack = Vec::<Term>::with_capacity(100);\n\n let parsed = s\n\n .replace('(', \"( \")\n\n .replace(')', \" )\")\n\n .split(' ')\n\n .map(|term| match term {\n\n \"+\" => Term::Add,\n\n \"*\" => Term::Multiply,\n\n \"(\" => Term::Open,\n\n \")\" => Term::Close,\n\n _ => Term::Scalar(\n\n term.parse()\n\n .unwrap_or_else(|_| panic!(\"Invalid 
value {}\", term)),\n\n ),\n\n })\n", "file_path": "rs/2020/day18/src/main.rs", "rank": 38, "score": 124369.31892597472 }, { "content": "fn dfs(monads: &[Monad], highest: bool) -> Option<i64> {\n\n fn dfs_helper(\n\n monad_index: usize,\n\n input: i64,\n\n prev_z: i64,\n\n monads: &[Monad],\n\n memo: &mut HashSet<(usize, i64, i64)>,\n\n highest: bool,\n\n ) -> Option<Vec<i64>> {\n\n if memo.contains(&(monad_index, input, prev_z)) || monad_index >= monads.len() {\n\n // println!(\"hit a memoized value\");\n\n None\n\n } else {\n\n memo.insert((monad_index, input, prev_z));\n\n let mut monad = monads.get(monad_index).unwrap().clone();\n\n monad.z = prev_z;\n\n let new_z = monad.run_with(input);\n\n\n\n if new_z == 0 && monad_index == monads.len() - 1 {\n\n Some(vec![input])\n", "file_path": "rs/2021/day24/src/main.rs", "rank": 39, "score": 122629.48148108172 }, { "content": "type SnailfishIndex = usize;\n\n\n", "file_path": "rs/2021/day18/src/main.rs", "rank": 40, "score": 120043.78360843097 }, { "content": "fn turn_off_box(boxes: &mut Vec<Box>, removal_box: Box) {\n\n let mut result = Vec::new();\n\n let mut queue = VecDeque::new();\n\n queue.extend(boxes.iter().copied());\n\n\n\n while let Some(existing_box) = queue.pop_front() {\n\n if removal_box.fully_contains(&existing_box) {\n\n continue;\n\n } else if let Some(overlap) = removal_box.overlap(&existing_box) {\n\n queue.extend(existing_box.difference(&overlap));\n\n } else {\n\n assert!(existing_box.is_disjoint(&removal_box));\n\n result.push(existing_box);\n\n }\n\n }\n\n\n\n *boxes = result;\n\n}\n", "file_path": "rs/2021/day22/src/main.rs", "rank": 41, "score": 118998.6316031689 }, { "content": "fn reduce_snailfish(graph: &mut Vec<Snailfish>, root: SnailfishIndex) {\n\n loop {\n\n if let Some(exploder_index) = locate_id_that_needs_to_explode(graph, root, 0) {\n\n let explode_config = get_explode_configuration(graph, root, exploder_index);\n\n explode(\n\n graph,\n\n explode_config.pair_exploder,\n\n 
explode_config.number_left,\n\n explode_config.number_right,\n\n );\n\n } else if let Some(splitter_index) = locate_id_that_needs_to_split(graph, root) {\n\n split_snailfish(graph, splitter_index);\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "rs/2021/day18/src/main.rs", "rank": 42, "score": 118998.6316031689 }, { "content": "fn turn_on_box(boxes: &mut Vec<Box>, new_box: Box) {\n\n let mut queue = VecDeque::new();\n\n queue.push_back(new_box);\n\n\n\n while let Some(new_box) = queue.pop_front() {\n\n let mut is_fully_disjoint = true;\n\n for existing_box in boxes.iter() {\n\n if let Some(overlap) = existing_box.overlap(&new_box) {\n\n is_fully_disjoint = false;\n\n queue.extend(new_box.difference(&overlap));\n\n break;\n\n }\n\n }\n\n if is_fully_disjoint {\n\n boxes.push(new_box);\n\n }\n\n }\n\n}\n\n\n", "file_path": "rs/2021/day22/src/main.rs", "rank": 43, "score": 118998.6316031689 }, { "content": "/// Helper method that gets the 8 squares surrounding a given point in a 2D grid.\n\n/// Note that the point is (row, col), where (0, 0) is in the top-left corner.\n\n///\n\n/// Example:\n\n/// Given: point: (2, 2) limits_non_inclusive: (5, 5)\n\n/// Yields: vec![(1, 1), (1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2), (3, 3)]\n\n///\n\n/// Example:\n\n/// Given: point: (2, 0) limits_non_inclusive: (3, 5)\n\n/// Yields: vec![(1, 0), (1, 1), (2, 1)]\n\npub fn get_adjacent_points(\n\n point: (usize, usize),\n\n limits_non_inclusive: (usize, usize),\n\n) -> Vec<(usize, usize)> {\n\n let (row, col) = point;\n\n let (row_limit, col_limit) = limits_non_inclusive;\n\n let prev_row = row.checked_sub(1).filter(|value| *value < row_limit);\n\n let next_row = row.checked_add(1).filter(|value| *value < row_limit);\n\n let prev_col = col.checked_sub(1).filter(|value| *value < col_limit);\n\n let next_col = col.checked_add(1).filter(|value| *value < col_limit);\n\n let row = Some(row);\n\n let col = Some(col);\n\n\n\n let mut result = Vec::new();\n\n for r in 
[prev_row, row, next_row] {\n\n if r.is_none() {\n\n continue;\n\n }\n\n for c in [prev_col, col, next_col] {\n\n if r == row && c == col {\n", "file_path": "rs/src/lib.rs", "rank": 44, "score": 118527.81259480285 }, { "content": "fn split_snailfish(graph: &mut Vec<Snailfish>, split_index: SnailfishIndex) {\n\n let splitter = graph.get_mut(split_index).unwrap();\n\n let value = splitter.value();\n\n let new_left = Snailfish::Number {\n\n id: unsafe { generate_id() },\n\n value: (value as f32 / 2.0).floor() as u32,\n\n };\n\n let new_right = Snailfish::Number {\n\n id: unsafe { generate_id() },\n\n value: (value as f32 / 2.0).ceil() as u32,\n\n };\n\n let new_pair = Snailfish::Pair {\n\n id: splitter.id(),\n\n left: new_left.id(),\n\n right: new_right.id(),\n\n };\n\n graph.push(new_left);\n\n graph.push(new_right);\n\n graph[split_index] = new_pair;\n\n}\n\n\n", "file_path": "rs/2021/day18/src/main.rs", "rank": 45, "score": 117345.86573592054 }, { "content": "fn build_image(jigsaw: &[Vec<(u32, Image)>]) -> Image {\n\n jigsaw\n\n .iter()\n\n .map(|row| {\n\n row.iter()\n\n .map(|(_, image)| {\n\n image[1..image.len() - 1]\n\n .iter()\n\n .map(|row| row[1..row.len() - 1].iter().copied().collect::<Vec<_>>())\n\n .collect::<Vec<_>>()\n\n })\n\n .collect::<Vec<_>>()\n\n })\n\n .flat_map(combine_image_row)\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 46, "score": 117340.48059367554 }, { "content": "/// Helper method that gets the 4 squares surrounding a given point in a 2D grid.\n\n/// Note that the point is (row, col), where (0, 0) is in the top-left corner.\n\n///\n\n/// Example:\n\n/// Given: point: (2, 2) limits_non_inclusive: (5, 5)\n\n/// Yields: vec![(1, 2), (3, 2), (2, 1), (2, 3)]\n\n///\n\n/// Example:\n\n/// Given: point: (2, 0) limits_non_inclusive: (3, 5)\n\n/// Yields: vec![(1, 0), (2, 1)]\n\npub fn get_adjacent_points_manhattan(\n\n point: (usize, usize),\n\n limits_non_inclusive: (usize, usize),\n\n) -> 
Vec<(usize, usize)> {\n\n let (row, col) = point;\n\n let (row_limit, col_limit) = limits_non_inclusive;\n\n let prev_row = row.checked_sub(1).filter(|value| *value < row_limit);\n\n let next_row = row.checked_add(1).filter(|value| *value < row_limit);\n\n let prev_col = col.checked_sub(1).filter(|value| *value < col_limit);\n\n let next_col = col.checked_add(1).filter(|value| *value < col_limit);\n\n\n\n let mut result = Vec::<(usize, usize)>::new();\n\n for next_row in [prev_row, next_row].iter().flatten() {\n\n result.push((*next_row, col));\n\n }\n\n for next_col in [prev_col, next_col].iter().flatten() {\n\n result.push((row, *next_col));\n\n }\n\n result\n\n}\n\n\n", "file_path": "rs/src/lib.rs", "rank": 47, "score": 116277.0908364587 }, { "content": "/// [[[[4,3],4],4],[7,[[8,4],9]]]\n\nfn parse_graph(mut s: VecDeque<char>) -> (SnailfishIndex, Vec<Snailfish>) {\n\n let mut result: Vec<Snailfish> = Vec::new();\n\n\n\n fn helper<'a>(\n\n remaining: &'a mut VecDeque<char>,\n\n result: &'a mut Vec<Snailfish>,\n\n ) -> Option<SnailfishIndex> {\n\n if let Some(next_char) = remaining.pop_front() {\n\n if next_char == '[' {\n\n let left = helper(remaining, result).unwrap();\n\n let right = helper(remaining, result).unwrap();\n\n let snailfish = Snailfish::Pair {\n\n id: unsafe { generate_id() },\n\n left,\n\n right,\n\n };\n\n result.push(snailfish);\n\n\n\n if !remaining.is_empty() && (remaining[0] == ',' || remaining[0] == ']') {\n\n remaining.pop_front();\n", "file_path": "rs/2021/day18/src/main.rs", "rank": 48, "score": 114236.72717664894 }, { "content": "/// For a given initial x velocity, finds the highest y position reached along\n\n/// the probe's trajectory while still landing in the target at some point later\n\n/// in the probe path.\n\n///\n\n/// Tests a hardcoded y range of 0..1000.\n\n/// If it never hits the target, then returns None.\n\nfn find_highest_y(initial_x_velocity: isize) -> Option<isize> {\n\n (0..1000)\n\n .filter_map(|initial_y_velocity| 
launch_probe(initial_x_velocity, initial_y_velocity))\n\n .max()\n\n}\n\n\n", "file_path": "rs/2021/day17/src/main.rs", "rank": 49, "score": 113567.06386660282 }, { "content": "fn count_trees(grid: &[String], right: usize, down: usize) -> usize {\n\n grid.iter()\n\n .enumerate()\n\n .skip(down)\n\n .filter(|(row, line)| {\n\n row % down == 0 && line.chars().nth((row * right / down) % line.len()).unwrap() == '#'\n\n })\n\n .count()\n\n}\n", "file_path": "rs/2020/day03/src/main.rs", "rank": 50, "score": 112362.59120652566 }, { "content": "type Image = Vec<Vec<char>>;\n\n\n", "file_path": "rs/2020/day20/src/main.rs", "rank": 51, "score": 111465.69655304038 }, { "content": "/// Determines the range of initial x velocities where the probe would stop moving\n\n/// in the x direction and the x position is in the correct range.\n\n/// This is not the same as the range of ALL initial x's that can be in the range.\n\nfn find_x_range() -> RangeInclusive<isize> {\n\n fn find_finished_x_position(initial_velocity: isize) -> isize {\n\n initial_velocity.signum() * ((initial_velocity.abs() * (initial_velocity.abs() + 1)) / 2)\n\n }\n\n\n\n // Find the lower bound\n\n let lo_bound = {\n\n let mut velocity = 0;\n\n loop {\n\n if TARGET_X_RANGE.contains(&find_finished_x_position(velocity)) {\n\n break;\n\n }\n\n velocity += 1;\n\n }\n\n velocity\n\n };\n\n\n\n // Find the upper bound\n\n let hi_bound_non_inclusive = {\n\n let mut velocity = lo_bound;\n", "file_path": "rs/2021/day17/src/main.rs", "rank": 52, "score": 110833.55004243113 }, { "content": "fn get_answer(pair_counts: &HashMap<String, usize>) -> usize {\n\n let freq = count_chars_from_pair_counts(pair_counts)\n\n .values()\n\n .copied()\n\n .collect::<Vec<_>>();\n\n\n\n let (min, max) = get_min_and_max(&freq);\n\n max - min\n\n}\n", "file_path": "rs/2021/day14/src/main.rs", "rank": 53, "score": 108052.91101960279 }, { "content": "fn count_increases(nums: &[i32]) -> usize {\n\n nums.windows(2)\n\n .filter(|window| window[0] 
< window[1])\n\n .count()\n\n}\n", "file_path": "rs/2021/day01/src/main.rs", "rank": 54, "score": 107906.59582205929 }, { "content": "fn evaluate_packet(packet: &Packet) -> usize {\n\n match packet {\n\n Packet::Literal(data) => data.value,\n\n Packet::Operator(data) => {\n\n let mut values = data\n\n .sub_packets\n\n .iter()\n\n .map(|sub_packet| evaluate_packet(sub_packet));\n\n match data.packet_type {\n\n 0 => values.sum(),\n\n 1 => values.product(),\n\n 2 => values.min().unwrap(),\n\n 3 => values.max().unwrap(),\n\n 5 => (values.next() > values.next()) as usize,\n\n 6 => (values.next() < values.next()) as usize,\n\n 7 => (values.next() == values.next()) as usize,\n\n _ => panic!(\"Invalid packet type\"),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 55, "score": 107906.59582205929 }, { "content": "/// When rolling 3d3, there are 27 possible results.\n\n/// There is 1 way to make 3: (1:1:1)\n\n/// There are 3 ways to make 4: (1:1:2, 1:2:1, 2:1:1)\n\n/// There are 6 ways to make 5:\n\n/// (1:1:3, 1:3:1, 3:1:1, 1:2:2, 2:1:2, 2:2:1)\n\n/// There are 7 ways to make 6:\n\n/// (1:2:3, 1:3:2, 2:1:3, 2:3:1, 3:1:2, 3:2:1, 2:2:2)\n\n/// There are 6 ways to make 7:\n\n/// (1:3:3, 3:1:3, 3:3:1, 2:2:3, 2:3:2, 3:2:2)\n\n/// There are 3 ways to make 8: (3:3:2, 3:2:3, 2:3:3)\n\n/// There is 1 way to make 9: (3:3:3)\n\n///\n\n/// If both players get to roll without either winning, then\n\n/// there are 27*27 possible outcomes.\n\nfn find_wins_for_state_helper(state: &State, memo: &mut HashMap<State, Wins>) -> Wins {\n\n if memo.get(state).is_none() {\n\n let mut p1_wins = 0;\n\n let mut p2_wins = 0;\n\n\n\n let total_roll_to_ways_count = [(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)];\n\n\n\n for (total_roll, p1_ways) in total_roll_to_ways_count {\n\n let new_p1_position = (state.p1_position + total_roll) % BOARD_SIZE;\n\n let new_p1_score = state.p1_score + new_p1_position + 1;\n\n if new_p1_score >= 21 {\n\n p1_wins += p1_ways;\n\n 
continue;\n\n }\n\n\n\n for (total_roll, p2_ways) in total_roll_to_ways_count {\n\n let new_p2_position = (state.p2_position + total_roll) % BOARD_SIZE;\n\n let new_p2_score = state.p2_score + new_p2_position + 1;\n\n if new_p2_score >= 21 {\n\n p2_wins += p2_ways;\n", "file_path": "rs/2021/day21/src/main.rs", "rank": 56, "score": 106977.66300166943 }, { "content": "/// Gets the limits of a 2D Vec, where the limit is the non-inclusive\n\n/// row and column that signifies the end of the 2D Vec.\n\npub fn get_limits<T>(values: &[Vec<T>]) -> (usize, usize) {\n\n let col = values.get(0).map(|r| r.len()).unwrap_or(0);\n\n let row = values.len();\n\n (row, col)\n\n}\n", "file_path": "rs/src/lib.rs", "rank": 57, "score": 106395.8492729808 }, { "content": "#[inline]\n\nfn overloaded_checked_add(num: usize, rhs: i8) -> Option<usize> {\n\n match rhs.cmp(&0) {\n\n Ordering::Less => num.checked_sub(rhs.abs() as usize),\n\n Ordering::Equal => Some(num),\n\n Ordering::Greater => num.checked_add(rhs.abs() as usize),\n\n }\n\n}\n", "file_path": "rs/2020/day11/src/main.rs", "rank": 58, "score": 106294.81402143517 }, { "content": "fn search_for_seat_id(slice: &str) -> usize {\n\n let len = slice.len();\n\n let row = binary_search(0, 127, &slice[..len - 3]);\n\n let column = binary_search(0, 7, &slice[len - 3..]);\n\n calculate_seat_id(row, column)\n\n}\n\n\n", "file_path": "rs/2020/day05/src/main.rs", "rank": 59, "score": 105887.14814512302 }, { "content": "fn sum_packet_versions(packet: &Packet) -> usize {\n\n match packet {\n\n Packet::Literal(data) => data.version,\n\n Packet::Operator(data) => {\n\n data.version\n\n + data\n\n .sub_packets\n\n .iter()\n\n .map(|sub| sum_packet_versions(sub))\n\n .sum::<usize>()\n\n }\n\n }\n\n}\n\n\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 60, "score": 105887.14814512302 }, { "content": "fn find_rating(ratings: &[Vec<char>], rating_type: RatingType) -> u32 {\n\n let mut ratings = ratings.iter().collect::<Vec<_>>();\n\n let total_bits 
= ratings[0].len();\n\n\n\n for index in 0..total_bits {\n\n // find bit criteria\n\n let bit_criteria = {\n\n match compare_one_bits_to_zero_bits_at_index(&ratings, index) {\n\n Ordering::Greater | Ordering::Equal => match rating_type {\n\n RatingType::OxygenGenerator => '1',\n\n RatingType::CO2Scrubber => '0',\n\n },\n\n Ordering::Less => match rating_type {\n\n RatingType::OxygenGenerator => '0',\n\n RatingType::CO2Scrubber => '1',\n\n },\n\n }\n\n };\n\n\n\n // apply bit criteria\n", "file_path": "rs/2021/day03/src/main.rs", "rank": 61, "score": 104778.65752885837 }, { "content": "fn get_size(data: &[Vec<u8>], location: (usize, usize)) -> i32 {\n\n let mut seen = HashSet::new();\n\n let mut queue = VecDeque::new();\n\n let mut size = 0;\n\n queue.push_back(location);\n\n\n\n while !queue.is_empty() {\n\n let (row, col) = queue.pop_front().unwrap();\n\n if seen.contains(&(row, col)) {\n\n continue;\n\n } else {\n\n seen.insert((row, col));\n\n }\n\n\n\n let value = *data.get(row).and_then(|row| row.get(col)).unwrap();\n\n if value == 9 {\n\n continue;\n\n }\n\n\n\n size += 1;\n\n adjacent_points(data, (row, col))\n\n .into_iter()\n\n .for_each(|point| queue.push_back(point));\n\n }\n\n\n\n size\n\n}\n", "file_path": "rs/2021/day09/src/main.rs", "rank": 62, "score": 104752.14829010906 }, { "content": "/// Helper method that gets the first character out of a string slice.\n\n///\n\n/// Example:\n\n/// Given: \"vjbhasdfkel1\"\n\n/// Yields: Some('v')\n\n///\n\n/// Example:\n\n/// Given: \"\"\n\n/// Yields: None\n\npub fn first_char(s: &str) -> Option<char> {\n\n s.chars().next()\n\n}\n\n\n", "file_path": "rs/src/lib.rs", "rank": 63, "score": 103586.80020807087 }, { "content": "#[inline]\n\nfn adjacent_values(data: &[Vec<u8>], location: (usize, usize)) -> Vec<u8> {\n\n adjacent_points(data, location)\n\n .into_iter()\n\n .map(|(row, col)| *data.get(row).and_then(|row| row.get(col)).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "rs/2021/day09/src/main.rs", 
"rank": 64, "score": 101826.54393308387 }, { "content": "fn count_chars_from_pair_counts(pair_counts: &HashMap<String, usize>) -> HashMap<char, usize> {\n\n pair_counts\n\n .iter()\n\n .fold(HashMap::new(), |mut acc, (pair, frequency)| {\n\n let mut pair_chars = pair.chars();\n\n let left = pair_chars.next().unwrap();\n\n let right = pair_chars.next().unwrap();\n\n acc.entry(left)\n\n .or_insert_with(SmartCounter::new)\n\n .increment_left(*frequency);\n\n acc.entry(right)\n\n .or_insert_with(SmartCounter::new)\n\n .increment_right(*frequency);\n\n acc\n\n })\n\n .into_iter()\n\n .map(|(char, smart_counter)| (char, smart_counter.get_count()))\n\n .collect()\n\n}\n\n\n", "file_path": "rs/2021/day14/src/main.rs", "rank": 65, "score": 97457.34059642036 }, { "content": "fn parse_rules(s: &str) -> HashMap<usize, HashSet<String>> {\n\n let mut result = HashMap::new();\n\n\n\n let mut temp = HashMap::new();\n\n for line in s.lines() {\n\n let (rule_id, raw_rule) = line.split_once(\": \").unwrap();\n\n let rule_id = rule_id.parse::<usize>().unwrap();\n\n match raw_rule {\n\n \"\\\"a\\\"\" | \"\\\"b\\\"\" => {\n\n let mut inner_result = HashSet::new();\n\n inner_result.insert(raw_rule.replace('\"', \"\").to_owned());\n\n result.insert(rule_id, inner_result);\n\n }\n\n _ => {\n\n temp.insert(\n\n rule_id,\n\n raw_rule\n\n .split(\" | \")\n\n .map(|sub_rule| sub_rule.split(' ').collect::<Vec<_>>())\n\n .collect::<Vec<_>>(),\n\n );\n\n }\n\n }\n\n }\n\n\n\n recursive_rule_parsing(result, &mut HashSet::new(), &temp, 0)\n\n}\n\n\n", "file_path": "rs/2020/day19/src/main.rs", "rank": 66, "score": 94227.86126008914 }, { "content": "fn get_edges(image: &[Vec<char>]) -> (u32, u32, u32, u32) {\n\n (\n\n // top\n\n get_bits(image[0].iter().copied()),\n\n // bottom\n\n get_bits(image[image.len() - 1].iter().copied()),\n\n // left\n\n get_bits(image.iter().map(|row| row[0])),\n\n // right\n\n get_bits(image.iter().map(|row| row[row.len() - 1])),\n\n )\n\n}\n\n\n", "file_path": 
"rs/2020/day20/src/main.rs", "rank": 67, "score": 92751.69378284932 }, { "content": "fn find_first_loop_size(card_public_key: u64, door_public_key: u64) -> (u64, Agent) {\n\n let subject_number = 7;\n\n let mut public_key = 1;\n\n let mut loop_size = 0;\n\n\n\n loop {\n\n loop_size += 1;\n\n public_key *= subject_number;\n\n public_key %= 20201227;\n\n if public_key == card_public_key {\n\n return (loop_size, Agent::Card);\n\n } else if public_key == door_public_key {\n\n return (loop_size, Agent::Door);\n\n }\n\n }\n\n}\n", "file_path": "rs/2020/day25/src/main.rs", "rank": 68, "score": 86762.24789895295 }, { "content": "class Point(NamedTuple):\n\n x: int\n\n y: int\n\n\n\n def __add__(self, other):\n\n x1, y1 = self\n\n x2, y2 = other\n", "file_path": "py/2018/day17/day17.py", "rank": 69, "score": 86133.42854204873 }, { "content": " class Point(NamedTuple):\n\n x: int\n\n y: int\n\n\n\n def __add__(self, other):\n\n x1, y1 = self\n\n x2, y2 = other\n\n return Grid.Point(x1 + x2, y1 + y2)\n\n\n\n def __repr__(self):\n\n return f\"({self.x}, {self.y})\"\n\n\n\n def get_adjacent_points(self, *, minx=None, maxx=None, miny=None, maxy=None):\n\n def generator():\n\n for delta in (\n\n (0, -1),\n\n (-1, 0),\n\n (1, 0),\n\n (0, 1),\n\n (-1, -1),\n\n (1, -1),\n\n (-1, 1),\n\n (1, 1),\n\n ):\n\n yield self + delta\n\n\n\n return [\n\n point\n\n for point in generator()\n\n if (minx is None or point.x >= minx)\n\n and (maxx is None or point.x <= maxx)\n\n and (miny is None or point.y >= miny)\n\n and (maxy is None or point.y <= maxy)\n", "file_path": "py/2018/day18/day18.py", "rank": 70, "score": 86133.42854204873 }, { "content": "class Point(NamedTuple):\n\n x: int\n\n y: int\n\n\n\n def __add__(self, other: Tuple[int, int]) -> \"Point\": # type: ignore\n\n x1, y1 = self\n\n x2, y2 = other\n\n return Point(x1 + x2, y1 + y2)\n\n\n\n @property\n\n def adjacent(self) -> Iterable[\"Point\"]:\n\n for delta in [(0, -1), (-1, 0), (1, 0), (0, 1)]:\n\n yield self + delta\n\n\n\n 
@staticmethod\n\n def coerce(point: \"GridKey\") -> \"Point\":\n\n if isinstance(point, Point) and type(point) == Point:\n\n return point\n", "file_path": "py/2018/day15/day15.py", "rank": 71, "score": 86133.42854204873 }, { "content": "class Point:\n\n row: int\n\n col: int\n\n\n\n def __iter__(self) -> Iterator[int]:\n\n return iter([self.row, self.col])\n\n\n\n def is_valid(self) -> bool:\n", "file_path": "py/2019/day24/day24.py", "rank": 72, "score": 86133.42854204873 }, { "content": " class Point(NamedTuple):\n\n x: int\n\n y: int\n\n\n\n def __add__(self, other):\n\n x1, y1 = self\n\n x2, y2 = other\n\n return Grid.Point(x1 + x2, y1 + y2)\n\n\n\n def __repr__(self):\n\n return f\"({self.x}, {self.y})\"\n\n\n\n def get_adjacent_points(self, *, minx=None, maxx=None, miny=None, maxy=None):\n\n def generator():\n\n for delta in (\n\n (0, -1),\n\n (-1, 0),\n\n (1, 0),\n\n (0, 1),\n\n (-1, -1),\n\n (1, -1),\n\n (-1, 1),\n\n (1, 1),\n\n ):\n\n yield self + delta\n\n\n\n return [\n\n point\n\n for point in generator()\n\n if (minx is None or point.x >= minx)\n\n and (maxx is None or point.x <= maxx)\n\n and (miny is None or point.y >= miny)\n\n and (maxy is None or point.y <= maxy)\n", "file_path": "py/2018/day20/day20.py", "rank": 73, "score": 86133.42854204873 }, { "content": "#[derive(Debug)]\n\nstruct Cave {\n\n cave_type: CaveType,\n\n paths: Vec<String>,\n\n}\n\n\n", "file_path": "rs/2021/day12/src/main.rs", "rank": 74, "score": 85449.55999302134 }, { "content": "#[derive(Debug)]\n\nstruct Bag {\n\n name: String,\n\n contained_by: HashMap<String, Rc<RefCell<Bag>>>,\n\n contains: HashMap<String, (usize, Weak<RefCell<Bag>>)>,\n\n}\n\n\n\nimpl Bag {\n\n fn new_ref(name: String) -> Rc<RefCell<Bag>> {\n\n Rc::new(RefCell::new(Self {\n\n name,\n\n contained_by: HashMap::new(),\n\n contains: HashMap::new(),\n\n }))\n\n }\n\n\n\n fn add_contained_by(&mut self, bag: Rc<RefCell<Bag>>) {\n\n let name = bag.borrow().name.clone();\n\n self.contained_by.insert(name, 
bag);\n\n }\n\n\n\n fn add_contains(&mut self, count: usize, bag: Weak<RefCell<Bag>>) {\n\n if let Some(valid_bag) = bag.upgrade() {\n\n self.contains\n\n .insert(valid_bag.borrow().name.clone(), (count, bag));\n\n }\n\n }\n\n}\n\n\n", "file_path": "rs/2020/day07/src/main.rs", "rank": 75, "score": 85449.55999302134 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Instruction {\n\n on: bool,\n\n x: RangeInclusive<isize>,\n\n y: RangeInclusive<isize>,\n\n z: RangeInclusive<isize>,\n\n}\n\n\n", "file_path": "rs/2021/day22/src/main.rs", "rank": 76, "score": 85449.46214830494 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct Box {\n\n lo_x: isize,\n\n lo_y: isize,\n\n lo_z: isize,\n\n hi_x: isize,\n\n hi_y: isize,\n\n hi_z: isize,\n\n}\n\n\n\nimpl Box {\n\n fn size(&self) -> usize {\n\n (self.hi_x - self.lo_x + 1) as usize\n\n * (self.hi_y - self.lo_y + 1) as usize\n\n * (self.hi_z - self.lo_z + 1) as usize\n\n }\n\n\n\n fn is_disjoint(&self, rhs: &Box) -> bool {\n\n self.lo_x > rhs.hi_x\n\n || self.hi_x < rhs.lo_x\n\n || self.lo_y > rhs.hi_y\n", "file_path": "rs/2021/day22/src/main.rs", "rank": 77, "score": 85449.3689213769 }, { "content": "#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\n\nstruct Vector3 {\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n}\n\n\n\nimpl Vector3 {\n\n const fn new(x: i32, y: i32, z: i32) -> Vector3 {\n\n Vector3 { x, y, z }\n\n }\n\n\n\n fn face_pos_z(&self) -> Vector3 {\n\n *self\n\n }\n\n\n\n fn face_neg_z(&self) -> Vector3 {\n\n Vector3::new(-self.x, self.y, -self.z)\n\n }\n\n\n\n fn face_neg_x(&self) -> Vector3 {\n", "file_path": "rs/2021/day19/src/main.rs", "rank": 78, "score": 85449.03621734827 }, { "content": "#[derive(Clone, Copy, Eq, PartialEq, Hash)]\n\nstruct Coordinate4 {\n\n x: isize,\n\n y: isize,\n\n z: isize,\n\n w: isize,\n\n}\n\n\n\nimpl HasNeighbors<Coordinate4> for Coordinate4 {\n\n fn neighbors(&self) -> Vec<Coordinate4> {\n\n let mut result = Vec::with_capacity(80);\n\n for x in -1..=1 {\n\n for y in -1..=1 {\n\n 
for z in -1..=1 {\n\n for w in -1..=1 {\n\n if x == 0 && y == 0 && z == 0 && w == 0 {\n\n continue;\n\n }\n\n\n\n result.push(Coordinate4 {\n\n x: self.x + x,\n", "file_path": "rs/2020/day17/src/main.rs", "rank": 79, "score": 85445.51144218825 }, { "content": "#[derive(Copy, Clone, Eq, Hash, PartialEq)]\n\nstruct State {\n\n p1_position: u32,\n\n p1_score: u32,\n\n p2_position: u32,\n\n p2_score: u32,\n\n}\n\n\n", "file_path": "rs/2021/day21/src/main.rs", "rank": 80, "score": 85445.51144218825 }, { "content": "#[derive(Clone)]\n\nstruct Board {\n\n /// mapping of numbers on the board to their 0-based coordinate on a 5x5 grid\n\n number_to_coordinates: HashMap<u32, (usize, usize)>,\n\n /// count of seen numbers for a given row\n\n row_frequencies: [u32; 5],\n\n /// count of seen numbers for a given col\n\n col_frequencies: [u32; 5],\n\n /// keeps track of which numbers have been marked\n\n seen_numbers: HashSet<u32>,\n\n}\n\n\n\nimpl Board {\n\n fn new(numbers: Vec<u32>) -> Self {\n\n assert_eq!(numbers.len(), 25);\n\n\n\n let number_to_coordinates = {\n\n let mut result = HashMap::new();\n\n for (i, number) in numbers.into_iter().enumerate() {\n\n assert!(!result.contains_key(&number), \"Should not repeat numbers\");\n\n\n", "file_path": "rs/2021/day04/src/main.rs", "rank": 81, "score": 85445.51144218825 }, { "content": "struct Grid {\n\n grid: Vec<char>,\n\n stride: usize,\n\n size: usize,\n\n}\n\n\n\nimpl Grid {\n\n fn new() -> Self {\n\n let input_lines = read_input_as_lines(\"2020/day11/src/input.txt\");\n\n let stride = input_lines.get(0).unwrap().len();\n\n\n\n let grid = input_lines\n\n .into_iter()\n\n .flat_map(|line| line.chars().collect::<Vec<_>>())\n\n .collect::<Vec<_>>();\n\n\n\n let size = grid.len();\n\n\n\n Grid { grid, stride, size }\n\n }\n", "file_path": "rs/2020/day11/src/main.rs", "rank": 82, "score": 85445.51144218825 }, { "content": "struct Scanner {\n\n id: usize,\n\n beacons: HashSet<Vector3>,\n\n rotation_configuration: 
u8,\n\n}\n\n\n\nimpl Scanner {\n\n const ROTATION_CONFIGURATION_COUNT: u8 = 24;\n\n\n\n fn from_lines<'a>(lines: impl IntoIterator<Item = &'a str>) -> Self {\n\n let mut lines_iter = lines.into_iter();\n\n let id = {\n\n let (_, num_raw) = (lines_iter.next().unwrap() as &str)\n\n .split_once(\"scanner \")\n\n .unwrap();\n\n let (num_raw, _) = num_raw.split_once(\" \").unwrap();\n\n num_raw.parse::<usize>().unwrap()\n\n };\n\n let beacons = lines_iter\n\n .map(|line| {\n", "file_path": "rs/2021/day19/src/main.rs", "rank": 83, "score": 85445.51144218825 }, { "content": "#[derive(Clone)]\n\nstruct Mask {\n\n null_mask: u64,\n\n one_mask: u64,\n\n x_mask: u64,\n\n floating_combinations: Vec<u64>,\n\n}\n\n\n\nimpl Default for Mask {\n\n fn default() -> Self {\n\n Mask {\n\n null_mask: 0,\n\n one_mask: 0,\n\n x_mask: 0,\n\n floating_combinations: Vec::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rs/2020/day14/src/main.rs", "rank": 84, "score": 85445.51144218825 }, { "content": "#[derive(Clone, Copy, Eq, PartialEq, Hash)]\n\nstruct Coordinate3 {\n\n x: isize,\n\n y: isize,\n\n z: isize,\n\n}\n\n\n\nimpl HasNeighbors<Coordinate3> for Coordinate3 {\n\n fn neighbors(&self) -> Vec<Coordinate3> {\n\n let mut result = Vec::with_capacity(26);\n\n for x in -1..=1 {\n\n for y in -1..=1 {\n\n for z in -1..=1 {\n\n if x == 0 && y == 0 && z == 0 {\n\n continue;\n\n }\n\n\n\n result.push(Coordinate3 {\n\n x: self.x + x,\n\n y: self.y + y,\n\n z: self.z + z,\n\n });\n\n }\n\n }\n\n }\n\n result\n\n }\n\n}\n\n\n", "file_path": "rs/2020/day17/src/main.rs", "rank": 85, "score": 85445.51144218825 }, { "content": "def rect(grid, a, b):\n\n for x in range(a):\n\n for y in range(b):\n\n grid[y][x] = \"#\"\n", "file_path": "py/2016/day08/aoc_day_08_1.py", "rank": 86, "score": 85300.1781271974 }, { "content": "def rect(grid, a, b):\n\n for x in range(a):\n\n for y in range(b):\n\n grid[y][x] = \"#\"\n", "file_path": "py/2016/day08/aoc_day_08_2.py", "rank": 87, "score": 85300.1781271974 }, { 
"content": "class Point:\n\n def __init__(self, x, y):\n\n self.x = x\n\n self.y = y\n\n\n\n def __add__(self, other):\n\n self.x += other.x\n\n self.y += other.y\n\n\n\n @property\n\n def position(self):\n", "file_path": "py/2017/day19/aoc_day_19.py", "rank": 88, "score": 85115.9964546137 }, { "content": "/// Takes a slice of hashable and cloneable items and counts them for you into a frequency map.\n\npub fn get_frequency<T: Clone + Hash + Eq>(chars: &[T]) -> HashMap<T, usize> {\n\n let mut result = HashMap::new();\n\n\n\n for c in chars {\n\n if let Some(entry) = result.get_mut(c) {\n\n *entry += 1;\n\n } else {\n\n result.insert(c.clone(), 0);\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "rs/src/lib.rs", "rank": 89, "score": 84325.96128502281 }, { "content": "#[derive(Debug)]\n\nstruct SmartCounter {\n\n left: usize,\n\n right: usize,\n\n}\n\n\n\nimpl SmartCounter {\n\n fn new() -> Self {\n\n Self { left: 0, right: 0 }\n\n }\n\n\n\n fn increment_left(&mut self, amount: usize) {\n\n self.left += amount;\n\n }\n\n\n\n fn increment_right(&mut self, amount: usize) {\n\n self.right += amount;\n\n }\n\n\n\n fn get_count(&self) -> usize {\n\n max(self.left, self.right)\n\n }\n\n}\n\n\n", "file_path": "rs/2021/day14/src/main.rs", "rank": 90, "score": 84176.38250755644 }, { "content": "#[derive(Debug)]\n\nstruct ExplodeConfiguration {\n\n pair_exploder: SnailfishIndex,\n\n number_left: Option<SnailfishIndex>,\n\n number_right: Option<SnailfishIndex>,\n\n}\n\n\n", "file_path": "rs/2021/day18/src/main.rs", "rank": 91, "score": 84176.38250755644 }, { "content": "#[derive(Clone, Debug)]\n\nstruct PacketOperator {\n\n version: usize,\n\n packet_type: usize,\n\n sub_packets: Vec<Packet>,\n\n packet_length: usize,\n\n}\n\n\n", "file_path": "rs/2021/day16/src/main.rs", "rank": 92, "score": 84176.28466284004 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct PacketLiteral {\n\n version: usize,\n\n value: usize,\n\n packet_length: usize,\n\n}\n\n\n", "file_path": 
"rs/2021/day16/src/main.rs", "rank": 93, "score": 84176.191435912 }, { "content": "struct AssignToAddress {\n\n address: u64,\n\n value: u64,\n\n}\n\n\n", "file_path": "rs/2020/day14/src/main.rs", "rank": 94, "score": 84172.33395672335 }, { "content": "fn main() {\n\n let data = read_input_as_lines(\"2021/day10/src/input.txt\")\n\n .into_iter()\n\n .filter(|line| !line.is_empty())\n\n .map(|line| line.chars().collect::<Vec<_>>())\n\n .collect::<Vec<_>>();\n\n\n\n let mut answer1 = 0;\n\n let mut autocomplete_scores = Vec::with_capacity(data.len());\n\n\n\n for line in data {\n\n let mut stack = Vec::with_capacity(line.len());\n\n\n\n let mut is_valid = true;\n\n for bracket in line {\n\n is_valid = match bracket {\n\n '(' | '[' | '{' | '<' => {\n\n stack.push(bracket);\n\n true\n\n }\n", "file_path": "rs/2021/day10/src/main.rs", "rank": 95, "score": 83894.93833464143 }, { "content": "fn main() {\n\n let graph = {\n\n let mut result = HashMap::<String, Cave>::new();\n\n for line in read_input_as_lines(\"2021/day12/src/input.txt\") {\n\n let (source, target) = line.split_once(\"-\").unwrap();\n\n let source = source.to_string();\n\n let target = target.to_string();\n\n {\n\n let cave_type = CaveType::from_str(&source).unwrap();\n\n let entry = result.entry(source.clone()).or_insert_with(|| Cave {\n\n cave_type,\n\n paths: Vec::new(),\n\n });\n\n entry.paths.push(target.clone());\n\n }\n\n {\n\n let cave_type = CaveType::from_str(&target).unwrap();\n\n let entry = result.entry(target).or_insert_with(|| Cave {\n\n cave_type,\n\n paths: Vec::new(),\n", "file_path": "rs/2021/day12/src/main.rs", "rank": 96, "score": 83894.93833464143 }, { "content": "fn main() {\n\n let mut data = read_input_as_lines(\"2021/day11/src/input.txt\")\n\n .into_iter()\n\n .map(|line| {\n\n line.chars()\n\n .map(|char| char.to_string().parse::<u8>().unwrap())\n\n .collect::<Vec<_>>()\n\n })\n\n .collect::<Vec<_>>();\n\n let row_count = data.len();\n\n let col_count = 
data.get(0).unwrap().len();\n\n\n\n let mut answer1 = 0;\n\n\n\n let mut step_count = 0;\n\n let answer2 = loop {\n\n let flash_count = step(&mut data);\n\n step_count += 1;\n\n\n\n if flash_count == row_count * col_count {\n", "file_path": "rs/2021/day11/src/main.rs", "rank": 97, "score": 83894.93833464143 }, { "content": "fn main() {\n\n let data = read_input_as_string(\"2021/day07/src/input.txt\")\n\n .trim()\n\n .split(',')\n\n .map(|num| num.parse::<i32>().unwrap())\n\n .collect::<Vec<_>>();\n\n\n\n let low = *data.iter().min().unwrap();\n\n let high = *data.iter().max().unwrap();\n\n\n\n let answer1: i32 = {\n\n (low..=high)\n\n .map(|alignment| data.iter().map(|crab| (alignment - crab).abs()).sum())\n\n .min()\n\n .unwrap()\n\n };\n\n\n\n let answer2: i32 = {\n\n (low..=high)\n\n .map(|alignment| {\n", "file_path": "rs/2021/day07/src/main.rs", "rank": 98, "score": 83894.93833464143 }, { "content": "fn dfs(\n\n current: &str,\n\n graph: &HashMap<String, Cave>,\n\n visited: &mut HashMap<String, usize>,\n\n can_revisit_small_cave: bool,\n\n) -> usize {\n\n if current == \"end\" {\n\n return 1;\n\n } else if current == \"start\" && visited.contains_key(\"start\") {\n\n return 0;\n\n }\n\n\n\n let cave = graph.get(current).expect(\"Missing an expected cave.\");\n\n let visit_count = if let Some(entry) = visited.get_mut(current) {\n\n *entry += 1;\n\n *entry\n\n } else {\n\n visited.insert(current.to_string(), 1);\n\n 1\n\n };\n", "file_path": "rs/2021/day12/src/main.rs", "rank": 99, "score": 83894.93833464143 } ]
Rust
rust/src/main.rs
tagadvance/chms_api_examples
b8335f5e5a9216afa357944b870edf29ad838243
#[macro_use] extern crate dotenv_codegen; extern crate dotenv; mod db; mod utils; use db::{TokenDatabase, TokenResponse}; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::future::Future; use std::pin::Pin; use tide::http::url::ParseError; use tide::utils::After; use tide::{Body, Next, Redirect, Request, Response, Result}; use utils::{api, env}; #[derive(Deserialize)] struct AuthCodeParams { code: String, state: String, } #[derive(Deserialize, Serialize)] struct AuthCodeBody { grant_type: String, subdomain: String, redirect_uri: String, client_id: String, client_secret: String, code: String, } #[derive(Deserialize, Serialize)] struct RefreshTokenBody { refresh_token: String, grant_type: String, client_id: String, client_secret: String, } fn build_auth_code_redirect_url(return_url: String) -> std::result::Result<Url, ParseError> { let env = env::get_env(); Url::parse_with_params( api::AUTH_CODE_URL, &[ ("client_id", env.client_id), ("subdomain", env.subdomain), ("redirect_uri", api::REDIRECT_URL.into()), ("response_type", String::from("code")), ("state", return_url), ], ) } fn build_auth_code_body(code: String) -> AuthCodeBody { let env = env::get_env(); AuthCodeBody { grant_type: String::from("authorization_code"), redirect_uri: api::REDIRECT_URL.into(), subdomain: env.subdomain, client_id: env.client_id, client_secret: env.client_secret, code, } } fn build_refresh_token_body(refresh_token: String) -> RefreshTokenBody { let env = env::get_env(); RefreshTokenBody { refresh_token, grant_type: String::from("refresh_token"), client_id: env.client_id, client_secret: env.client_secret, } } fn perform_refresh(db: &TokenDatabase) -> Result<String> { let body = build_refresh_token_body(db.get_refresh_token().unwrap()); let response = api::post::<RefreshTokenBody, TokenResponse>(&api::make_api_url("oauth/token"), &body)?; db.handle_token_response(&response)?; println!("Tokens refreshed."); Ok(response.access_token) } fn access_token_middleware<'a>( mut request: 
Request<TokenDatabase>, next: Next<'a, TokenDatabase>, ) -> Pin<Box<dyn Future<Output = Result> + Send + 'a>> { Box::pin(async { if request.url().path() == "/auth" { return Ok(next.run(request).await); } match request.state().get_access_token() { Some(access_token) => { request.set_ext(access_token); Ok(next.run(request).await) } None => { println!("Missing auth tokens. Redirecting to auth url."); let url_path = request.url().path(); Ok(Redirect::new(build_auth_code_redirect_url(url_path.into())?).into()) } } }) } async fn auth_code_handler(req: Request<TokenDatabase>) -> tide::Result { let params: AuthCodeParams = req.query()?; let body = build_auth_code_body(params.code); let response = api::post::<AuthCodeBody, TokenResponse>(&api::make_api_url("oauth/token"), &body)?; req.state().handle_token_response(&response)?; println!("Auth code successfully exchanged for tokens."); Ok(Redirect::new(params.state).into()) } async fn api_passthrough_handler(request: Request<TokenDatabase>) -> tide::Result<Response> { let path = request.param("path")?; match request.ext() { None => Ok(Response::from("Token Missing")), Some(token) => { let url = &api::make_api_url(path); let response = api::get::<serde_json::Value>(url, token)?; let mut json = response.0; if response.1 == reqwest::StatusCode::UNAUTHORIZED { let refreshed_token = perform_refresh(request.state())?; json = api::get::<serde_json::Value>(url, &refreshed_token)?.0; } return Ok(Response::from(Body::from_json(&json)?)); } } } #[async_std::main] async fn main() -> tide::Result<()> { let mut app = tide::with_state(TokenDatabase::new()?); app.with(access_token_middleware); app.with(After(|mut res: Response| async { if let Some(err) = res.take_error() { let msg = format!("Error: {:?}", err); res.set_status(tide::StatusCode::InternalServerError); res.set_body(msg); }; Ok(res) })); app.at("/auth").get(auth_code_handler); app.at("api/*path").get(api_passthrough_handler); app.listen("127.0.0.1:3000").await?; Ok(()) }
#[macro_use] extern crate dotenv_codegen; extern crate dotenv; mod db; mod utils; use db::{TokenDatabase, TokenResponse}; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::future::Future; use std::pin::Pin; use tide::http::url::ParseError; use tide::utils::After; use tide::{Body, Next, Redirect, Request, Response, Result}; use utils::{api, env}; #[derive(Deserialize)] struct AuthCodeParams { code: String, state: String, } #[derive(Deserialize, Serialize)] struct AuthCodeBody { grant_type: String, subdomain: String, re
rl("oauth/token"), &body)?; req.state().handle_token_response(&response)?; println!("Auth code successfully exchanged for tokens."); Ok(Redirect::new(params.state).into()) } async fn api_passthrough_handler(request: Request<TokenDatabase>) -> tide::Result<Response> { let path = request.param("path")?; match request.ext() { None => Ok(Response::from("Token Missing")), Some(token) => { let url = &api::make_api_url(path); let response = api::get::<serde_json::Value>(url, token)?; let mut json = response.0; if response.1 == reqwest::StatusCode::UNAUTHORIZED { let refreshed_token = perform_refresh(request.state())?; json = api::get::<serde_json::Value>(url, &refreshed_token)?.0; } return Ok(Response::from(Body::from_json(&json)?)); } } } #[async_std::main] async fn main() -> tide::Result<()> { let mut app = tide::with_state(TokenDatabase::new()?); app.with(access_token_middleware); app.with(After(|mut res: Response| async { if let Some(err) = res.take_error() { let msg = format!("Error: {:?}", err); res.set_status(tide::StatusCode::InternalServerError); res.set_body(msg); }; Ok(res) })); app.at("/auth").get(auth_code_handler); app.at("api/*path").get(api_passthrough_handler); app.listen("127.0.0.1:3000").await?; Ok(()) }
direct_uri: String, client_id: String, client_secret: String, code: String, } #[derive(Deserialize, Serialize)] struct RefreshTokenBody { refresh_token: String, grant_type: String, client_id: String, client_secret: String, } fn build_auth_code_redirect_url(return_url: String) -> std::result::Result<Url, ParseError> { let env = env::get_env(); Url::parse_with_params( api::AUTH_CODE_URL, &[ ("client_id", env.client_id), ("subdomain", env.subdomain), ("redirect_uri", api::REDIRECT_URL.into()), ("response_type", String::from("code")), ("state", return_url), ], ) } fn build_auth_code_body(code: String) -> AuthCodeBody { let env = env::get_env(); AuthCodeBody { grant_type: String::from("authorization_code"), redirect_uri: api::REDIRECT_URL.into(), subdomain: env.subdomain, client_id: env.client_id, client_secret: env.client_secret, code, } } fn build_refresh_token_body(refresh_token: String) -> RefreshTokenBody { let env = env::get_env(); RefreshTokenBody { refresh_token, grant_type: String::from("refresh_token"), client_id: env.client_id, client_secret: env.client_secret, } } fn perform_refresh(db: &TokenDatabase) -> Result<String> { let body = build_refresh_token_body(db.get_refresh_token().unwrap()); let response = api::post::<RefreshTokenBody, TokenResponse>(&api::make_api_url("oauth/token"), &body)?; db.handle_token_response(&response)?; println!("Tokens refreshed."); Ok(response.access_token) } fn access_token_middleware<'a>( mut request: Request<TokenDatabase>, next: Next<'a, TokenDatabase>, ) -> Pin<Box<dyn Future<Output = Result> + Send + 'a>> { Box::pin(async { if request.url().path() == "/auth" { return Ok(next.run(request).await); } match request.state().get_access_token() { Some(access_token) => { request.set_ext(access_token); Ok(next.run(request).await) } None => { println!("Missing auth tokens. 
Redirecting to auth url."); let url_path = request.url().path(); Ok(Redirect::new(build_auth_code_redirect_url(url_path.into())?).into()) } } }) } async fn auth_code_handler(req: Request<TokenDatabase>) -> tide::Result { let params: AuthCodeParams = req.query()?; let body = build_auth_code_body(params.code); let response = api::post::<AuthCodeBody, TokenResponse>(&api::make_api_u
random
[ { "content": "pub fn get<R: DeserializeOwned>(url: &String, token: &String) -> Result<(R, reqwest::StatusCode)> {\n\n let response = reqwest::blocking::Client::new()\n\n .get(url)\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"Accept\", \"application/vnd.ccbchurch.v2+json\")\n\n .header(\"Authorization\", format!(\"Bearer {}\", token))\n\n .send()?;\n\n\n\n let status = response.status();\n\n let json = response.json::<R>()?;\n\n\n\n Ok((json, status))\n\n}\n\n\n", "file_path": "rust/src/utils/api.rs", "rank": 2, "score": 110269.78109229519 }, { "content": "pub fn post<T: Serialize, R: DeserializeOwned>(url: &String, json_body: &T) -> Result<R> {\n\n let json_response = reqwest::blocking::Client::new()\n\n .post(url)\n\n .json(&json_body)\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"Accept\", \"application/vnd.ccbchurch.v2+json\")\n\n .send()?\n\n .json::<R>()?;\n\n\n\n Ok(json_response)\n\n}\n", "file_path": "rust/src/utils/api.rs", "rank": 3, "score": 97328.44683830685 }, { "content": "type DBValue = String;\n", "file_path": "rust/src/db.rs", "rank": 5, "score": 81434.54807388081 }, { "content": "pub fn get_env() -> Env {\n\n Env {\n\n client_id: String::from(dotenv!(\"CLIENT_ID\")),\n\n client_secret: String::from(dotenv!(\"CLIENT_SECRET\")),\n\n subdomain: String::from(dotenv!(\"SUBDOMAIN\")),\n\n }\n\n}\n", "file_path": "rust/src/utils/env.rs", "rank": 6, "score": 80551.91631933213 }, { "content": "fn build_token_database() -> Result<TokenDatabase> {\n\n let db = FileDatabase::<HashMap<String, DBValue>, Ron>::load_from_path_or_default(Path::new(\n\n \"./db_data.ron\",\n\n ))?;\n\n\n\n Ok(TokenDatabase { db })\n\n}\n\n\n\nimpl Clone for TokenDatabase {\n\n fn clone(&self) -> TokenDatabase {\n\n build_token_database().unwrap()\n\n }\n\n}\n\n\n\nimpl TokenDatabase {\n\n pub fn new() -> Result<TokenDatabase> {\n\n build_token_database()\n\n }\n\n\n\n fn read(&self, key: &str) -> DBReadResponse {\n", "file_path": 
"rust/src/db.rs", "rank": 9, "score": 67146.81803794134 }, { "content": "pub fn make_api_url(url: &str) -> String {\n\n format!(\"https://api.ccbchurch.com/{}\", url)\n\n}\n\n\n\npub const AUTH_CODE_URL: &str = \"https://oauth.ccbchurch.com/oauth/authorize\";\n\npub const REDIRECT_URL: &str = \"http://localhost:3000/auth\";\n\n\n", "file_path": "rust/src/utils/api.rs", "rank": 10, "score": 61144.68887764957 }, { "content": "pub mod env;\n\npub mod api;\n", "file_path": "rust/src/utils/mod.rs", "rank": 11, "score": 60851.08575289399 }, { "content": "pub struct Env {\n\n pub client_id: String,\n\n pub client_secret: String,\n\n pub subdomain: String,\n\n}\n\n\n", "file_path": "rust/src/utils/env.rs", "rank": 12, "score": 60799.4358095723 }, { "content": "const subdomain = process.env.SUBDOMAIN;\n", "file_path": "nodejs/index.js", "rank": 13, "score": 53939.555092407994 }, { "content": "const db = lowdb(adapter)\n", "file_path": "nodejs/index.js", "rank": 14, "score": 53436.58711651319 }, { "content": "def _db_exists():\n", "file_path": "python/app/state.py", "rank": 15, "score": 50764.41621321172 }, { "content": "def _write_db(state):\n\n with open(DB_FILENAME, \"w\") as wfile:\n", "file_path": "python/app/state.py", "rank": 16, "score": 50764.41621321172 }, { "content": "def _read_db():\n\n with open(DB_FILENAME, \"r\") as rfile:\n", "file_path": "python/app/state.py", "rank": 17, "score": 50764.41621321172 }, { "content": "type DBReadResponse = Option<DBValue>;\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct TokenResponse {\n\n pub access_token: String,\n\n pub refresh_token: String,\n\n token_type: String,\n\n expires_in: u32,\n\n scope: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TokenDatabase {\n\n db: FileDatabase<HashMap<String, DBValue>, Ron>,\n\n}\n\n\n", "file_path": "rust/src/db.rs", "rank": 19, "score": 37429.77557477253 }, { "content": "def set_state(key, value):\n\n state = _read_db()\n\n state[key] = value\n", "file_path": 
"python/app/state.py", "rank": 21, "score": 33403.1348314392 }, { "content": "def get_state(key, fallback=None):\n\n if key in _ENV_STATE:\n\n return os.environ[_ENV_STATE[key]]\n\n\n\n state = _read_db()\n", "file_path": "python/app/state.py", "rank": 22, "score": 33403.1348314392 }, { "content": "def init_state():\n\n if _db_exists():\n\n return\n\n\n\n _write_db(\n\n {\n\n \"access_token\": None,\n\n \"refresh_token\": None,\n\n \"token_expires_at\": 0,\n\n }\n", "file_path": "python/app/state.py", "rank": 23, "score": 33403.1348314392 }, { "content": "extern crate rustbreak;\n\n\n\nuse rustbreak::error::Result;\n\nuse rustbreak::{deser::Ron, FileDatabase};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\n\n", "file_path": "rust/src/db.rs", "rank": 24, "score": 30998.890432638524 }, { "content": " match self.db.borrow_data() {\n\n Ok(data) => match data.get(key) {\n\n Some(data) => Some(data.to_string()),\n\n _ => None,\n\n },\n\n Err(_) => None,\n\n }\n\n }\n\n\n\n fn write(&self, key: &str, value: &String) -> Result<()> {\n\n let result = self.db.write(|db| {\n\n db.insert(key.into(), value.into());\n\n });\n\n\n\n if result.is_ok() {\n\n self.db.save()?;\n\n }\n\n\n\n result\n\n }\n", "file_path": "rust/src/db.rs", "rank": 25, "score": 30997.151997431632 }, { "content": "\n\n pub fn get_access_token(&self) -> DBReadResponse {\n\n self.read(\"access_token\")\n\n }\n\n\n\n pub fn get_refresh_token(&self) -> DBReadResponse {\n\n self.read(\"refresh_token\")\n\n }\n\n\n\n pub fn handle_token_response(&self, tokens: &TokenResponse) -> Result<()> {\n\n self.write(\"access_token\", &tokens.access_token)?;\n\n self.write(\"refresh_token\", &tokens.refresh_token)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "rust/src/db.rs", "rank": 26, "score": 30992.7114400144 }, { "content": " private Requests() {\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Requests.java", "rank": 27, "score": 
30985.76606351794 }, { "content": "use reqwest::Result;\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n", "file_path": "rust/src/utils/api.rs", "rank": 28, "score": 30279.655743200405 }, { "content": " public static final String ACCEPT_KEY = \"Accept\";\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Requests.java", "rank": 29, "score": 30016.58794574711 }, { "content": " public static final String ACCEPT_VALUE = \"application/vnd.ccbchurch.v2+json\";\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Requests.java", "rank": 30, "score": 30016.58794574711 }, { "content": " public static void addAcceptHeader(HttpRequest request) {\n\n var headers = request.getHeaders();\n\n headers.put(ACCEPT_KEY, ACCEPT_VALUE);\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Requests.java", "rank": 31, "score": 29554.38438792243 }, { "content": "\"\"\"This module contains methods for working with a JSON file as a kind of simple database. 
This\n\nallows the tokens to be persisted between runs of the server.\n\n\n\nThis should not be used as any kind of production storage, it's just a simple way for a single\n\nuser application to store data, for this example application.\n\n\"\"\"\n\nimport json\n\nimport os\n\n\n\nfrom app.constants import DB_FILENAME\n\n\n\n_ENV_STATE = {\n\n \"client_id\": \"CCB_CLIENT_ID\",\n\n \"client_key\": \"CCB_CLIENT_KEY\",\n\n}\n\n\n\n\n\ndef _db_exists():\n\n return os.path.exists(DB_FILENAME)\n\n\n\n\n\ndef _read_db():\n\n with open(DB_FILENAME, \"r\") as rfile:\n\n return json.load(rfile)\n\n\n\n\n\ndef _write_db(state):\n\n with open(DB_FILENAME, \"w\") as wfile:\n\n json.dump(state, wfile)\n\n\n\n\n\ndef init_state():\n\n if _db_exists():\n\n return\n\n\n\n _write_db(\n\n {\n\n \"access_token\": None,\n\n \"refresh_token\": None,\n\n \"token_expires_at\": 0,\n\n }\n\n )\n\n\n\n\n\ndef get_state(key, fallback=None):\n\n if key in _ENV_STATE:\n\n return os.environ[_ENV_STATE[key]]\n\n\n\n state = _read_db()\n\n return state.get(key, fallback)\n\n\n\n\n\ndef set_state(key, value):\n\n state = _read_db()\n\n state[key] = value\n\n _write_db(state)\n", "file_path": "python/app/state.py", "rank": 32, "score": 26907.164227390564 }, { "content": "\"\"\"This module contains wrappers around the requests library for making requests to the CCB\n\nAPI.\n\n\"\"\"\n\nimport requests\n\n\n\nfrom app.state import get_state\n\n\n\n\n\ndef post_json(url, data, *, auth=False):\n\n \"\"\"This method is a simple wrapper around requests.post that will add the appropriate headers\n\n needed for working with the CCB API.\n\n \"\"\"\n\n # Requests without the Accept: application/vnd.ccbchurch.vd+json header will be rejected\n\n headers = {\n\n \"Content-Type\": \"application/json\",\n\n \"Accept\": \"application/vnd.ccbchurch.v2+json\",\n\n }\n\n if auth:\n\n access_token = get_state(\"access_token\", \"\")\n\n if access_token:\n\n headers[\"Authorization\"] = f\"Bearer 
{access_token}\"\n\n\n\n return requests.post(url, json=data, headers=headers)\n\n\n\n\n\ndef get_json(url, params=None, *, auth=False):\n\n \"\"\"This method is a simple wrapper around requests.get that will add the appropriate headers\n\n needed for working with the CCB API.\n\n \"\"\"\n\n # Requests without the Accept: application/vnd.ccbchurch.vd+json header will be rejected\n\n headers = {\n\n \"Accept\": \"application/vnd.ccbchurch.v2+json\",\n\n }\n\n if auth:\n\n access_token = get_state(\"access_token\")\n\n if access_token:\n\n headers[\"Authorization\"] = f\"Bearer {access_token}\"\n\n return requests.get(url, params, headers=headers)\n", "file_path": "python/app/request.py", "rank": 33, "score": 26684.344390530852 }, { "content": "const dbFile = process.env.DB_FILE || 'db.json';\n", "file_path": "nodejs/index.js", "rank": 34, "score": 25959.7697715202 }, { "content": "const storeTokenResult = async (token) => {\n\n const now = getNow();\n\n await db.set('accessToken', token.access_token).write();\n\n await db.set('refreshToken', token.refresh_token).write();\n\n await db.set('tokenExpiration', token.expires_in + now).write();\n", "file_path": "nodejs/index.js", "rank": 35, "score": 25532.716953781874 }, { "content": "def post_json(url, data, *, auth=False):\n\n \"\"\"This method is a simple wrapper around requests.post that will add the appropriate headers\n\n needed for working with the CCB API.\n\n \"\"\"\n\n # Requests without the Accept: application/vnd.ccbchurch.vd+json header will be rejected\n\n headers = {\n\n \"Content-Type\": \"application/json\",\n\n \"Accept\": \"application/vnd.ccbchurch.v2+json\",\n\n }\n\n if auth:\n\n access_token = get_state(\"access_token\", \"\")\n\n if access_token:\n\n headers[\"Authorization\"] = f\"Bearer {access_token}\"\n\n\n", "file_path": "python/app/request.py", "rank": 36, "score": 25278.049625878873 }, { "content": "def get_json(url, params=None, *, auth=False):\n\n \"\"\"This method is a simple wrapper 
around requests.get that will add the appropriate headers\n\n needed for working with the CCB API.\n\n \"\"\"\n\n # Requests without the Accept: application/vnd.ccbchurch.vd+json header will be rejected\n\n headers = {\n\n \"Accept\": \"application/vnd.ccbchurch.v2+json\",\n\n }\n\n if auth:\n\n access_token = get_state(\"access_token\")\n\n if access_token:\n\n headers[\"Authorization\"] = f\"Bearer {access_token}\"\n", "file_path": "python/app/request.py", "rank": 37, "score": 25278.049625878873 }, { "content": "const buildAuthRedirectUrl = () => {\n\n const query = querystring.stringify({\n\n client_id: clientId,\n\n response_type: 'code',\n\n redirect_uri: 'http://localhost:8080/auth',\n\n subdomain,\n\n });\n\n return `https://oauth.ccbchurch.com/oauth/authorize?${query}`;\n", "file_path": "nodejs/index.js", "rank": 38, "score": 25037.30250933764 }, { "content": "const createTokenFromAuthCode = async (code) => {\n\n const data = await postJson('https://api.ccbchurch.com/oauth/token', {\n\n grant_type: 'authorization_code',\n\n subdomain,\n\n client_id: clientId,\n\n client_secret: clientSecret,\n\n redirect_uri: 'http://localhost:8080/auth',\n\n code,\n\n });\n\n\n\n // we successfully got an access token\n\n // now store it, the refresh token and\n\n // the expiration for use later\n\n await storeTokenResult(data);\n", "file_path": "nodejs/index.js", "rank": 39, "score": 24836.690435369455 }, { "content": "def render_request_integration_form():\n\n \"\"\"Displays a form allowing the user to initiate the integration process.\"\"\"\n", "file_path": "python/server.py", "rank": 40, "score": 24627.386424615575 }, { "content": "package com.churchcommunitybuilder;\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Requests.java", "rank": 41, "score": 22334.72959227269 }, { "content": " private static final String KEY_SUBDOMAIN = \"subdomain\";\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Main.java", "rank": 42, "score": 
22190.046138926158 }, { "content": " private static AuthorizationCodeFlow createAuthorizationCodeFlow(Properties properties) throws GeneralSecurityException, IOException {\n\n var method = BearerToken.authorizationHeaderAccessMethod();\n\n var transport = GoogleNetHttpTransport.newTrustedTransport();\n\n var jsonFactory = JacksonFactory.getDefaultInstance();\n\n\n\n var tokenServerUrl = new GenericUrl(TOKEN_SERVER_URL);\n\n\n\n var clientId = properties.getProperty(KEY_CLIENT_ID);\n\n var clientSecret = properties.getProperty(KEY_CLIENT_SECRET);\n\n var clientAuthentication = new ClientJsonAuthentication(clientId, clientSecret);\n\n\n\n var authorizationServerEncodedUrl = createAuthorizationServerEncodedUrl(properties);\n\n\n\n // WARNING: Do NOT save stored credentials in git!\n\n var dataDirectory = new File(STORED_CREDENTIALS_DIRECTORY_PATH);\n\n var dataStoreFactory = new FileDataStoreFactory(dataDirectory);\n\n\n\n return new AuthorizationCodeFlow.Builder(\n\n method, transport, jsonFactory, tokenServerUrl, clientAuthentication, clientId, authorizationServerEncodedUrl)\n\n .setDataStoreFactory(dataStoreFactory)\n\n .build();\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/Main.java", "rank": 43, "score": 21052.48007655097 }, { "content": " private final HttpRequestFactory requestFactory;\n", "file_path": "java/app/src/main/java/com/churchcommunitybuilder/RestClient.java", "rank": 44, "score": 20877.084353629736 }, { "content": " @Test\n\n void constructorHttpRequestFactoryMustNotBeNull() {\n\n assertThrows(\n\n NullPointerException.class,\n\n () -> {\n\n new RestClient(null);\n\n }\n\n );\n", "file_path": "java/app/src/test/java/com/churchcommunitybuilder/AppTest.java", "rank": 45, "score": 19205.828566480563 }, { "content": "def get_access_token(code):\n\n \"\"\"Requests an access token / refresh token pair from CCB using the authorization code\n\n obtained from the authorization process.\n\n \"\"\"\n\n url = 
f\"{API_BASE_URL}/oauth/token\"\n\n data = {\n\n \"grant_type\": \"authorization_code\",\n\n \"code\": code,\n\n \"client_id\": get_state(\"client_id\"),\n\n \"client_secret\": get_state(\"client_key\"),\n\n \"redirect_uri\": APP_BASE_URL + \"/auth\",\n\n }\n\n response = post_json(url, data)\n", "file_path": "python/app/token.py", "rank": 48, "score": 6.121446502540256 }, { "content": "def integration_authorized():\n\n \"\"\"Once the user has initiated the integration, CCB will redirect back to this endpoint with\n\n the authorization code as a query parameter. That code is then exchanged for an access token /\n\n refresh token pair.\n\n \"\"\"\n\n code = request.args.get(\"code\")\n\n get_access_token(code)\n", "file_path": "python/server.py", "rank": 50, "score": 4.305147581553191 }, { "content": "# Node JS Example\n\n## Requirements\n\n- Node v12\n\n## Setup\n\n- copy the `env.sample` to `.env` and add in your credentials as well as a\n\n subdomain for testing\n\n- `npm install`\n\n## Start\n\n- `npm start`\n", "file_path": "nodejs/README.md", "rank": 51, "score": 4.235700144865219 }, { "content": "def _set_access_token(data):\n\n \"\"\"Store the token data that we need to make requests.\"\"\"\n\n set_state(\"access_token\", data[\"access_token\"])\n\n set_state(\"refresh_token\", data[\"refresh_token\"])\n", "file_path": "python/app/token.py", "rank": 52, "score": 2.467543491620569 }, { "content": "def _is_token_expiring(offset=0):\n\n \"\"\"Returns true if the token will expire in the next offset seconds.\"\"\"\n\n expires_at = get_state(\"token_expires_at\")\n", "file_path": "python/app/token.py", "rank": 53, "score": 2.4117887781547145 }, { "content": "\"\"\"This module contains constants used throughout the application.\"\"\"\n\nAPP_BASE_URL = \"http://localhost:8080\"\n\nOAUTH_BASE_URL = \"https://oauth.ccbchurch.com\"\n\nAPI_BASE_URL = \"https://api.ccbchurch.com\"\n\nDB_FILENAME = \"db.json\"\n", "file_path": "python/app/constants.py", "rank": 54, 
"score": 2.3797784878745873 }, { "content": "def get_authorization_url():\n\n \"\"\"Builds the CCB oauth authorization URL, using your client ID.\"\"\"\n\n url_base = f\"{OAUTH_BASE_URL}/oauth/authorize\"\n\n query = urlencode(\n\n {\n\n \"client_id\": get_state(\"client_id\"),\n\n \"response_type\": \"code\",\n\n \"redirect_uri\": APP_BASE_URL + \"/auth\",\n\n }\n\n )\n", "file_path": "python/app/token.py", "rank": 55, "score": 2.254214233710322 }, { "content": "rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\n\nrem the _cmd.exe /c_ return code!\n\nif not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\n\nexit /b 1\n\n\n\n:mainEnd\n\nif \"%OS%\"==\"Windows_NT\" endlocal\n\n\n\n:omega\n", "file_path": "java/gradlew.bat", "rank": 56, "score": 2.2455610914617434 }, { "content": "def index():\n\n \"\"\"This endpoint displays the church directory if the application has been connected to CCB,\n\n otherwise it redirects the user to a page to initiate the integration.\n\n \"\"\"\n\n if not get_state(\"access_token\"):\n\n return redirect(\"/integrations\")\n\n\n\n page = request.args.get(\"page\", 1)\n\n\n\n # In a production application you'd be better served by placing the token refresh at a lower\n\n # level place, or via a request middleware, so you don't have to manually refresh it before\n\n # every request you make.\n\n check_refresh_access_token()\n\n\n\n response = get_json(\n\n f\"{API_BASE_URL}/individuals\",\n\n {\"page\": page, \"per_page\": 100},\n\n auth=True,\n\n )\n\n\n\n people = response.json()\n\n record_count = int(response.headers.get(\"X-Total\", 0))\n\n current_page = int(response.headers.get(\"X-Page\", 1))\n\n next_page = int(response.headers.get(\"X-Next-Page\", 0))\n\n last_page = int(response.headers.get(\"X-Total-Pages\", 0))\n\n\n\n return render_template(\n\n \"index.html\",\n\n people=people,\n\n record_count=record_count,\n\n current_page=current_page,\n\n next_page=next_page,\n\n last_page=last_page,\n", 
"file_path": "python/server.py", "rank": 57, "score": 2.0984426441801176 }, { "content": "## Notes About the Application\n\n\n\nThis application was designed solely as an example of how to connect to the\n\nCCB API with Python. There is no error handling, logging, or other things you\n\nwould normally want in a production application.\n\n\n\nAlso note: this application stores access and refresh tokens in plain text,\n\nyou _should not_ do this in production. Access tokens grant full access to\n\nresources, anyone with the access token will be able to impersonate you until\n\nthe access token expires.\n\n\n\nIf you fork this code to create an application *please do not hard code your\n\nOAuth client ID and Secret!* If you do, anyone who has access to the code base\n\nwill have access to your client credentials.\n", "file_path": "python/README.md", "rank": 58, "score": 1.9413270481866067 }, { "content": " implementation 'org.json:json:20200518'\n\n}\n\n\n\njava {\n\n toolchain {\n\n languageVersion = JavaLanguageVersion.of(10)\n\n }\n\n}\n\n\n\napplication {\n\n // Define the main class for the application.\n\n mainClass = 'com.churchcommunitybuilder.Main'\n\n}\n\n\n\njar {\n\n manifest {\n\n attributes(\n\n 'Class-Path': configurations.compile.collect { it.getName() }.join(' '),\n\n 'Main-Class': application.mainClass\n\n )\n\n }\n\n}\n\n\n\ntasks.named('test') {\n\n // Use junit platform for unit tests.\n\n useJUnitPlatform()\n\n}\n", "file_path": "java/app/build.gradle", "rank": 60, "score": 1.6935334861307263 }, { "content": "# Church Community Builder API Consumer Application Example\n\n\n\nThis is a sample CCB API Consumer application written in Python + Flask. 
It is\n\nintended to show how an application can connect to the CCB API via the OAuth 2\n\nprotocol.\n\n\n\n## Requirements\n\n\n\nYou must have python 3.6+ installed to run this application.\n\n\n\nYou'll need to use a terminal emulator such as bash or powershell to work with\n\nthis application.\n\n\n\nTo use the Makefile you must have some version of make installed. If you do\n\nnot or cannot install it, checkout the Makefile to see the commands. You can\n\nrun them in your terminal manually.\n\n\n\n## Setup\n\n\n\nTo install the dependencies, in your terminal run `make install`.\n\n\n\n## Running the Application\n\n\n\nTo run the application, you'll need to set your CCB API OAuth client\n\ncredentials as environment variables, e.g.:\n\n\n\n export CCB_CLIENT_ID=<your client ID>\n\n export CCB_CLIENT_KEY=<your client key / secret>\n\n\n\nWith those variables set in the environment, you can then run the application\n\nvia\n\n\n\n make run\n\n\n\nThis will start the Flask server on localhost, port 8080. You can then access\n\nthe running application at [http://localhost:8080](http://localhost:8080).\n\n\n\n## Resetting the Data Store\n\n\n\nThe application uses a simple JSON-based file storage. This is present just\n\nas a simple storage means for this sample application. Once you have gone\n\nthrough the authorization flow once, the application will use the refresh\n\ntoken to keep the acess token fresh. If you want to go through the\n\nauthorization flow again, run `make reset` to clear the database.\n\n\n\n## How the Application Works\n\n\n\nThe application will first allow you to connect up to the CCB API through the\n\nthree-legged OAuth flow. 
Once you finish connecting the application you will\n\nbe redirected back to the locally running application and will be presented\n\nwith a paged church directory.\n\n\n", "file_path": "python/README.md", "rank": 61, "score": 1.6415609543429293 }, { "content": "# Java Example\n\n\n\n## Requirements\n\n- Java 10+\n\n\n\n## Setup\n\n- `brew install gradle`\n\n- Copy `config-sample.properties` to `app/config.properties` and populate with your credentials and subdomain for testing\n\n- `gradle :app:test`\n\n\n\n## Start\n\n- `gradle :app:run`\n", "file_path": "java/README.md", "rank": 62, "score": 1.6269995195662075 }, { "content": "\n\ndependencies {\n\n // Use JUnit Jupiter API for testing.\n\n testImplementation 'org.junit.jupiter:junit-jupiter-api:5.6.2'\n\n\n\n // Use JUnit Jupiter Engine for testing.\n\n testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine'\n\n\n\n // This dependency is used by the application.\n\n implementation 'com.google.guava:guava:29.0-jre'\n\n\n\n // https://developers.google.com/api-client-library/java\n\n implementation 'com.google.api-client:google-api-client:1.30.11'\n\n\n\n // https://developers.google.com/api-client-library/java/google-oauth-java-client/setup#google-oauth-client\n\n implementation 'com.google.oauth-client:google-oauth-client:1.31.1'\n\n implementation 'com.google.oauth-client:google-oauth-client-java6:1.31.1'\n\n implementation 'com.google.oauth-client:google-oauth-client-jetty:1.31.1'\n\n\n\n // https://github.com/stleary/JSON-java\n", "file_path": "java/app/build.gradle", "rank": 63, "score": 1.4408137746655871 }, { "content": "/*\n\n * This file was generated by the Gradle 'init' task.\n\n *\n\n * The settings file is used to specify which projects to include in your build.\n\n *\n\n * Detailed information about configuring a multi-project build in Gradle can be found\n\n * in the user manual at https://docs.gradle.org/6.7/userguide/multi_project_builds.html\n\n */\n\n\n\nrootProject.name = 
'java'\n\ninclude('app')\n", "file_path": "java/settings.gradle", "rank": 64, "score": 1.258321756663582 }, { "content": "/*\n\n * This file was generated by the Gradle 'init' task.\n\n *\n\n * This generated file contains a sample Java application project to get you started.\n\n * For more details take a look at the 'Building Java & JVM projects' chapter in the Gradle\n\n * User Manual available at https://docs.gradle.org/6.7/userguide/building_java_projects.html\n\n */\n\n\n\nplugins {\n\n // Apply the application plugin to add support for building a CLI application in Java.\n\n id 'application'\n\n id 'idea'\n\n}\n\n\n\nrepositories {\n\n // Use JCenter for resolving dependencies.\n\n jcenter()\n\n mavenCentral()\n\n google()\n\n}\n", "file_path": "java/app/build.gradle", "rank": 65, "score": 0.9610982830358163 }, { "content": "@rem\n\n@rem ##########################################################################\n\n\n\n@rem Set local scope for the variables with windows NT shell\n\nif \"%OS%\"==\"Windows_NT\" setlocal\n\n\n\nset DIRNAME=%~dp0\n\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\n\nset APP_BASE_NAME=%~n0\n\nset APP_HOME=%DIRNAME%\n\n\n\n@rem Resolve any \".\" and \"..\" in APP_HOME to make it shorter.\n\nfor %%i in (\"%APP_HOME%\") do set APP_HOME=%%~fi\n\n\n\n@rem Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\n\nset DEFAULT_JVM_OPTS=\"-Xmx64m\" \"-Xms64m\"\n\n\n\n@rem Find java.exe\n\nif defined JAVA_HOME goto findJavaFromJavaHome\n\n\n", "file_path": "java/gradlew.bat", "rank": 66, "score": 0.830343409849867 }, { "content": "@rem\n\n@rem Copyright 2015 the original author or authors.\n\n@rem\n\n@rem Licensed under the Apache License, Version 2.0 (the \"License\");\n\n@rem you may not use this file except in compliance with the License.\n\n@rem You may obtain a copy of the License at\n\n@rem\n\n@rem https://www.apache.org/licenses/LICENSE-2.0\n\n@rem\n\n@rem Unless required by applicable law or agreed to in writing, software\n\n@rem distributed under the License is distributed on an \"AS IS\" BASIS,\n\n@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n@rem See the License for the specific language governing permissions and\n\n@rem limitations under the License.\n\n@rem\n\n\n\n@if \"%DEBUG%\" == \"\" @echo off\n\n@rem ##########################################################################\n\n@rem\n\n@rem Gradle startup script for Windows\n", "file_path": "java/gradlew.bat", "rank": 67, "score": 0.8145130749531528 } ]
Rust
src/lib.rs
Gingeh/brainfrick
bd7fe1a2cc7dbee0b1c81e2b585c66d320facac5
use std::collections::VecDeque; pub struct BrainFuck { memory: Vec<Cell>, pointer: usize, input: VecDeque<u8>, counter: usize, max_steps: usize, } impl BrainFuck { #[must_use] pub fn new(size: usize, input: &str, max_steps: usize) -> Self { BrainFuck { memory: vec![Cell::from(0); size], pointer: 0, counter: 0, input: queue_from(input), max_steps, } } pub fn run(&mut self, program: &str) -> Result<String, String> { let mut output = String::new(); let program: Vec<char> = program.chars().collect(); let mut steps: usize = 0; while self.counter < program.len() { let ch = program[self.counter]; match ch { '+' => self.add(), '-' => self.sub(), '<' => self.left(), '>' => self.right(), '.' => output += &self.print(), ',' => self.read(), '[' => self.start(&program)?, ']' => self.end(&program)?, _ => {} } self.counter += 1; if steps == self.max_steps { return Err(format!( "Exceeded maximum steps ({}), the program may be stuck in a loop.", self.max_steps )); } steps += 1; } Ok(output) } fn add(&mut self) { self.memory[self.pointer].add(); } fn sub(&mut self) { self.memory[self.pointer].sub(); } fn left(&mut self) { self.pointer = if self.pointer == 0 { self.memory.len() - 1 } else { self.pointer - 1 } } fn right(&mut self) { self.pointer = if self.pointer == self.memory.len() - 1 { 0 } else { self.pointer + 1 } } fn print(&mut self) -> String { self.memory[self.pointer].to_string() } fn read(&mut self) { self.memory[self.pointer] = Cell::from(self.input.pop_front().unwrap_or(0)); } fn start(&mut self, program: &[char]) -> Result<(), String> { if self.memory[self.pointer] == Cell::from(0) { let mut level: usize = 1; while level > 0 { self.counter += 1; if self.counter == program.len() { return Err(String::from("A matching \"]\" could not be found.")); } if program[self.counter] == '[' { level += 1; } else if program[self.counter] == ']' { level -= 1; } } } Ok(()) } fn end(&mut self, program: &[char]) -> Result<(), String> { if self.memory[self.pointer] != Cell::from(0) { let 
mut level: usize = 1; while level > 0 { if self.counter == 0 { return Err(String::from("A matching \"[\" could not be found.")); } self.counter -= 1; if program[self.counter] == '[' { level -= 1; } else if program[self.counter] == ']' { level += 1; } } } Ok(()) } } #[derive(PartialEq, Clone, Copy)] struct Cell { value: u8, } impl Cell { fn add(&mut self) { self.value = if self.value == 255 { 0 } else { self.value + 1 } } fn sub(&mut self) { self.value = if self.value == 0 { 255 } else { self.value - 1 } } } impl ToString for Cell { fn to_string(&self) -> String { String::from(self.value as char) } } impl From<u8> for Cell { fn from(value: u8) -> Self { Self { value } } } fn queue_from(input: &str) -> VecDeque<u8> { VecDeque::from(input.as_bytes().to_vec()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_input() { let program = ",++.,-."; let mut engine = BrainFuck::new(256, "a", 10000); assert_eq!(engine.run(program), Ok(String::from("cÿ"))); } #[test] fn test_wraparound() { let program = "->>.<<."; let mut engine = BrainFuck::new(2, "", 10000); assert_eq!(engine.run(program), Ok(String::from("ÿÿ"))); } #[test] fn test_max_steps() { let program = "+[]"; let mut engine = BrainFuck::new(256, "", 100); assert_eq!( engine.run(program), Err(String::from( "Exceeded maximum steps (100), the program may be stuck in a loop." 
)) ); } #[test] fn test_invalid_character() { let program = "nothing lol"; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!(engine.run(program), Ok(String::new())); } #[test] fn test_missing_bracket() { let program = "["; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!( engine.run(program), Err(String::from("A matching \"]\" could not be found.")) ); let program = "+]"; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!( engine.run(program), Err(String::from("A matching \"[\" could not be found.")) ); } #[test] fn test_loop() { let program = "[[-.+]]++[-]-."; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!(engine.run(program), Ok(String::from("ÿ"))); } }
use std::collections::VecDeque; pub struct BrainFuck { memory: Vec<Cell>, pointer: usize, input: VecDeque<u8>, counter: usize, max_steps: usize, } impl BrainFuck { #[must_use] pub fn new(size: usize, input: &str, max_steps: usize) -> Self { BrainFuck { memory: vec![Cell::from(0); size], pointer: 0, counter: 0, input: queue_from(input), max_steps, } } pub fn run(&mut self, program: &str) -> Result<String, String> { let mut output = String::new(); let program: Vec<char> = program.chars().collect(); let mut steps: usize = 0; while self.counter < program.len() { let ch = program[self.counter]; match ch { '+
))); } #[test] fn test_max_steps() { let program = "+[]"; let mut engine = BrainFuck::new(256, "", 100); assert_eq!( engine.run(program), Err(String::from( "Exceeded maximum steps (100), the program may be stuck in a loop." )) ); } #[test] fn test_invalid_character() { let program = "nothing lol"; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!(engine.run(program), Ok(String::new())); } #[test] fn test_missing_bracket() { let program = "["; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!( engine.run(program), Err(String::from("A matching \"]\" could not be found.")) ); let program = "+]"; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!( engine.run(program), Err(String::from("A matching \"[\" could not be found.")) ); } #[test] fn test_loop() { let program = "[[-.+]]++[-]-."; let mut engine = BrainFuck::new(256, "", 10000); assert_eq!(engine.run(program), Ok(String::from("ÿ"))); } }
' => self.add(), '-' => self.sub(), '<' => self.left(), '>' => self.right(), '.' => output += &self.print(), ',' => self.read(), '[' => self.start(&program)?, ']' => self.end(&program)?, _ => {} } self.counter += 1; if steps == self.max_steps { return Err(format!( "Exceeded maximum steps ({}), the program may be stuck in a loop.", self.max_steps )); } steps += 1; } Ok(output) } fn add(&mut self) { self.memory[self.pointer].add(); } fn sub(&mut self) { self.memory[self.pointer].sub(); } fn left(&mut self) { self.pointer = if self.pointer == 0 { self.memory.len() - 1 } else { self.pointer - 1 } } fn right(&mut self) { self.pointer = if self.pointer == self.memory.len() - 1 { 0 } else { self.pointer + 1 } } fn print(&mut self) -> String { self.memory[self.pointer].to_string() } fn read(&mut self) { self.memory[self.pointer] = Cell::from(self.input.pop_front().unwrap_or(0)); } fn start(&mut self, program: &[char]) -> Result<(), String> { if self.memory[self.pointer] == Cell::from(0) { let mut level: usize = 1; while level > 0 { self.counter += 1; if self.counter == program.len() { return Err(String::from("A matching \"]\" could not be found.")); } if program[self.counter] == '[' { level += 1; } else if program[self.counter] == ']' { level -= 1; } } } Ok(()) } fn end(&mut self, program: &[char]) -> Result<(), String> { if self.memory[self.pointer] != Cell::from(0) { let mut level: usize = 1; while level > 0 { if self.counter == 0 { return Err(String::from("A matching \"[\" could not be found.")); } self.counter -= 1; if program[self.counter] == '[' { level -= 1; } else if program[self.counter] == ']' { level += 1; } } } Ok(()) } } #[derive(PartialEq, Clone, Copy)] struct Cell { value: u8, } impl Cell { fn add(&mut self) { self.value = if self.value == 255 { 0 } else { self.value + 1 } } fn sub(&mut self) { self.value = if self.value == 0 { 255 } else { self.value - 1 } } } impl ToString for Cell { fn to_string(&self) -> String { String::from(self.value as char) } } impl 
From<u8> for Cell { fn from(value: u8) -> Self { Self { value } } } fn queue_from(input: &str) -> VecDeque<u8> { VecDeque::from(input.as_bytes().to_vec()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_input() { let program = ",++.,-."; let mut engine = BrainFuck::new(256, "a", 10000); assert_eq!(engine.run(program), Ok(String::from("cÿ"))); } #[test] fn test_wraparound() { let program = "->>.<<."; let mut engine = BrainFuck::new(2, "", 10000); assert_eq!(engine.run(program), Ok(String::from("ÿÿ"
random
[ { "content": "#[derive(StructOpt)]\n\n#[structopt(name = \"brainfrick\", about = \"Rust implementation of Brainfuck\")]\n\nstruct Opt {\n\n /// Path to program [optional: will use stdin instead]\n\n #[structopt()]\n\n path: Option<path::PathBuf>,\n\n\n\n /// Input given to program\n\n #[structopt(short, long, default_value = \"\")]\n\n input: String,\n\n\n\n /// Number of cells\n\n #[structopt(short, long, default_value = \"30000\")]\n\n size: usize,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 1, "score": 20675.839912076925 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n\n\n let mut program = String::new();\n\n match opt.path {\n\n Some(path) => {\n\n program = fs::read_to_string(path).expect(\"Failed to read file\");\n\n }\n\n None => {\n\n io::stdin()\n\n .read_to_string(&mut program)\n\n .expect(\"Failed to get stdin\");\n\n }\n\n }\n\n\n\n let mut engine = BrainFuck::new(opt.size, &opt.input, 10000);\n\n match engine.run(&program) {\n\n Ok(res) => println!(\"{}\", res),\n\n Err(msg) => {\n\n eprintln!(\"{}\", msg);\n\n process::exit(1)\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 20672.85222858311 }, { "content": "use brainfrick::BrainFuck;\n\nuse std::io::Read;\n\nuse std::{fs, io, path, process};\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt)]\n\n#[structopt(name = \"brainfrick\", about = \"Rust implementation of Brainfuck\")]\n", "file_path": "src/main.rs", "rank": 11, "score": 4.226747382969467 }, { "content": "# brainfrick\n\nRust implementation of Brainfuck\n\n```\n\nUSAGE:\n\n brainfrick [OPTIONS] [path]\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -i, --input <input> Input given to program [default: ]\n\n -s, --size <size> Number of cells [default: 30000]\n\n\n\nARGS:\n\n <path> Path to program [optional: will use stdin instead]\n\n```\n\n## Examples\n\nFrom stdin: `cat ./path/to/code.bf | brainfrick`\\\n\nFrom path: 
`brainfrick ./path/to/code.bf`\n\n\n\n## Compiling\n\n```\n\ngit clone https://github.com/Gingeh/brainfrick.git\n\ncd ./brainfrick\n\ncargo build --release \n\n```\n", "file_path": "README.md", "rank": 12, "score": 4.13305441112939 }, { "content": "#![no_main]\n\nuse libfuzzer_sys::fuzz_target;\n\nuse brainfrick::BrainFuck;\n\n\n\nfuzz_target!(|data: &[u8]| {\n\n if let Ok(s) = std::str::from_utf8(data) {\n\n let mut engine = BrainFuck::new(256, \"\", 10000);\n\n engine.run(&s);\n\n }\n\n});\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 13, "score": 3.2630187369430566 } ]
Rust
src/service.rs
kfastov/moleculer-rs
c59695f4567a53d24cc9b9072d67a21f104059ff
use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{collections::HashMap, marker::PhantomData}; use crate::{ channels::messages::incoming::{EventMessage, RequestMessage}, Error, ServiceBroker, }; pub type Callback<T> = fn(Context<T>) -> Result<(), Box<dyn std::error::Error>>; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Action { name: String, #[serde(default)] params: Option<Value>, #[serde(skip)] pub(crate) callback: Option<Callback<Action>>, } #[derive(Default, Debug)] pub struct EventBuilder { name: String, params: Option<Value>, callback: Option<Callback<Event>>, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Event { name: String, #[serde(default)] params: Option<Value>, #[serde(skip)] pub(crate) callback: Option<Callback<Event>>, } impl EventBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Self::default() } } pub fn add_params(mut self, params: Value) -> Self { self.params = Some(params); self } pub fn add_callback(mut self, callback: Callback<Event>) -> Self { self.callback = Some(callback); self } pub fn build(self) -> Event { Event { name: self.name, params: self.params, callback: self.callback, } } } #[derive(Default, Debug)] pub struct ActionBuilder { name: String, params: Option<Value>, callback: Option<Callback<Action>>, } impl ActionBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Self::default() } } pub fn add_params(mut self, params: Value) -> Self { self.params = Some(params); self } pub fn add_callback(mut self, callback: Callback<Action>) -> Self { self.callback = Some(callback); self } pub fn build(self) -> Action { Action { name: self.name, params: self.params, callback: self.callback, } } } #[derive(Serialize, Deserialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct Service { name: String, version: Option<i32>, #[serde(default)] #[serde(skip_deserializing)] settings: HashMap<String, String>, #[serde(default)] 
metadata: Option<Value>, pub(crate) actions: HashMap<String, Action>, pub(crate) events: HashMap<String, Event>, } impl Service { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Default::default() } } pub fn set_version(mut self, version: i32) -> Self { self.version = Some(version); self } pub fn add_action(mut self, action: Action) -> Self { self.actions.insert(action.name.clone(), action); self } pub fn add_event(mut self, event: Event) -> Self { self.events.insert(event.name.clone(), event); self } } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "lowercase")] pub enum EventType { Emit, Broadcast, } pub struct Context<T> { phantom: PhantomData<T>, pub id: String, pub broker: ServiceBroker, pub node_id: String, pub action: Option<String>, pub event_name: Option<String>, pub event_type: Option<EventType>, pub event_groups: Vec<String>, pub caller: Option<String>, pub request_id: Option<String>, pub parent_id: Option<String>, pub params: Value, pub meta: Value, pub locals: Option<Value>, pub level: i32, } impl Context<Event> { pub(crate) fn new(event_message: EventMessage, service_broker: ServiceBroker) -> Self { let event_type = if event_message.broadcast.unwrap_or(false) { EventType::Broadcast } else { EventType::Emit }; Self { phantom: PhantomData, broker: service_broker, id: event_message.id, params: event_message.data, action: None, event_type: Some(event_type), event_name: Some(event_message.event), event_groups: vec![], node_id: event_message.sender, caller: event_message.caller, parent_id: event_message.parent_id, request_id: event_message.request_id, meta: event_message.meta, level: event_message.level, locals: None, } } } impl Context<Action> { pub(crate) fn new(request_message: RequestMessage, service_broker: ServiceBroker) -> Self { Self { phantom: PhantomData, broker: service_broker, id: request_message.request_id.clone(), params: request_message.params, action: Some(request_message.action), event_type: None, 
event_name: None, event_groups: vec![], node_id: request_message.sender, caller: request_message.caller, parent_id: request_message.parent_id, request_id: Some(request_message.request_id), meta: request_message.meta, level: 1, locals: None, } } pub fn reply(&self, params: Value) { act_zero::send!(self .broker .addr .reply(self.node_id.clone(), self.id.clone(), params)); } } impl<T> Context<T> { pub fn emit<S: Into<String>>(&self, event: S, params: Value) { self.broker.emit(event, params) } pub fn broadcast<S: Into<String>>(&self, event: S, params: Value) { self.broker.broadcast(event, params) } pub async fn call<S: Into<String>>(self, action: S, params: Value) -> Result<Value, Error> { self.broker.call(action, params).await } }
use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{collections::HashMap, marker::PhantomData}; use crate::{ channels::messages::incoming::{EventMessage, RequestMessage}, Error, ServiceBroker, }; pub type Callback<T> = fn(Context<T>) -> Result<(), Box<dyn std::error::Error>>; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Action { name: String, #[serde(default)] params: Option<Value>, #[serde(skip)] pub(crate) callback: Option<Callback<Action>>, } #[derive(Default, Debug)] pub struct EventBuilder { name: String, params: Option<Value>, callback: Option<Callback<Event>>, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Event { name: String, #[serde(default)] params: Option<Value>, #[serde(skip)] pub(crate) callback: Option<Callback<Event>>, } impl EventBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Self::default() } } pub fn add_params(mut self, params: Value) -> Self { self.params = Some(params); self } pub fn add_callback(mut self, callback: Callback<Event>) -> Self { self.callback = Some(callback); self } pub fn build(self) -> Event { Event { name: self.name, params: self.params, callback: self.callback, } } } #[derive(Default, Debug)] pub struct ActionBuilder { name: String, params: Option<Value>, callback: Option<Callback<Action>>, } impl ActionBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Self::default() } } pub fn add_params(mut self, params: Value) -> Self { self.params = Some(params); self } pub fn add_callback(mut self, callback: Callback<Action>) -> Self { self.callback = Some(callback); self } pub fn build(self) -> Action { Action { name: self.name, params: self.params, callback: self.callback, } } } #[derive(Serialize, Deserialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct Service { name: String, version: Option<i32>, #[serde(default)] #[serde(skip_deserializing)] settings: HashMap<String, String>, #[serde(default)] 
metadata: Option<Value>, pub(crate) actions: HashMap<String, Action>, pub(crate) events: HashMap<String, Event>, } impl Service { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), ..Default::default() } } pub fn set_version(mut self, version: i32) -> Self { self.version = Some(version); self } pub fn add_action(mut self, action: Action) -> Self { self.actions.insert(action.name.clone(), action); self } pub fn add_event(mut self, event: Event) -> Self { self.events.insert(event.name.clone(), event); self } } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "lowercase")] pub enum EventType { Emit, Broadcast, } pub struct Context<T> { phantom: PhantomData<T>, pub id: String, pub broker: ServiceBroker, pub node_id: String, pub action: Option<String>, pub event_name: Option<String>, pub event_type: Option<EventType>, pub event_groups: Vec<String>, pub caller: Option<String>, pub request_id: Option<String>, pub parent_id: Option<String>, pub params: Value, pub meta: Value, pub locals: Option<Value>, pub level: i32, } impl Context<Event> { pub(crate) fn new(event_message: EventMessage, service_broker: ServiceBroker) -> Self {
Self { phantom: PhantomData, broker: service_broker, id: event_message.id, params: event_message.data, action: None, event_type: Some(event_type), event_name: Some(event_message.event), event_groups: vec![], node_id: event_message.sender, caller: event_message.caller, parent_id: event_message.parent_id, request_id: event_message.request_id, meta: event_message.meta, level: event_message.level, locals: None, } } } impl Context<Action> { pub(crate) fn new(request_message: RequestMessage, service_broker: ServiceBroker) -> Self { Self { phantom: PhantomData, broker: service_broker, id: request_message.request_id.clone(), params: request_message.params, action: Some(request_message.action), event_type: None, event_name: None, event_groups: vec![], node_id: request_message.sender, caller: request_message.caller, parent_id: request_message.parent_id, request_id: Some(request_message.request_id), meta: request_message.meta, level: 1, locals: None, } } pub fn reply(&self, params: Value) { act_zero::send!(self .broker .addr .reply(self.node_id.clone(), self.id.clone(), params)); } } impl<T> Context<T> { pub fn emit<S: Into<String>>(&self, event: S, params: Value) { self.broker.emit(event, params) } pub fn broadcast<S: Into<String>>(&self, event: S, params: Value) { self.broker.broadcast(event, params) } pub async fn call<S: Into<String>>(self, action: S, params: Value) -> Result<Value, Error> { self.broker.call(action, params).await } }
let event_type = if event_message.broadcast.unwrap_or(false) { EventType::Broadcast } else { EventType::Emit };
assignment_statement
[ { "content": "fn broadcast_name(ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n let msg: PrintNameMessage = serde_json::from_value(ctx.params)?;\n\n println!(\"Received broadcastName in rust\");\n\n ctx.broker\n\n .broadcast(\"testWithParam\", serde_json::to_value(&msg)?);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/respond_with_event.rs", "rank": 0, "score": 174174.39542546793 }, { "content": "type Result<T> = std::result::Result<T, self::Error>;\n\n\n\n#[derive(Error, Debug)]\n\npub(crate) enum Error {\n\n #[error(\"Unable to connect to NATS: {0}\")]\n\n UnableToConnect(std::io::Error),\n\n #[error(\"Unable to subscribe to channel ({0}): {1}\")]\n\n UnableToSubscribe(String, std::io::Error),\n\n}\n\n\n\n#[derive(Clone)]\n\npub(crate) struct Conn {\n\n pub(crate) conn: Connection,\n\n}\n\n\n\nimpl Conn {\n\n pub(crate) async fn new(nats_address: &str) -> Result<Conn> {\n\n let conn = async_nats::connect(nats_address)\n\n .await\n\n .map_err(Error::UnableToConnect)?;\n", "file_path": "src/nats.rs", "rank": 1, "score": 154549.39025381644 }, { "content": "fn emit_hi(ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n println!(\"Received emitHi in rust\");\n\n ctx.broker.emit(\"test\", serde_json::json!({}));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/respond_with_event.rs", "rank": 2, "score": 153106.684462037 }, { "content": "fn print_name(ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n let msg: PrintNameMessage = serde_json::from_value(ctx.params)?;\n\n\n\n println!(\"Hello to: {} from Rust\", msg.name);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/simple_event.rs", "rank": 3, "score": 153004.17810186688 }, { "content": "fn print_normal(_ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n println!(\"Hello from normal\");\n\n Ok(())\n\n}\n\n\n\nasync fn hello_from_async() {\n\n tokio::time::sleep(Duration::from_secs(5)).await;\n\n println!(\"Hello from async\")\n\n}\n", "file_path": "examples/async_in_callback.rs", 
"rank": 4, "score": 144070.42692471883 }, { "content": "fn print_async(_ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n println!(\"Starting\");\n\n tokio::spawn(async { hello_from_async().await });\n\n println!(\"Ended\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/async_in_callback.rs", "rank": 5, "score": 144070.42692471883 }, { "content": "// callback for second event, will be called whenever \"printName\" event is received\n\nfn print_name(ctx: EventContext) -> Result<(), Box<dyn Error>> {\n\n let msg: PrintNameMessage = serde_json::from_value(ctx.params)?;\n\n\n\n println!(\"Hello to: {} from Rust\", msg.name);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 6, "score": 138571.72709973285 }, { "content": "fn print_hi(_ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n println!(\"Hello from Rust\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/simple_event.rs", "rank": 7, "score": 131936.46713843595 }, { "content": "fn ask_node_for_answer(ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n tokio::spawn(async move {\n\n let a = 10;\n\n let b = 78;\n\n\n\n let response = ctx\n\n .broker\n\n .call(\"greeter.math.add.js\", json!({\"a\": a, \"b\": b}))\n\n .await;\n\n\n\n let answer = response.unwrap().to_string();\n\n\n\n println!(\"The answer to the question {} + {} is {}\", a, b, answer);\n\n });\n\n Ok(())\n\n}\n", "file_path": "examples/simple_send_request.rs", "rank": 8, "score": 117560.35148276106 }, { "content": "// callback for math action\n\nfn math_add(ctx: ActionContext) -> Result<(), Box<dyn Error>> {\n\n // get message decode using serde\n\n let msg: ActionMessage = serde_json::from_value(ctx.params.clone())?;\n\n let answer = msg.a + msg.b;\n\n\n\n // serialize reply using serde and send reply\n\n let _ = ctx.reply(answer.into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 117064.39426537376 }, { "content": "// callback for first event, will be called whenever \"printHi\" event 
is received\n\nfn print_hi(_ctx: EventContext) -> Result<(), Box<dyn Error>> {\n\n println!(\"Hello from Rust\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 116746.79561413452 }, { "content": "type RequestId = String;\n\n\n\n#[async_trait]\n\nimpl Actor for Response {\n\n async fn started(&mut self, pid: Addr<Self>) -> ActorResult<()> {\n\n let pid_clone = pid.clone();\n\n send!(pid_clone.listen(pid));\n\n Produces::ok(())\n\n }\n\n\n\n async fn error(&mut self, error: ActorError) -> bool {\n\n error!(\"Response Actor Error: {:?}\", error);\n\n\n\n // do not stop on actor error\n\n false\n\n }\n\n}\n\npub(crate) struct Response {\n\n config: Arc<Config>,\n\n waiters: HashMap<RequestId, Addr<ResponseWaiter>>,\n", "file_path": "src/channels/response.rs", "rank": 11, "score": 116265.76453948172 }, { "content": "fn math_add(ctx: ActionContext) -> Result<(), Box<dyn Error>> {\n\n // get message decode using serde\n\n let msg: ActionMessage = serde_json::from_value(ctx.params.clone())?;\n\n let answer = msg.a + msg.b;\n\n\n\n // serialize reply using serde and send\n\n let _ = ctx.reply(answer.into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/simple_request_reply.rs", "rank": 12, "score": 111253.1660125413 }, { "content": "#[derive(Deserialize, Serialize)]\n\nstruct PrintNameMessage {\n\n name: String,\n\n}\n", "file_path": "examples/respond_with_event.rs", "rank": 13, "score": 97461.09508977851 }, { "content": "#[derive(Deserialize)]\n\nstruct PrintNameMessage {\n\n name: String,\n\n}\n", "file_path": "examples/simple_event.rs", "rank": 14, "score": 97457.47407146184 }, { "content": "fn random_string_iter(take: usize) -> impl Iterator<Item = char> {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(take)\n\n .map(char::from)\n\n}\n\n\n\npub(crate) fn gen_node_id() -> String {\n\n let random_string_length = 6;\n\n\n\n let pid = std::process::id().to_string();\n\n\n\n let hostname = hostname();\n\n\n\n let mut node_id = 
String::with_capacity(hostname.len() + pid.len() + random_string_length);\n\n\n\n node_id.push_str(&hostname);\n\n node_id.push('.');\n\n node_id.push_str(&pid);\n\n node_id.push('-');\n", "file_path": "src/util.rs", "rank": 15, "score": 87857.71350129711 }, { "content": "#[derive(Deserialize)]\n\nstruct ActionMessage {\n\n a: i32,\n\n b: i32,\n\n}\n\n```\n\n*/\n\n\n\nmod data_structures;\n\nmod util;\n\n\n\npub mod config;\n\npub mod service;\n\n\n\nmod broker;\n\nmod channels;\n\nmod nats;\n\n\n\nuse act_zero::runtimes::tokio::spawn_actor;\n\nuse act_zero::*;\n\nuse config::Config;\n", "file_path": "src/lib.rs", "rank": 16, "score": 76345.11465334988 }, { "content": "#[derive(Deserialize)]\n\nstruct PrintNameMessage {\n\n name: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 73638.37867265262 }, { "content": "struct NodeWatcher {\n\n node_name: NodeName,\n\n pid: WeakAddr<Self>,\n\n broker: WeakAddr<ServiceBroker>,\n\n timer: Timer,\n\n\n\n heartbeat_timeout: u32,\n\n last_heartbeat: Instant,\n\n}\n\n\n\nimpl NodeWatcher {\n\n pub(crate) fn new(\n\n name: NodeName,\n\n heartbeat_timeout: u32,\n\n broker: WeakAddr<ServiceBroker>,\n\n ) -> Self {\n\n Self {\n\n node_name: name,\n\n pid: WeakAddr::detached(),\n\n broker,\n", "file_path": "src/broker/registry.rs", "rank": 18, "score": 72937.2612627145 }, { "content": "#[derive(Deserialize)]\n\nstruct ActionMessage {\n\n a: i32,\n\n b: i32,\n\n}\n", "file_path": "examples/simple_request_reply.rs", "rank": 19, "score": 71268.40505541646 }, { "content": "fn main() {\n\n let mut opts = built::Options::default();\n\n opts.set_ci(false);\n\n opts.set_env(false);\n\n opts.set_dependencies(false);\n\n opts.set_features(false);\n\n opts.set_cfg(false);\n\n\n\n let src = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let dst = std::path::Path::new(&std::env::var(\"OUT_DIR\").unwrap()).join(\"built.rs\");\n\n\n\n built::write_built_file_with_opts(&opts, src.as_ref(), &dst)\n\n .expect(\"Failed to 
acquire build-time information\");\n\n}\n", "file_path": "build.rs", "rank": 20, "score": 46944.34525472778 }, { "content": "struct ResponseWaiter {\n\n parent: WeakAddr<Response>,\n\n pid: WeakAddr<Self>,\n\n request_id: RequestId,\n\n\n\n timeout: i32,\n\n node_name: String,\n\n tx: Option<Sender<Value>>,\n\n\n\n timer: Timer,\n\n}\n\n\n\nimpl ResponseWaiter {\n\n fn new(timeout: i32, request_id: RequestId, node_name: String, tx: Sender<Value>) -> Self {\n\n Self {\n\n parent: WeakAddr::detached(),\n\n pid: WeakAddr::detached(),\n\n\n\n request_id,\n\n timeout,\n", "file_path": "src/channels/response.rs", "rank": 21, "score": 45848.09787574465 }, { "content": "fn mol(config: &Config) -> Cow<str> {\n\n if config.namespace.is_empty() {\n\n Cow::Borrowed(\"MOL\")\n\n } else {\n\n Cow::Owned(format!(\"MOL-{}\", &config.namespace))\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 22, "score": 36183.86553919628 }, { "content": " events: Events::new(),\n\n actions: Actions::new(),\n\n\n\n pid: Addr::detached(),\n\n channel_supervisor: Addr::detached(),\n\n config: Arc::new(config),\n\n }\n\n }\n\n\n\n // exposed publicly via crate::ServiceBroker\n\n pub(crate) async fn emit(&mut self, event_name: String, params: Value) -> ActorResult<()> {\n\n let node_name = self\n\n .registry\n\n .get_node_name_for_event(&event_name)\n\n .ok_or_else(|| Error::NodeNotFound(event_name.clone()))?;\n\n\n\n let node_event_channel = Channel::Event.external_channel(&self.config, node_name);\n\n\n\n let message = outgoing::EventMessage::new_for_emit(&self.config, &event_name, params);\n\n\n", "file_path": "src/broker.rs", "rank": 38, "score": 31417.138462116665 }, { "content": " Produces::ok(())\n\n }\n\n\n\n async fn error(&mut self, error: ActorError) -> bool {\n\n log::error!(\"ServiceBroker Actor Error: {:?}\", error);\n\n // do not stop on actor error\n\n false\n\n }\n\n}\n\nimpl ServiceBroker {\n\n pub(crate) fn new(config: config::Config) -> Self {\n\n Self {\n\n namespace: 
config.namespace.clone(),\n\n node_id: config.node_id.clone(),\n\n instance_id: config.instance_id.clone(),\n\n serializer: config.serializer.clone(),\n\n\n\n services: vec![],\n\n\n\n registry: Registry::new(),\n", "file_path": "src/broker.rs", "rank": 39, "score": 31411.43097767062 }, { "content": "#[allow(dead_code)]\n\npub(crate) struct ServiceBroker {\n\n pub(crate) namespace: String,\n\n pub(crate) node_id: String,\n\n pub(crate) instance_id: String,\n\n pub(crate) serializer: Serializer,\n\n pub(crate) services: Vec<Service>,\n\n\n\n pub(crate) events: Events,\n\n pub(crate) actions: Actions,\n\n\n\n pub(crate) registry: Registry,\n\n\n\n pid: Addr<Self>,\n\n channel_supervisor: Addr<ChannelSupervisor>,\n\n config: Arc<config::Config>,\n\n}\n\n\n\npub(crate) struct Events(HashMap<String, Event>);\n\npub(crate) struct Actions(HashMap<String, Action>);\n", "file_path": "src/broker.rs", "rank": 40, "score": 31410.083614396048 }, { "content": " .publish_to_channel(node_event_channel, serde_json::to_vec(&message)?));\n\n }\n\n\n\n Produces::ok(())\n\n }\n\n\n\n pub(crate) async fn call(\n\n &mut self,\n\n action: String,\n\n params: Value,\n\n tx: Sender<Value>,\n\n ) -> ActorResult<()> {\n\n let node_name = self\n\n .registry\n\n .get_node_name_for_action(&action)\n\n .ok_or_else(|| Error::NodeNotFound(action.clone()))?;\n\n\n\n let node_request_channel = Channel::Request.external_channel(&self.config, &node_name);\n\n let message = outgoing::RequestMessage::new(&self.config, &action, params);\n\n let serialized_message = serde_json::to_vec(&message)?;\n", "file_path": "src/broker.rs", "rank": 41, "score": 31410.04629400354 }, { "content": "use crate::{\n\n channels::{self, ChannelSupervisor},\n\n config::{self, Channel, DeserializeError, Serializer},\n\n service::{Context, Event, Service},\n\n};\n\n\n\nuse thiserror::Error;\n\n\n\nuse self::registry::Registry;\n\n\n\n#[derive(Error, Debug)]\n\npub(crate) enum Error {\n\n #[error(transparent)]\n\n 
Channel(#[from] channels::Error),\n\n\n\n #[error(\"Unable to deserialize to EventContext: {0}\")]\n\n EventDeserializeFail(#[from] config::DeserializeError),\n\n\n\n #[error(\"Unable to find event '{0}' in registry\")]\n\n EventNotFound(String),\n", "file_path": "src/broker.rs", "rank": 42, "score": 31408.86987861424 }, { "content": " send!(self\n\n .channel_supervisor\n\n .publish_to_channel(node_event_channel, serde_json::to_vec(&message)?));\n\n\n\n Produces::ok(())\n\n }\n\n\n\n pub(crate) async fn broadcast(&self, event_name: String, params: Value) -> ActorResult<()> {\n\n let node_names = self\n\n .registry\n\n .get_all_nodes_for_event(&event_name)\n\n .ok_or_else(|| Error::NodeNotFound(event_name.clone()))?;\n\n\n\n let message = outgoing::EventMessage::new_for_broadcast(&self.config, &event_name, params);\n\n\n\n for node_name in node_names {\n\n let node_event_channel = Channel::Event.external_channel(&self.config, node_name);\n\n\n\n send!(self\n\n .channel_supervisor\n", "file_path": "src/broker.rs", "rank": 43, "score": 31408.502584322163 }, { "content": " pub(crate) async fn add_services(&mut self, services: Vec<Service>) {\n\n for service in services {\n\n self.add_service(service).await;\n\n }\n\n }\n\n\n\n pub(crate) async fn publish_info_to_channel(&self, channel: String) -> ActorResult<()> {\n\n let info = outgoing::InfoMessage::new(&self.config, &self.services);\n\n send!(self\n\n .channel_supervisor\n\n .publish_to_channel(channel, self.serializer.serialize(info)?));\n\n\n\n Produces::ok(())\n\n }\n\n\n\n pub(crate) async fn handle_incoming_event(\n\n &self,\n\n event_message: Result<EventMessage, DeserializeError>,\n\n ) -> ActorResult<()> {\n\n let event_message = event_message?;\n", "file_path": "src/broker.rs", "rank": 44, "score": 31407.04389348706 }, { "content": "\n\n let event = self\n\n .events\n\n .get(&event_message.event)\n\n .ok_or_else(|| Error::EventNotFound(event_message.event.clone()))?;\n\n\n\n let callback = event\n\n 
.callback\n\n .ok_or_else(|| Error::EventCallbackNotFound(event_message.event.clone()))?;\n\n\n\n let event_context = Context::<Event>::new(event_message, self.pid.clone().into());\n\n\n\n callback(event_context).map_err(|err| Error::EventCallbackFailed(err.to_string()))?;\n\n\n\n Produces::ok(())\n\n }\n\n\n\n pub(crate) async fn handle_incoming_request(\n\n &self,\n\n request_message: Result<RequestMessage, DeserializeError>,\n", "file_path": "src/broker.rs", "rank": 45, "score": 31405.609665322063 }, { "content": " ) -> ActorResult<()> {\n\n let request_message = request_message?;\n\n\n\n let request = self\n\n .actions\n\n .get(&request_message.action)\n\n .ok_or_else(|| Error::ActionNotFound(request_message.action.clone()))?;\n\n\n\n let callback = request\n\n .callback\n\n .ok_or_else(|| Error::ActionCallbackNotFound(request_message.action.clone()))?;\n\n\n\n let request_context = Context::<Action>::new(request_message, self.pid.clone().into());\n\n\n\n callback(request_context).map_err(|err| Error::ActionCallbackFailed(err.to_string()))?;\n\n\n\n Produces::ok(())\n\n }\n\n\n\n async fn broadcast_info(&self) -> ActorResult<()> {\n\n self.publish_info_to_channel(Channel::Info.channel_to_string(&self.config))\n\n .await\n\n }\n\n}\n", "file_path": "src/broker.rs", "rank": 46, "score": 31402.426723621113 }, { "content": " pub(crate) async fn missed_heartbeat(&mut self, node_name: String) {\n\n warn!(\n\n \"Node {} expectedly disconnected (missed heartbeat)\",\n\n &node_name\n\n );\n\n self.registry.remove_node(node_name);\n\n }\n\n\n\n pub(crate) async fn handle_heartbeat_message(&mut self, heartbeat: HeartbeatMessage) {\n\n if self.node_id != heartbeat.sender {\n\n self.registry.update_node(heartbeat);\n\n }\n\n }\n\n\n\n pub(crate) async fn add_service(&mut self, service: Service) {\n\n self.services.push(service);\n\n self.events = (&self.services).into();\n\n self.actions = (&self.services).into();\n\n }\n\n\n", "file_path": "src/broker.rs", "rank": 47, 
"score": 31401.040444308957 }, { "content": "\n\n call!(self\n\n .channel_supervisor\n\n .start_response_waiter(node_name, message.request_id, tx))\n\n .await?;\n\n\n\n send!(self\n\n .channel_supervisor\n\n .publish_to_channel(node_request_channel, serialized_message));\n\n\n\n Produces::ok(())\n\n }\n\n\n\n pub(crate) async fn reply(&self, node: String, id: String, reply: Value) -> ActorResult<()> {\n\n let message = outgoing::ResponseMessage::new(&self.config, &id, reply);\n\n\n\n let reply_channel = Channel::Response.external_channel(&self.config, node);\n\n\n\n send!(self\n\n .channel_supervisor\n", "file_path": "src/broker.rs", "rank": 48, "score": 31399.845342315642 }, { "content": "}\n\n\n\nimpl Actions {\n\n fn new() -> Self {\n\n Actions(HashMap::new())\n\n }\n\n\n\n fn get(&self, key: &str) -> Option<&Action> {\n\n self.0.get(key)\n\n }\n\n}\n\n\n\nimpl From<&Vec<Service>> for Actions {\n\n fn from(services: &Vec<Service>) -> Self {\n\n Actions(\n\n services\n\n .iter()\n\n .flat_map(|service| service.actions.clone())\n\n .collect(),\n\n )\n", "file_path": "src/broker.rs", "rank": 49, "score": 31399.47006554253 }, { "content": "\n\nimpl Events {\n\n fn new() -> Self {\n\n Events(HashMap::new())\n\n }\n\n\n\n fn get(&self, key: &str) -> Option<&Event> {\n\n self.0.get(key)\n\n }\n\n}\n\n\n\nimpl From<&Vec<Service>> for Events {\n\n fn from(services: &Vec<Service>) -> Self {\n\n Events(\n\n services\n\n .iter()\n\n .flat_map(|service| service.events.clone())\n\n .collect(),\n\n )\n\n }\n", "file_path": "src/broker.rs", "rank": 50, "score": 31399.3106734067 }, { "content": "\n\n #[error(\"Unable to find callback function for event '{0}'\")]\n\n EventCallbackNotFound(String),\n\n\n\n #[error(\"Call back function failed to complete: {0}\")]\n\n EventCallbackFailed(String),\n\n\n\n #[error(\"Unable to find action '{0}' in registry\")]\n\n ActionNotFound(String),\n\n\n\n #[error(\"Unable to find callback function for action '{0}'\")]\n\n 
ActionCallbackNotFound(String),\n\n\n\n #[error(\"Call back function failed to complete: {0}\")]\n\n ActionCallbackFailed(String),\n\n\n\n #[error(\"Node not found for ('{0}') event or action\")]\n\n NodeNotFound(String),\n\n}\n\n\n", "file_path": "src/broker.rs", "rank": 51, "score": 31398.014842394146 }, { "content": " }\n\n}\n\n\n\n#[async_trait]\n\nimpl Actor for ServiceBroker {\n\n async fn started(&mut self, pid: Addr<Self>) -> ActorResult<()> {\n\n self.pid = pid.clone();\n\n\n\n let channel_supervisor = channels::start_supervisor(pid, Arc::clone(&self.config))\n\n .await\n\n .map_err(Error::Channel)?;\n\n\n\n send!(self.pid.broadcast_info());\n\n send!(channel_supervisor.broadcast_discover());\n\n\n\n self.channel_supervisor = channel_supervisor.clone();\n\n\n\n self.pid\n\n .send_fut(async move { channels::listen_for_disconnect(channel_supervisor).await });\n\n\n", "file_path": "src/broker.rs", "rank": 52, "score": 31397.88331238743 }, { "content": "mod registry;\n\n\n\nuse std::{collections::HashMap, sync::Arc};\n\n\n\nuse act_zero::*;\n\nuse async_trait::async_trait;\n\nuse log::{error, warn};\n\nuse serde_json::Value;\n\nuse tokio::sync::oneshot::Sender;\n\n\n\nuse crate::{\n\n channels::messages::{\n\n incoming::{\n\n DisconnectMessage, EventMessage, HeartbeatMessage, InfoMessage, RequestMessage,\n\n },\n\n outgoing::{self},\n\n },\n\n service::Action,\n\n};\n\n\n", "file_path": "src/broker.rs", "rank": 53, "score": 31396.325871868812 }, { "content": " .publish_to_channel(reply_channel, serde_json::to_vec(&message)?));\n\n\n\n Produces::ok(())\n\n }\n\n\n\n // private\n\n\n\n pub(crate) async fn handle_info_message(&mut self, info: InfoMessage) {\n\n if self.node_id != info.sender {\n\n self.registry\n\n .add_or_update_node(self.pid.clone(), self.config.heartbeat_timeout, info);\n\n }\n\n }\n\n\n\n pub(crate) async fn handle_disconnect_message(&mut self, disconnect: DisconnectMessage) {\n\n if self.node_id != disconnect.sender {\n\n 
self.registry.remove_node(disconnect.sender);\n\n }\n\n }\n\n\n", "file_path": "src/broker.rs", "rank": 54, "score": 31388.918868627712 }, { "content": "use std::{error::Error, time::Duration};\n\n\n\nuse moleculer::{\n\n config::{ConfigBuilder, Transporter},\n\n service::{Context, Event, EventBuilder, Service},\n\n ServiceBroker,\n\n};\n\n\n\n#[tokio::main]\n\nasync fn main() -> eyre::Result<()> {\n\n env_logger::init();\n\n color_eyre::install()?;\n\n\n\n let config = ConfigBuilder::default()\n\n .transporter(Transporter::nats(\"nats://localhost:4222\"))\n\n .build();\n\n\n\n let print_async = EventBuilder::new(\"printAsync\")\n\n .add_callback(print_async)\n\n .build();\n", "file_path": "examples/async_in_callback.rs", "rank": 55, "score": 30652.67758267495 }, { "content": "\n\n let print_normal = EventBuilder::new(\"printNormal\")\n\n .add_callback(print_normal)\n\n .build();\n\n\n\n let greeter_service = Service::new(\"asyncGreeter\")\n\n .add_event(print_normal)\n\n .add_event(print_async);\n\n\n\n let service_broker = ServiceBroker::new(config).add_service(greeter_service);\n\n service_broker.start().await;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/async_in_callback.rs", "rank": 56, "score": 30648.18116523656 }, { "content": "use std::error::Error;\n\n\n\nuse moleculer::{\n\n config::{ConfigBuilder, Transporter},\n\n service::{Context, Event, EventBuilder, Service},\n\n ServiceBroker,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[tokio::main]\n\nasync fn main() -> eyre::Result<()> {\n\n env_logger::init();\n\n color_eyre::install()?;\n\n\n\n let config = ConfigBuilder::default()\n\n .transporter(Transporter::nats(\"nats://localhost:4222\"))\n\n .build();\n\n\n\n let emit_hi = EventBuilder::new(\"emitHi\").add_callback(emit_hi).build();\n\n\n", "file_path": "examples/respond_with_event.rs", "rank": 57, "score": 30216.295597886194 }, { "content": "use crate::{\n\n broker::ServiceBroker,\n\n channels::messages::incoming::EventMessage,\n\n 
config::{self, Channel, Config},\n\n nats::Conn,\n\n};\n\n\n\nuse act_zero::*;\n\nuse async_nats::Message;\n\nuse async_trait::async_trait;\n\nuse config::DeserializeError;\n\nuse log::{debug, error, info};\n\nuse std::sync::Arc;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub(crate) enum Error {}\n\n\n\n#[async_trait]\n\nimpl Actor for Event {\n", "file_path": "src/channels/event.rs", "rank": 58, "score": 30214.80381351339 }, { "content": " let broadcast_name = EventBuilder::new(\"broadcastName\")\n\n .add_callback(broadcast_name)\n\n .build();\n\n\n\n let greeter_service = Service::new(\"rustGreeter\")\n\n .add_event(emit_hi)\n\n .add_event(broadcast_name);\n\n\n\n let service_broker = ServiceBroker::new(config).add_service(greeter_service);\n\n service_broker.start().await;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/respond_with_event.rs", "rank": 59, "score": 30214.214025190973 }, { "content": " async fn started(&mut self, pid: Addr<Self>) -> ActorResult<()> {\n\n let pid_clone = pid.clone();\n\n send!(pid_clone.listen(pid));\n\n Produces::ok(())\n\n }\n\n\n\n async fn error(&mut self, error: ActorError) -> bool {\n\n error!(\"Event Actor Error: {:?}\", error);\n\n\n\n // do not stop on actor error\n\n false\n\n }\n\n}\n\npub(crate) struct Event {\n\n config: Arc<Config>,\n\n broker: WeakAddr<ServiceBroker>,\n\n conn: Conn,\n\n}\n\n\n\nimpl Event {\n", "file_path": "src/channels/event.rs", "rank": 60, "score": 30213.33224704424 }, { "content": " pub(crate) async fn new(\n\n broker: WeakAddr<ServiceBroker>,\n\n config: &Arc<Config>,\n\n conn: &Conn,\n\n ) -> Self {\n\n Self {\n\n broker,\n\n conn: conn.clone(),\n\n config: Arc::clone(config),\n\n }\n\n }\n\n\n\n pub(crate) async fn listen(&mut self, pid: Addr<Self>) {\n\n info!(\"Listening for EVENT messages\");\n\n let channel = self\n\n .conn\n\n .subscribe(&Channel::Event.channel_to_string(&self.config))\n\n .await\n\n .unwrap();\n\n\n", "file_path": "src/channels/event.rs", "rank": 61, 
"score": 30211.99966639979 }, { "content": "use std::error::Error;\n\n\n\nuse moleculer::{\n\n config::{ConfigBuilder, Transporter},\n\n service::{Context, Event, EventBuilder, Service},\n\n ServiceBroker,\n\n};\n\nuse serde::Deserialize;\n\n\n\n#[tokio::main]\n\nasync fn main() -> eyre::Result<()> {\n\n env_logger::init();\n\n color_eyre::install()?;\n\n\n\n let config = ConfigBuilder::default()\n\n .transporter(Transporter::nats(\"nats://localhost:4222\"))\n\n .build();\n\n\n\n let print_hi = EventBuilder::new(\"printHi\").add_callback(print_hi).build();\n\n\n", "file_path": "examples/simple_event.rs", "rank": 62, "score": 30210.434148388922 }, { "content": " pid.clone().send_fut(async move {\n\n while let Some(msg) = channel.next().await {\n\n match call!(pid.handle_message(msg)).await {\n\n Ok(_) => debug!(\"Successfully handled EVENT message\"),\n\n Err(e) => error!(\"Unable to handle EVENT message: {}\", e),\n\n }\n\n }\n\n })\n\n }\n\n\n\n async fn handle_message(&self, msg: Message) -> ActorResult<()> {\n\n let event_context: Result<EventMessage, DeserializeError> =\n\n self.config.serializer.deserialize(&msg.data);\n\n\n\n send!(self.broker.handle_incoming_event(event_context));\n\n\n\n Produces::ok(())\n\n }\n\n}\n", "file_path": "src/channels/event.rs", "rank": 63, "score": 30207.535930739698 }, { "content": " let print_name = EventBuilder::new(\"printName\")\n\n .add_callback(print_name)\n\n .build();\n\n\n\n let greeter_service = Service::new(\"rustGreeter\")\n\n .add_event(print_hi)\n\n .add_event(print_name);\n\n\n\n let service_broker = ServiceBroker::new(config).add_service(greeter_service);\n\n service_broker.start().await;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/simple_event.rs", "rank": 64, "score": 30207.37185694522 }, { "content": " pub(crate) events: HashSet<EventName>,\n\n pub(crate) actions: HashSet<ActionName>,\n\n}\n\n\n\nimpl Node {\n\n fn new(broker: Addr<ServiceBroker>, heartbeat_timeout: u32, info: &InfoMessage) -> Self 
{\n\n let node_watcher =\n\n NodeWatcher::new(info.sender.clone(), heartbeat_timeout, broker.downgrade());\n\n\n\n Self {\n\n node_watcher_pid: spawn_actor(node_watcher),\n\n name: info.sender.clone(),\n\n cpu: None,\n\n ip_list: info.ip_list.clone(),\n\n hostname: info.hostname.clone(),\n\n client: info.client.clone(),\n\n instance_id: info.instance_id.clone(),\n\n events: hashset![],\n\n actions: hashset![],\n\n }\n", "file_path": "src/broker/registry.rs", "rank": 65, "score": 29841.971502821034 }, { "content": "pub(crate) type ActionName = String;\n\npub(crate) type EventName = String;\n\npub(crate) type NodeName = String;\n\n\n\npub(crate) struct Registry {\n\n actions: HashMap<EventName, QueueSet<NodeName>>,\n\n events: HashMap<EventName, QueueSet<NodeName>>,\n\n nodes: HashMap<NodeName, Node>,\n\n}\n\n\n\nimpl Registry {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n actions: HashMap::new(),\n\n events: HashMap::new(),\n\n nodes: HashMap::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn get_all_nodes_for_event(&self, event_name: &str) -> Option<Vec<NodeName>> {\n", "file_path": "src/broker/registry.rs", "rank": 66, "score": 29838.421706685527 }, { "content": " let event_nodes = self.events.get(event_name)?;\n\n\n\n Some(event_nodes.iter().cloned().collect())\n\n }\n\n\n\n pub(crate) fn get_node_name_for_event(&mut self, event_name: &str) -> Option<NodeName> {\n\n let event_nodes = self.events.get_mut(event_name)?;\n\n event_nodes.get_round_robin()\n\n }\n\n\n\n pub(crate) fn get_node_name_for_action(&mut self, action_name: &str) -> Option<NodeName> {\n\n let action_nodes = self.actions.get_mut(action_name)?;\n\n action_nodes.get_round_robin()\n\n }\n\n\n\n pub(crate) fn add_or_update_node(\n\n &mut self,\n\n broker: Addr<ServiceBroker>,\n\n heartbeat_timeout: u32,\n\n info: InfoMessage,\n", "file_path": "src/broker/registry.rs", "rank": 67, "score": 29829.942848260234 }, { "content": " match self.events.get_mut(event_name) {\n\n // event present from another node, 
add node_name to event's node_names set\n\n Some(node_names) => {\n\n node_names.insert(node.name.clone());\n\n }\n\n\n\n // first instance of event, create event name entry with node_name\n\n None => {\n\n self.events\n\n .insert(event_name.clone(), qset![node.name.clone()]);\n\n }\n\n }\n\n\n\n // insert event into node's events set\n\n node.events.insert(event_name.clone());\n\n }\n\n\n\n // get action_names from info message\n\n let action_names = info\n\n .services\n", "file_path": "src/broker/registry.rs", "rank": 68, "score": 29825.79801945481 }, { "content": " pub(crate) fn update_node(&mut self, heartbeat: HeartbeatMessage) -> Option<()> {\n\n let node = self.nodes.get_mut(&heartbeat.sender)?;\n\n node.cpu = Some(heartbeat.cpu);\n\n\n\n send!(node.node_watcher_pid.received_heartbeat());\n\n\n\n Some(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct Node {\n\n node_watcher_pid: Addr<NodeWatcher>,\n\n\n\n pub(crate) name: NodeName,\n\n pub(crate) cpu: Option<f32>,\n\n pub(crate) ip_list: Vec<String>,\n\n pub(crate) hostname: String,\n\n pub(crate) client: Client,\n\n pub(crate) instance_id: String,\n", "file_path": "src/broker/registry.rs", "rank": 69, "score": 29825.079114966342 }, { "content": " .iter()\n\n .flat_map(|service| service.actions.keys());\n\n\n\n for action_name in action_names {\n\n match self.actions.get_mut(action_name) {\n\n // action present from another node, add node_name to action's node_names set\n\n Some(node_names) => {\n\n node_names.insert(node.name.clone());\n\n }\n\n\n\n // first instance of action, create action name entry with node_name\n\n None => {\n\n self.actions\n\n .insert(action_name.clone(), qset![node.name.clone()]);\n\n }\n\n }\n\n\n\n // insert action into node's actions set\n\n node.actions.insert(action_name.clone());\n\n }\n", "file_path": "src/broker/registry.rs", "rank": 70, "score": 29823.738028854117 }, { "content": " ) {\n\n // get or insert node from/into registry\n\n let node: &mut Node = 
match self.nodes.get_mut(&info.sender) {\n\n Some(node) => node,\n\n None => {\n\n let node = Node::new(broker, heartbeat_timeout, &info);\n\n\n\n self.nodes.insert(info.sender.clone(), node);\n\n self.nodes\n\n .get_mut(&info.sender)\n\n .expect(\"present because just added the node\")\n\n }\n\n };\n\n\n\n // get event_names from info message\n\n let event_names = info\n\n .services\n\n .iter()\n\n .flat_map(|service| service.events.keys());\n\n for event_name in event_names {\n", "file_path": "src/broker/registry.rs", "rank": 71, "score": 29822.619454814907 }, { "content": "use crate::qset;\n\nuse maplit::hashset;\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crate::{\n\n channels::messages::incoming::{Client, HeartbeatMessage, InfoMessage},\n\n data_structures::QueueSet,\n\n};\n\n\n\nuse act_zero::runtimes::tokio::spawn_actor;\n\nuse act_zero::runtimes::tokio::Timer;\n\nuse act_zero::timer::Tick;\n\nuse act_zero::*;\n\nuse async_trait::async_trait;\n\n\n\nuse super::ServiceBroker;\n\n\n", "file_path": "src/broker/registry.rs", "rank": 72, "score": 29819.680426515723 }, { "content": " }\n\n}\n\n\n\n#[async_trait]\n\nimpl Actor for NodeWatcher {\n\n async fn started(&mut self, pid: Addr<Self>) -> ActorResult<()> {\n\n self.pid = pid.downgrade();\n\n\n\n // // Start the timer\n\n self.timer.set_timeout_for_weak(\n\n self.pid.clone(),\n\n Duration::from_secs(self.heartbeat_timeout as u64),\n\n );\n\n\n\n Produces::ok(())\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Tick for NodeWatcher {\n", "file_path": "src/broker/registry.rs", "rank": 73, "score": 29818.750085422947 }, { "content": " async fn tick(&mut self) -> ActorResult<()> {\n\n if self.timer.tick() {\n\n let now = Instant::now();\n\n\n\n if now.duration_since(self.last_heartbeat).as_secs() >= self.heartbeat_timeout as u64 {\n\n // haven't received a heartbeat recently\n\n send!(self.broker.missed_heartbeat(self.node_name.clone()))\n\n } else {\n\n // 
reschedule timer\n\n self.timer.set_timeout_for_weak(\n\n self.pid.clone(),\n\n Duration::from_secs(self.heartbeat_timeout as u64),\n\n );\n\n }\n\n }\n\n Produces::ok(())\n\n }\n\n}\n\n\n", "file_path": "src/broker/registry.rs", "rank": 74, "score": 29818.107066106786 }, { "content": " }\n\n\n\n pub(crate) fn remove_node(&mut self, node_name: NodeName) -> Option<()> {\n\n let node = self.nodes.remove(&node_name)?;\n\n\n\n for event_name in node.events {\n\n let node_names_left_for_event = self\n\n .events\n\n .get_mut(&event_name)\n\n // go through the node's events and remove node from each event\n\n .map(|node_names| {\n\n node_names.remove(&node_name);\n\n node_names\n\n });\n\n\n\n // if the event doesn't have any associated nodes remove the event entirely\n\n if let Some(0) = node_names_left_for_event.map(|node_names| node_names.len()) {\n\n self.events.remove(&event_name);\n\n }\n\n }\n", "file_path": "src/broker/registry.rs", "rank": 75, "score": 29817.704781223027 }, { "content": "\n\n for action_name in node.actions {\n\n let node_names_left_for_action = self\n\n .actions\n\n .get_mut(&action_name)\n\n // go through the node's actions and remove node from each action\n\n .map(|node_names| {\n\n node_names.remove(&node_name);\n\n node_names\n\n });\n\n\n\n // if the action doesn't have any associated nodes remove the action entirely\n\n if let Some(0) = node_names_left_for_action.map(|node_names| node_names.len()) {\n\n self.actions.remove(&action_name);\n\n }\n\n }\n\n\n\n Some(())\n\n }\n\n\n", "file_path": "src/broker/registry.rs", "rank": 76, "score": 29814.124981341833 }, { "content": " timer: Timer::default(),\n\n\n\n heartbeat_timeout,\n\n last_heartbeat: Instant::now(),\n\n }\n\n }\n\n\n\n pub(crate) async fn received_heartbeat(&mut self) {\n\n self.last_heartbeat = Instant::now()\n\n }\n\n}\n", "file_path": "src/broker/registry.rs", "rank": 77, "score": 29810.47892100701 }, { "content": " .add_action(math_action);\n\n\n\n // create service broker 
with service\n\n let service_broker = ServiceBroker::new(config).add_service(greeter_service);\n\n\n\n // start the service broker\n\n service_broker.start().await;\n\n\n\n Ok(())\n\n}\n\n\n\n\n\n// callback for first event, will be called whenever \"printHi\" event is received\n\nfn print_hi(_ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n println!(\"Hello from Rust\");\n\n Ok(())\n\n}\n\n\n\n// callback for second event, will be called whenever \"printName\" event is received\n\nfn print_name(ctx: Context<Event>) -> Result<(), Box<dyn Error>> {\n\n let msg: PrintNameMessage = serde_json::from_value(ctx.params)?;\n\n\n\n println!(\"Hello to: {} from Rust\", msg.name);\n\n\n\n Ok(())\n\n}\n\n\n\n// callback for math action\n\nfn math_add(ctx: Context<Action>) -> Result<(), Box<dyn Error>> {\n\n // get message decode using serde\n\n let msg: ActionMessage = serde_json::from_value(ctx.params.clone())?;\n\n let answer = msg.a + msg.b;\n\n\n\n // serialize reply using serde and send reply\n\n let _ = ctx.reply(answer.into());\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Deserialize)]\n\nstruct PrintNameMessage {\n\n name: String,\n\n}\n\n\n\n#[derive(Deserialize)]\n\nstruct ActionMessage {\n\n a: i32,\n\n b: i32,\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 78, "score": 46.9691075694297 }, { "content": " #[serde(default)]\n\n pub(crate) groups: Option<Vec<String>>,\n\n\n\n #[serde(default)]\n\n pub(crate) broadcast: Option<bool>,\n\n }\n\n\n\n impl<'a> EventMessage<'a> {\n\n pub(crate) fn new_for_emit(config: &'a Config, event: &'a str, params: Value) -> Self {\n\n Self {\n\n event,\n\n\n\n ver: \"4\",\n\n id: Uuid::new_v4().to_string(),\n\n sender: &config.node_id,\n\n data: params,\n\n meta: json!({}),\n\n level: 1,\n\n\n\n tracing: None,\n", "file_path": "src/channels/messages.rs", "rank": 79, "score": 41.13999806118899 }, { "content": " parent_id: &None,\n\n request_id: &None,\n\n\n\n caller: &None,\n\n stream: None,\n\n seq: None,\n\n groups: None,\n\n 
broadcast: Some(false),\n\n }\n\n }\n\n\n\n pub(crate) fn new_for_broadcast(config: &'a Config, event: &'a str, params: Value) -> Self {\n\n Self {\n\n broadcast: Some(true),\n\n ..EventMessage::new_for_emit(config, event, params)\n\n }\n\n }\n\n }\n\n\n\n #[derive(Serialize, Debug)]\n", "file_path": "src/channels/messages.rs", "rank": 80, "score": 38.850997035590225 }, { "content": "\n\n #[serde(default)]\n\n pub(crate) stream: Option<bool>,\n\n\n\n #[serde(default)]\n\n pub(crate) seq: Option<i32>,\n\n }\n\n\n\n impl<'a> RequestMessage<'a> {\n\n pub(crate) fn new(config: &'a Config, action_name: &'a str, params: Value) -> Self {\n\n let id = Uuid::new_v4();\n\n\n\n Self {\n\n ver: \"4\",\n\n sender: &config.node_id,\n\n id: id.to_string(),\n\n\n\n params,\n\n action: action_name,\n\n\n", "file_path": "src/channels/messages.rs", "rank": 81, "score": 37.43201708974479 }, { "content": "\n\n Ok(response_value)\n\n }\n\n\n\n /// Emits a balanced event to one of the nodes.\n\n pub fn emit<S: Into<String>>(&self, event: S, params: Value) {\n\n send!(self.addr.emit(event.into(), params))\n\n }\n\n\n\n /// Emits an event to all the nodes that can handle the event.\n\n pub fn broadcast<S: Into<String>>(&self, event: S, params: Value) {\n\n send!(self.addr.broadcast(event.into(), params))\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl From<Addr<broker::ServiceBroker>> for ServiceBroker {\n\n fn from(addr: Addr<broker::ServiceBroker>) -> Self {\n\n Self { addr }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 82, "score": 37.400000253747976 }, { "content": " }\n\n\n\n #[derive(Deserialize, Debug)]\n\n pub(crate) struct RequestMessage {\n\n pub(crate) id: String,\n\n pub(crate) sender: String,\n\n pub(crate) ver: String,\n\n\n\n pub(crate) action: String,\n\n\n\n #[serde(default)]\n\n pub(crate) params: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) meta: Value,\n\n\n\n pub(crate) timeout: f32,\n\n pub(crate) level: i32,\n\n\n\n #[serde(default)]\n", "file_path": 
"src/channels/messages.rs", "rank": 83, "score": 35.38349924130246 }, { "content": " meta: serde_json::Value::default(),\n\n\n\n timeout: config.request_timeout as f32,\n\n level: 1,\n\n\n\n tracing: None,\n\n parent_id: None,\n\n\n\n request_id: id.to_string(),\n\n caller: None,\n\n\n\n stream: None,\n\n seq: None,\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]\n\npub(crate) struct MoleculerError {\n\n message: String,\n\n code: i8,\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n data: serde_json::Value,\n\n}\n", "file_path": "src/channels/messages.rs", "rank": 84, "score": 34.352895847391615 }, { "content": " Self {\n\n ver: \"4\",\n\n id: request_id,\n\n data: params,\n\n meta: Value::default(),\n\n sender: &config.node_id,\n\n success: true,\n\n error: None,\n\n }\n\n }\n\n }\n\n\n\n #[derive(Serialize, Debug)]\n\n pub(crate) struct RequestMessage<'a> {\n\n pub(crate) id: String,\n\n pub(crate) sender: &'a str,\n\n pub(crate) ver: &'static str,\n\n\n\n pub(crate) action: &'a str,\n\n\n", "file_path": "src/channels/messages.rs", "rank": 85, "score": 34.20766711388837 }, { "content": "/// The struct used to interact with moleculer.\n\n/// Use [`emit()`][Self::emit()], [`broadcast()`][Self::broadcast()] and [`call()`][Self::call()] functions.\n\n/// ```rust, ignore\n\n/// // emit an event\n\n/// broker.emit(\"printHi\", json!{{}});\n\n///\n\n/// // broadcast an event\n\n/// broker.broadcast(\"printHi\", json!{{}});\n\n///\n\n/// // call an action\n\n/// let result = broker.call(\"math.add\", json!{\"a\": 1, \"b\": c}).await?;\n\n/// ```\n\n#[derive(Clone)]\n\npub struct ServiceBroker {\n\n addr: Addr<broker::ServiceBroker>,\n\n}\n\n\n\n/// An alias to [service::Context\\<service::Event>][service::Context].\n\n/// In all contexts [`emit()`][service::Context::emit()], [`broadcast()`][service::Context::broadcast()]\n\n/// and [`call()`][service::Context::call()] are available.\n", "file_path": "src/lib.rs", "rank": 
86, "score": 33.95808535191854 }, { "content": " }\n\n\n\n #[derive(Deserialize, Debug)]\n\n pub(crate) struct EventMessage {\n\n pub(crate) id: String,\n\n pub(crate) sender: String,\n\n pub(crate) ver: String,\n\n\n\n pub(crate) event: String,\n\n\n\n #[serde(default)]\n\n pub(crate) data: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) meta: Value,\n\n pub(crate) level: i32,\n\n\n\n #[serde(default)]\n\n pub(crate) tracing: Option<bool>,\n\n\n", "file_path": "src/channels/messages.rs", "rank": 87, "score": 33.35983926370366 }, { "content": "pub(crate) mod incoming {\n\n use std::collections::HashMap;\n\n\n\n use serde::Deserialize;\n\n use serde_json::Value;\n\n\n\n use crate::service::Service;\n\n\n\n #[derive(Deserialize, Debug, Clone)]\n\n #[serde(rename_all = \"camelCase\")]\n\n pub(crate) struct Client {\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n version: String,\n\n lang_version: String,\n\n }\n\n\n\n #[derive(Deserialize, Debug)]\n\n pub(crate) struct PingMessage {\n\n pub(crate) ver: String,\n", "file_path": "src/channels/messages.rs", "rank": 88, "score": 32.943289257462006 }, { "content": "\n\n config: HashMap::new(),\n\n metadata: HashMap::new(),\n\n }\n\n }\n\n }\n\n\n\n #[derive(Serialize, Debug)]\n\n pub(crate) struct EventMessage<'a> {\n\n pub(crate) id: String,\n\n pub(crate) sender: &'a str,\n\n pub(crate) ver: &'static str,\n\n\n\n pub(crate) event: &'a str,\n\n\n\n #[serde(default)]\n\n pub(crate) data: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) meta: Value,\n", "file_path": "src/channels/messages.rs", "rank": 89, "score": 32.68008682105178 }, { "content": "# moleculer-rs\n\n\n\n![Build Status](https://github.com/primcloud/moleculer-rs/workflows/Rust/badge.svg)\n\n[![Crates.io](https://img.shields.io/crates/v/moleculer.svg)](https://crates.io/crates/moleculer)\n\n[![Documentation](https://docs.rs/moleculer/badge.svg)](https://docs.rs/moleculer)\n\n[![Rust 
1.52+](https://img.shields.io/badge/rust-1.52+-orange.svg)](https://www.rust-lang.org)\n\n\n\nInspired and compatible with [Moleculer JS](https://github.com/moleculerjs/moleculer)\n\n\n\nYou can currently do all the basics of `emit`, `broadcast` and `call`.\n\n\n\nHowever it only works with the `NATS` transporter and `JSON` serializer/deserializer.\n\n\n\n## Getting Started\n\n\n\nAvailable on crates: [crates.io/moleculer](https://crates.io/crates/moleculer)\n\n\n\nDocumentation available at: [docs.rs/moleculer](https://docs.rs/moleculer/)\n\n\n\n```toml\n\nmoleculer = \"0.3.3\"\n\n```\n\n\n\nSimple example showing how to receive an event, and responding to a request, for more check the [examples folder](https://github.com/primcloud/moleculer-rs/tree/master/examples)\n\n\n\n```rust\n\nuse std::error::Error;\n\nuse moleculer::{\n\n config::{ConfigBuilder, Transporter},\n\n service::{Context, Event, EventBuilder, Service},\n\n ServiceBroker,\n\n};\n\nuse serde::Deserialize;\n\n\n\n#[tokio::main]\n\nasync fn main() -> eyre::Result<()> {\n\n env_logger::init();\n\n color_eyre::install()?;\n\n\n\n // build config\n\n let config = ConfigBuilder::default().transporter(Transporter::nats(\"nats://localhost:4222\"))\n\n .build();\n\n\n\n // create the first event\n\n let print_hi = EventBuilder::new(\"printHi\").add_callback(print_hi).build();\n\n\n\n // create the second event\n\n let print_name = EventBuilder::new(\"printName\")\n\n .add_callback(print_name)\n\n .build();\n\n\n\n // create math action\n\n let math_action = ActionBuilder::new(\"mathAdd\").add_callback(math_add).build();\n\n\n\n // create a service with events and actions\n\n let greeter_service = Service::new(\"rustGreeter\")\n\n .add_event(print_hi)\n\n .add_event(print_name)\n", "file_path": "README.md", "rank": 90, "score": 31.968722279733804 }, { "content": " use super::incoming::PingMessage;\n\n use crate::{built_info, config::Config, service::Service};\n\n use serde::Serialize;\n\n use 
serde_json::{json, Value};\n\n use uuid::Uuid;\n\n\n\n #[derive(Serialize, Debug)]\n\n #[serde(rename_all = \"camelCase\")]\n\n pub(crate) struct Client {\n\n #[serde(rename = \"type\")]\n\n type_: &'static str,\n\n version: &'static str,\n\n lang_version: &'static str,\n\n }\n\n\n\n impl Client {\n\n fn new() -> Self {\n\n Self {\n\n type_: \"rust\",\n\n version: env!(\"CARGO_PKG_VERSION\"),\n", "file_path": "src/channels/messages.rs", "rank": 91, "score": 31.155596591636506 }, { "content": " conn: Conn,\n\n}\n\n\n\nimpl Response {\n\n pub(crate) async fn new(config: &Arc<Config>, conn: &Conn) -> Self {\n\n Self {\n\n conn: conn.clone(),\n\n config: Arc::clone(config),\n\n waiters: HashMap::new(),\n\n }\n\n }\n\n\n\n pub(crate) async fn start_response_waiter(\n\n &mut self,\n\n timeout: i32,\n\n node_name: String,\n\n request_id: RequestId,\n\n tx: Sender<Value>,\n\n ) {\n\n let response_waiter_pid = spawn_actor(ResponseWaiter::new(\n", "file_path": "src/channels/response.rs", "rank": 92, "score": 30.174354622934864 }, { "content": " /// Add all the services to the service broker at once.\n\n /// Takes a vector of services and replaces any services the broker already had.\n\n pub fn add_services(self, services: Vec<Service>) -> Self {\n\n send!(self.addr.add_services(services));\n\n self\n\n }\n\n\n\n /// Starts the service, this will run forever until your application exits.\n\n pub async fn start(self) {\n\n self.addr.termination().await\n\n }\n\n\n\n /// Request/Response style call\n\n /// Call an action directly with params serialized into\n\n /// [serde_json::Value](https://docs.rs/serde_json/1.0.64/serde_json/value/index.html) and `await` on the result\n\n pub async fn call<S: Into<String>>(self, action: S, params: Value) -> Result<Value, Error> {\n\n let (tx, rx) = oneshot::channel();\n\n\n\n send!(self.addr.call(action.into(), params, tx));\n\n let response_value = rx.await?;\n", "file_path": "src/lib.rs", "rank": 93, "score": 29.967153592413442 }, { 
"content": " #[serde(default)]\n\n pub(crate) params: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) meta: Value,\n\n\n\n pub(crate) timeout: f32,\n\n pub(crate) level: i32,\n\n\n\n #[serde(default)]\n\n pub(crate) tracing: Option<bool>,\n\n\n\n #[serde(rename = \"parentID\", default)]\n\n pub(crate) parent_id: Option<&'a str>,\n\n\n\n #[serde(rename = \"requestID\", default)]\n\n pub(crate) request_id: String,\n\n\n\n #[serde(rename = \"caller\", default)]\n\n pub(crate) caller: Option<&'a str>,\n", "file_path": "src/channels/messages.rs", "rank": 94, "score": 29.56906237548698 }, { "content": " pub(crate) struct ResponseMessage<'a> {\n\n pub(crate) id: &'a str,\n\n pub(crate) sender: &'a str,\n\n pub(crate) ver: &'static str,\n\n\n\n #[serde(default)]\n\n pub(crate) data: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) meta: Value,\n\n\n\n #[serde(default)]\n\n pub(crate) error: Option<crate::channels::messages::MoleculerError>,\n\n\n\n #[serde(default)]\n\n pub(crate) success: bool,\n\n }\n\n\n\n impl<'a> ResponseMessage<'a> {\n\n pub(crate) fn new(config: &'a Config, request_id: &'a str, params: Value) -> Self {\n", "file_path": "src/channels/messages.rs", "rank": 95, "score": 28.45416158373079 }, { "content": "use crate::{\n\n broker::ServiceBroker,\n\n channels::messages::incoming::RequestMessage,\n\n config::{self, Channel, Config},\n\n nats::Conn,\n\n};\n\n\n\nuse act_zero::*;\n\nuse async_nats::Message;\n\nuse async_trait::async_trait;\n\nuse config::DeserializeError;\n\nuse log::{debug, error, info};\n\nuse std::sync::Arc;\n\n\n\n#[async_trait]\n\nimpl Actor for Request {\n\n async fn started(&mut self, pid: Addr<Self>) -> ActorResult<()> {\n\n let pid_clone = pid.clone();\n\n send!(pid_clone.listen(pid));\n\n Produces::ok(())\n", "file_path": "src/channels/request.rs", "rank": 96, "score": 28.082166032644167 }, { "content": "pub type EventContext = service::Context<service::Event>;\n\n\n\n/// An alias to 
[service::Context\\<service::Action>][service::Context].\n\n/// Send a response to a request using [`reply()`][service::Context::reply()].\n\npub type ActionContext = service::Context<service::Action>;\n\n\n\nimpl ServiceBroker {\n\n /// Create new service broker, takes [Config] struct.\n\n pub fn new(config: Config) -> ServiceBroker {\n\n ServiceBroker {\n\n addr: spawn_actor(broker::ServiceBroker::new(config)),\n\n }\n\n }\n\n\n\n /// Add a service to the service broker.\n\n pub fn add_service(self, service: Service) -> Self {\n\n send!(self.addr.add_service(service));\n\n self\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 97, "score": 27.58868769656584 }, { "content": "/*!\n\nInspired and compatible with [Moleculer JS](https://github.com/moleculerjs/moleculer)\n\n\n\nYou can currently do all the basics of `emit`, `broadcast` and `call`.\n\n\n\nHowever it only works with the `NATS` transporter and `JSON` serializer/deserializer.\n\n\n\n## Getting Started\n\n\n\nSimple example showing how to receive an event, and responding to a request, for more check the\n\n[examples folder](https://github.com/primcloud/moleculer-rs/tree/master/examples).\n\n\n\n```rust\n\nuse std::error::Error;\n\nuse serde::Deserialize;\n\n\n\nuse moleculer::{\n\n config::{ConfigBuilder, Transporter},\n\n service::{EventBuilder, Service, ActionBuilder},\n\n EventContext, ActionContext, ServiceBroker,\n", "file_path": "src/lib.rs", "rank": 98, "score": 27.245432087643998 }, { "content": "\n\n#[derive(Serialize, Debug, Builder)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[builder(pattern = \"owned\")]\n\n#[builder(build_fn(name = \"build_private\", private))]\n\n#[builder(setter(into, strip_option))]\n\npub struct Config {\n\n #[builder(default = \"\\\"\\\".to_string()\")]\n\n pub(crate) namespace: String,\n\n #[serde(rename = \"nodeID\")]\n\n #[builder(default = \"util::gen_node_id()\")]\n\n pub(crate) node_id: String,\n\n #[builder(default = \"Logger::Console\")]\n\n pub(crate) logger: 
Logger,\n\n #[builder(default = \"log::Level::Info\")]\n\n pub(crate) log_level: log::Level,\n\n #[builder(default = \"Transporter::nats(\\\"nats://localhost:4222\\\")\")]\n\n pub(crate) transporter: Transporter,\n\n #[builder(default = \"1000 * 60 * 5\")]\n\n pub(crate) request_timeout: i32,\n", "file_path": "src/config.rs", "rank": 99, "score": 26.797228074056267 } ]
Rust
tools/butler/src/db/photos/patch.rs
hsfzxjy/omoyde
6cda092c1f895ac03e1aea2c32e315a9bad06202
use crate::prelude::*; use paste::paste; use std::borrow::Cow; use std::ptr::NonNull; #[derive(Clone, Debug)] pub struct Diff<'a, T: Clone> { old: Cow<'a, T>, new: Option<Cow<'a, T>>, } impl<'a, T: Clone> From<T> for Diff<'a, T> { fn from(v: T) -> Self { Self { old: Cow::Owned(v), new: None, } } } impl<'a, T: Clone> From<&'a T> for Diff<'a, T> { fn from(v: &'a T) -> Self { Self { old: Cow::Borrowed(v), new: None, } } } impl<'b, 'a: 'b, 'c: 'a, T: Clone> Diff<'a, T> { fn write(&self, slot: &mut T) { self.new.as_ref().map(|v| *slot = v.clone().into_owned()); } fn get_mut(&'b mut self) -> &'b mut T { if self.new.is_none() { self.new = Some(self.old.clone()); } self.new.as_mut().unwrap().to_mut() } pub fn set(&mut self, v: T) { *self.get_mut() = v; } fn current(&self) -> &T { self.new.as_ref().unwrap_or(&self.old) } fn to_owned(&'b self) -> Diff<'c, T> { Diff { old: Cow::Owned(self.old.clone().into_owned()), new: self .new .as_ref() .map(|x| Cow::Owned(x.clone().into_owned())), } } } impl<'a, T: Eq + Clone> Diff<'a, T> { fn changed(&self) -> bool { if matches!(self.old, Cow::Borrowed(_)) && matches!(self.new, None | Some(Cow::Borrowed(_))) { return false; } self.new .as_ref() .map(|new| new.ne(&self.old)) .unwrap_or(false) } fn run_if_changed<F>(&self, f: F) where F: FnOnce(&T, &T), { if self.changed() { f(&self.old, &self.new.as_ref().unwrap()) } } } macro_rules! 
fields { ($action: ident; $args: tt) => { fields!{@iter $action, $args, [ (metadata; PhotoMetadata), (file_hash; FileHash), (selected; bool), (status; PhotoRecordStatus), (commit_time; Option<DateTime<Utc>>) ]} }; (@iter define_struct, (), [ $(( $N: ident; $T: ty )),+ ]) => { #[derive(Debug)] pub struct PhotoRecordDiff<'a> { pub pid: PID, is_missing: bool, $( pub $N: Diff<'a, $T>, )+ } }; (@iter to_owned, ($self: ident), [ $(( $N: ident; $T: ty )),+ ]) => { PhotoRecordDiff { pid: $self.pid, is_missing: $self.is_missing, $( $N: $self.$N.to_owned(), )+ } }; (@iter new, ($arg: ident), [ $(( $N: ident; $T: ty )),+ ]) => { Self { pid: $arg.pid, is_missing: false, $( $N: (&$arg.$N).into(), )+ } }; (@iter $action: ident, $args: tt, [ $(( $N: ident; $T: ty )),+ ]) => { $( fields!(@call $action, $args, $N, $T); )+ }; (@call write, ($self: ident, $arg: ident, $changed: ident), $N: ident, $T: ty) => { if $self.$N.changed() { $self.$N.old.to_mut(); $self.$N.write(&mut $arg.$N); $changed = true; } }; (@call accessor, (), $N: ident, $T: ty) => { paste!{ pub fn $N(&self) -> &$T { self.rec_diff.$N.current() } pub fn [<set_ $N>](&mut self, v: $T) { *self.rec_diff.$N.get_mut() = v; } pub fn [<$N _mut>](&mut self) -> &mut $T { self.rec_diff.$N.get_mut() } pub fn [<with_ $N>](mut self, v: $T) -> Self { self.[<set_ $N>](v); self } pub fn [<set_ $N _with>]<F>(mut self, f: F) -> Self where F: FnOnce(&mut $T) { f(self.rec_diff.$N.get_mut()); self } } } } fields!(define_struct; ()); impl<'a> PhotoRecordDiff<'a> { fn write<'b, 'c>(&'b mut self, rec: &'c mut PhotoRecord) -> (&'c PhotoRecord, bool) { let mut changed = false; fields!(write; (self, rec, changed)); (rec, changed) } pub fn new(rec: &'a PhotoRecord) -> Self { fields!(new; (rec)) } fn is_dirty(&self) -> bool { self.file_hash.changed() || self.metadata.changed() } fn to_owned<'c: 'a>(&self) -> PhotoRecordDiff<'c> { fields!(to_owned; (self)) } } pub struct PhotoRecordPatch<'b, 'a: 'b> { commit_at_drop: bool, rec_diff: 
PhotoRecordDiff<'a>, rec: NonNull<PhotoRecord>, ptr: &'b TableRefMut<'a, PhotoTable>, } #[allow(dead_code)] impl<'b, 'a: 'b> PhotoRecordPatch<'b, 'a> { fields!(accessor; ()); pub(super) fn with_diff<'c: 'a>(mut self, diff: PhotoRecordDiff<'c>) -> Self { self.rec_diff = diff; self } pub fn mark_missing(&mut self) { self.rec_diff.is_missing = true } pub fn into_diff<'c>(mut self) -> PhotoRecordDiff<'c> { self.commit_at_drop = false; let ret = self.rec_diff.to_owned(); drop(self); ret } } impl<'b, 'a: 'b> Drop for PhotoRecordPatch<'b, 'a> { fn drop(&mut self) { if !self.commit_at_drop { return; } let ptr = unsafe { self.ptr.as_mut() }; if *self.selected() && self.rec_diff.is_missing { self.status_mut().handle_local_missing(); } else { let is_dirty = self.rec_diff.is_dirty(); self.status_mut().handle_dirty_mark(is_dirty); } { let status = &self.rec_diff.status; if status.changed() && *status.current() == Committed { self.set_commit_time(Some(Utc::now())) } } let (rec, changed) = self.rec_diff.write(unsafe { self.rec.as_mut() }); if changed { unsafe { self.ptr.as_mut() }.modified_flag().set(); } self.rec_diff.selected.run_if_changed(|_o, _n| { ptr.index.flip_selected(rec); }); self.rec_diff.status.run_if_changed(|o, n| { ptr.index.mutate_status(rec.pid, o.clone(), n.clone()); }); } } impl<'b, 'a: 'b> TableRecordPatch<'b, 'a> for PhotoRecordPatch<'b, 'a> { type Table = PhotoTable; fn new(rec: TableRecordMut<'a, Self::Table>, ptr: &'b TableRefMut<'a, Self::Table>) -> Self { let rec_ptr = unsafe { NonNull::new(std::mem::transmute(rec as *const _)).unwrap() }; Self { commit_at_drop: true, rec_diff: PhotoRecordDiff::new(rec), rec: rec_ptr, ptr, } } }
use crate::prelude::*; use paste::paste; use std::borrow::Cow; use std::ptr::NonNull; #[derive(Clone, Debug)] pub struct Diff<'a, T: Clone> { old: Cow<'a, T>, new: Option<Cow<'a, T>>, } impl<'a, T: Clone> From<T> for Diff<'a, T> { fn from(v: T) -> Self { Self { old: Cow::Owned(v), new: None, } } } impl<'a, T: Clone> From<&'a T> for Diff<'a, T> { fn from(v: &'a T) -> Self { Self { old: Cow::Borrowed(v), new: None, } } } impl<'b, 'a: 'b, 'c: 'a, T: Clone> Diff<'a, T> { fn write(&self, slot: &mut T) { self.new.as_ref().map(|v| *slot = v.clone().into_owned()); } fn get_mut(&'b mut self) -> &'b mut T { if self.new.is_none() { self.new = Some(self.old.clone()); } self.new.as_mut().unwrap().to_mut() } pub fn set(&mut self, v: T) { *self.get_mut() = v; } fn current(&self) -> &T { self.new.as_ref().unwrap_or(&self.old) } fn to_owned(&'b self) -> Diff<'c, T> { Diff { old: Cow::Owned(self.old.clone().into_owned()), new: s
td::mem::transmute(rec as *const _)).unwrap() }; Self { commit_at_drop: true, rec_diff: PhotoRecordDiff::new(rec), rec: rec_ptr, ptr, } } }
elf .new .as_ref() .map(|x| Cow::Owned(x.clone().into_owned())), } } } impl<'a, T: Eq + Clone> Diff<'a, T> { fn changed(&self) -> bool { if matches!(self.old, Cow::Borrowed(_)) && matches!(self.new, None | Some(Cow::Borrowed(_))) { return false; } self.new .as_ref() .map(|new| new.ne(&self.old)) .unwrap_or(false) } fn run_if_changed<F>(&self, f: F) where F: FnOnce(&T, &T), { if self.changed() { f(&self.old, &self.new.as_ref().unwrap()) } } } macro_rules! fields { ($action: ident; $args: tt) => { fields!{@iter $action, $args, [ (metadata; PhotoMetadata), (file_hash; FileHash), (selected; bool), (status; PhotoRecordStatus), (commit_time; Option<DateTime<Utc>>) ]} }; (@iter define_struct, (), [ $(( $N: ident; $T: ty )),+ ]) => { #[derive(Debug)] pub struct PhotoRecordDiff<'a> { pub pid: PID, is_missing: bool, $( pub $N: Diff<'a, $T>, )+ } }; (@iter to_owned, ($self: ident), [ $(( $N: ident; $T: ty )),+ ]) => { PhotoRecordDiff { pid: $self.pid, is_missing: $self.is_missing, $( $N: $self.$N.to_owned(), )+ } }; (@iter new, ($arg: ident), [ $(( $N: ident; $T: ty )),+ ]) => { Self { pid: $arg.pid, is_missing: false, $( $N: (&$arg.$N).into(), )+ } }; (@iter $action: ident, $args: tt, [ $(( $N: ident; $T: ty )),+ ]) => { $( fields!(@call $action, $args, $N, $T); )+ }; (@call write, ($self: ident, $arg: ident, $changed: ident), $N: ident, $T: ty) => { if $self.$N.changed() { $self.$N.old.to_mut(); $self.$N.write(&mut $arg.$N); $changed = true; } }; (@call accessor, (), $N: ident, $T: ty) => { paste!{ pub fn $N(&self) -> &$T { self.rec_diff.$N.current() } pub fn [<set_ $N>](&mut self, v: $T) { *self.rec_diff.$N.get_mut() = v; } pub fn [<$N _mut>](&mut self) -> &mut $T { self.rec_diff.$N.get_mut() } pub fn [<with_ $N>](mut self, v: $T) -> Self { self.[<set_ $N>](v); self } pub fn [<set_ $N _with>]<F>(mut self, f: F) -> Self where F: FnOnce(&mut $T) { f(self.rec_diff.$N.get_mut()); self } } } } fields!(define_struct; ()); impl<'a> PhotoRecordDiff<'a> { fn write<'b, 'c>(&'b mut 
self, rec: &'c mut PhotoRecord) -> (&'c PhotoRecord, bool) { let mut changed = false; fields!(write; (self, rec, changed)); (rec, changed) } pub fn new(rec: &'a PhotoRecord) -> Self { fields!(new; (rec)) } fn is_dirty(&self) -> bool { self.file_hash.changed() || self.metadata.changed() } fn to_owned<'c: 'a>(&self) -> PhotoRecordDiff<'c> { fields!(to_owned; (self)) } } pub struct PhotoRecordPatch<'b, 'a: 'b> { commit_at_drop: bool, rec_diff: PhotoRecordDiff<'a>, rec: NonNull<PhotoRecord>, ptr: &'b TableRefMut<'a, PhotoTable>, } #[allow(dead_code)] impl<'b, 'a: 'b> PhotoRecordPatch<'b, 'a> { fields!(accessor; ()); pub(super) fn with_diff<'c: 'a>(mut self, diff: PhotoRecordDiff<'c>) -> Self { self.rec_diff = diff; self } pub fn mark_missing(&mut self) { self.rec_diff.is_missing = true } pub fn into_diff<'c>(mut self) -> PhotoRecordDiff<'c> { self.commit_at_drop = false; let ret = self.rec_diff.to_owned(); drop(self); ret } } impl<'b, 'a: 'b> Drop for PhotoRecordPatch<'b, 'a> { fn drop(&mut self) { if !self.commit_at_drop { return; } let ptr = unsafe { self.ptr.as_mut() }; if *self.selected() && self.rec_diff.is_missing { self.status_mut().handle_local_missing(); } else { let is_dirty = self.rec_diff.is_dirty(); self.status_mut().handle_dirty_mark(is_dirty); } { let status = &self.rec_diff.status; if status.changed() && *status.current() == Committed { self.set_commit_time(Some(Utc::now())) } } let (rec, changed) = self.rec_diff.write(unsafe { self.rec.as_mut() }); if changed { unsafe { self.ptr.as_mut() }.modified_flag().set(); } self.rec_diff.selected.run_if_changed(|_o, _n| { ptr.index.flip_selected(rec); }); self.rec_diff.status.run_if_changed(|o, n| { ptr.index.mutate_status(rec.pid, o.clone(), n.clone()); }); } } impl<'b, 'a: 'b> TableRecordPatch<'b, 'a> for PhotoRecordPatch<'b, 'a> { type Table = PhotoTable; fn new(rec: TableRecordMut<'a, Self::Table>, ptr: &'b TableRefMut<'a, Self::Table>) -> Self { let rec_ptr = unsafe { NonNull::new(s
random
[ { "content": "pub fn pt_access_mut<'a>() -> TableAccessMut<'a, PhotoTable> {\n\n PHOTO_TABLE.lock().unwrap().access_mut()\n\n}\n\n\n\n#[allow(dead_code)]\n\npub type PhotoTableAccess<'a> = TableAccess<'a, PhotoTable>;\n\npub type PhotoTableAccessMut<'a> = TableAccessMut<'a, PhotoTable>;\n\n\n\nimpl<'b, 'a: 'b> TableEntryTrait<'b, 'a, PhotoTable> for TableEntry<'b, 'a, PhotoTable> {\n\n type Patch = PhotoRecordPatch<'b, 'a>;\n\n}\n\n\n\nimpl<'a> TableAccess<'a, PhotoTable> {\n\n pub fn summary(&self) {\n\n let ptr = unsafe { self.0.as_mut() };\n\n let ntotal = ptr.pid2rec.len();\n\n let nunc = ptr.index.count_status(Uncommitted);\n\n let ncsel = ptr.index.count_selected(Committed);\n\n let ndsel = ptr.index.count_selected(CommittedButMissing);\n\n let nmsel = ptr.index.count_selected(CommittedButModified);\n", "file_path": "tools/butler/src/db/photos/access.rs", "rank": 0, "score": 136300.48115593544 }, { "content": "pub fn mpt_access_mut<'a>() -> TableAccessMut<'a, MountPointTable> {\n\n MOUNTPOINT_TABLE.lock().unwrap().access_mut()\n\n}\n", "file_path": "tools/butler/src/db/mounts/access.rs", "rank": 1, "score": 134487.91244530442 }, { "content": "/// Writes whether the pages in the range starting at `buffer` with a length of `length` bytes\n\n/// are resident in physical memory into `residency`. The size of `residency` must be at least\n\n/// `length / page_size`. Both `buffer` and `length` must be a multiple of the page size.\n\npub fn get_resident(buffer: *const u8, length: usize, residency: &mut [bool]) {\n\n use std::thread;\n\n\n\n let result = unsafe {\n\n // Note that the type here is a signed char, unlike the libc on other\n\n // platforms. 
The regular version of this function is in unix.rs.\n\n let residency_char = residency.as_mut_ptr() as *mut libc::c_char;\n\n libc::mincore(buffer as *mut libc::c_void, length, residency_char)\n\n };\n\n\n\n // Any error code except EAGAIN indicates a programming error.\n\n assert!(result == libc::EAGAIN || result == 0);\n\n\n\n // In the rare occasion that the kernel is busy, yield so we don't spam the kernel with\n\n // `mincore` calls, then try again.\n\n if result == libc::EAGAIN {\n\n thread::yield_now();\n\n get_resident(buffer, length, residency)\n\n }\n\n}\n", "file_path": "tools/butler/src/_vendors/filebuffer/macos.rs", "rank": 2, "score": 125619.64700800479 }, { "content": "/// See also `unix::get_resident`.\n\npub fn get_resident(_buffer: *const u8, _length: usize, residency: &mut [bool]) {\n\n // As far as I am aware, Windows does not expose a way to query whether pages are resident.\n\n // There is no equivalent of `mincore()`. The closest thing is `VirtualQuery()`, but the\n\n // `state` value in the `MEMORY_BASIC_INFORMATION` struct that it fills does not indicate\n\n // whether the page is resident.\n\n\n\n // Lie and pretend everything is resident.\n\n for x in residency {\n\n *x = true;\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/windows.rs", "rank": 3, "score": 125619.64700800479 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn get_resident(buffer: *const u8, length: usize, residency: &mut [bool]) {\n\n use std::thread;\n\n\n\n let result = unsafe {\n\n // Note: the libc on Mac OS uses a signed char instead of an unsigned\n\n // one, which is why this function is disabled on Mac OS. 
There is a\n\n // replacement in macos.rs.\n\n let residency_uchar = residency.as_mut_ptr() as *mut libc::c_uchar;\n\n libc::mincore(buffer as *mut libc::c_void, length, residency_uchar)\n\n };\n\n\n\n // Any error code except EAGAIN indicates a programming error.\n\n assert!(result == libc::EAGAIN || result == 0);\n\n\n\n // In the rare occasion that the kernel is busy, yield so we don't spam the kernel with\n\n // `mincore` calls, then try again.\n\n if result == libc::EAGAIN {\n\n thread::yield_now();\n\n get_resident(buffer, length, residency)\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/unix.rs", "rank": 4, "score": 125619.64700800479 }, { "content": "pub fn init_yansi() {\n\n use atty::*;\n\n use yansi::*;\n\n if !is(Stream::Stdout) {\n\n Paint::disable();\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/commands/mod.rs", "rank": 5, "score": 123416.7752595141 }, { "content": "pub fn write_bins<P: AsRef<Path>>(mut metas: Vec<CompressedMeta>, path: P) -> Result<()> {\n\n metas.sort_by_cached_key(|x| x.timestamp);\n\n use byteorder::{BigEndian, WriteBytesExt};\n\n let mut writer = BufWriter::new(File::create(path)?);\n\n for meta in metas {\n\n writer.write_uint::<BigEndian>(meta.pid.into(), 3)?;\n\n writer.write_u32::<BigEndian>(meta.timestamp)?;\n\n writer.write_u8(meta.h)?;\n\n writer.write_u8(meta.w)?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl PhotoGenerator {\n\n pub fn new<P: AsRef<Path>>(\n\n entry: &db::PhotoRecord,\n\n dst_dir: P,\n\n force: bool,\n\n quality: u8,\n\n ) -> Result<Self> {\n", "file_path": "tools/butler/src/commands/generate/generator.rs", "rank": 6, "score": 116387.79424451574 }, { "content": "pub fn handle_cli() -> Result<()> {\n\n let cli = Cli::parse();\n\n init_logger(&cli);\n\n init_yansi();\n\n dispatch_subcommands(cli.commands)\n\n}\n\n\n", "file_path": "tools/butler/src/commands/mod.rs", "rank": 7, "score": 115339.18678724486 }, { "content": "pub fn get_page_size() -> usize {\n\n // Fill the `SYSTEM_INFO` struct 
with zeroes. It will be filled by\n\n // `GetSystemInfo` later but Rust requires it to be initialized.\n\n let mut sysinfo: winapi::um::sysinfoapi::SYSTEM_INFO = unsafe { mem::zeroed() };\n\n unsafe { winapi::um::sysinfoapi::GetSystemInfo(&mut sysinfo); }\n\n sysinfo.dwPageSize as usize\n\n}\n", "file_path": "tools/butler/src/_vendors/filebuffer/windows.rs", "rank": 8, "score": 112174.02714244553 }, { "content": "pub fn get_page_size() -> usize {\n\n let page_size = unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize };\n\n\n\n // Assert that the page size is a power of two, which is assumed when the page size is used.\n\n assert!(page_size != 0);\n\n assert_eq!(0, page_size & (page_size - 1));\n\n\n\n page_size\n\n}\n", "file_path": "tools/butler/src/_vendors/filebuffer/unix.rs", "rank": 9, "score": 112174.02714244553 }, { "content": "pub fn print_photos<'a, I>(iter: I)\n\nwhere\n\n I: IntoIterator<Item = &'a PhotoRecord>,\n\n{\n\n print_table(iter.into_iter().map(PhotoRecordForDisplay::new))\n\n}\n", "file_path": "tools/butler/src/commands/util/display.rs", "rank": 10, "score": 101551.35302186565 }, { "content": "pub fn display_widgets<'a>(items: &Vec<Widget<'a>>) {\n\n for item in items {\n\n println!(\"{}\", item);\n\n }\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 11, "score": 97303.09097260222 }, { "content": "#[allow(dead_code)]\n\npub fn pt_access<'a>() -> TableAccess<'a, PhotoTable> {\n\n PHOTO_TABLE.lock().unwrap().access()\n\n}\n\n\n", "file_path": "tools/butler/src/db/photos/access.rs", "rank": 12, "score": 97110.66895474434 }, { "content": "pub fn mpt_access<'a>() -> TableAccess<'a, MountPointTable> {\n\n MOUNTPOINT_TABLE.lock().unwrap().access()\n\n}\n\n\n", "file_path": "tools/butler/src/db/mounts/access.rs", "rank": 13, "score": 95754.46511890771 }, { "content": "/// Requests the kernel to make the specified range of bytes resident in physical memory. 
`buffer`\n\n/// must be page-aligned.\n\npub fn prefetch(buffer: *const u8, length: usize) {\n\n let result = unsafe {\n\n libc::madvise(buffer as *mut libc::c_void, length, libc::MADV_WILLNEED)\n\n };\n\n\n\n // Any returned error code indicates a programming error, not a runtime error.\n\n assert_eq!(0, result);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/unix.rs", "rank": 14, "score": 94408.80518755516 }, { "content": "/// See also `unix::prefetch`.\n\npub fn prefetch(buffer: *const u8, length: usize) {\n\n let mut entry = winapi::um::memoryapi::WIN32_MEMORY_RANGE_ENTRY {\n\n VirtualAddress: buffer as *mut winapi::ctypes::c_void,\n\n NumberOfBytes: length as winapi::shared::basetsd::SIZE_T,\n\n };\n\n\n\n unsafe {\n\n let current_process_handle = winapi::um::processthreadsapi::GetCurrentProcess();\n\n winapi::um::memoryapi::PrefetchVirtualMemory(\n\n current_process_handle, // Prefetch for the current process.\n\n 1, &mut entry, // An array of length 1 that contains `entry`.\n\n 0 // Reserved flag that must be 0.\n\n );\n\n }\n\n\n\n // The return value of `PrefetchVirtualMemory` is ignored. MSDN says the function may fail if\n\n // the system is under memory pressure. 
(It is not entirely clear whether \"fail\" means\n\n // \"returns a nonzero value\", but I assume it does.)\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/windows.rs", "rank": 15, "score": 94408.80518755516 }, { "content": "pub fn unmap_file(buffer: *const u8, _length: usize) {\n\n let success = unsafe {\n\n winapi::um::memoryapi::UnmapViewOfFile(buffer as *mut winapi::ctypes::c_void)\n\n };\n\n assert!(success != 0);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/windows.rs", "rank": 16, "score": 93052.60135171853 }, { "content": "pub fn unmap_file(buffer: *const u8, length: usize) {\n\n let result = unsafe { libc::munmap(buffer as *mut libc::c_void, length) };\n\n\n\n // `munmap` only fails due to incorrect usage, which is a program error, not a runtime failure.\n\n assert!(result == 0);\n\n}\n\n\n\n/// Writes whether the pages in the range starting at `buffer` with a length of `length` bytes\n\n/// are resident in physical memory into `residency`. The size of `residency` must be at least\n\n/// `length / page_size`. 
Both `buffer` and `length` must be a multiple of the page size.\n", "file_path": "tools/butler/src/_vendors/filebuffer/unix.rs", "rank": 17, "score": 93052.60135171853 }, { "content": "pub fn serialize_widgets<'a>(items: Vec<Widget<'a>>) -> Vec<u8> {\n\n let total_size = items\n\n .iter()\n\n .fold(0usize, |accum, item| accum + item.inner.len());\n\n let mut result = Vec::with_capacity(total_size);\n\n for item in items {\n\n result.extend_from_slice(&item.inner);\n\n }\n\n result\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 18, "score": 90164.76186194728 }, { "content": "#[cfg(storage_encoding = \"utf16be\")]\n\n#[inline]\n\nfn write_utf16be_words(words: Vec<u16>, buf: &mut Vec<u8>) {\n\n for ch in words.into_iter() {\n\n buf.extend_from_slice(&ch.to_be_bytes())\n\n }\n\n}\n\n\n\n#[cfg(storage_encoding = \"utf16be\")]\n\nimpl<'a> Widget<'a> {\n\n #[inline]\n\n pub fn from_utf8_storage(storage: &'a [u8]) -> Result<(Widget<'a>, &'a [u8])> {\n\n let (mut this, rest) = Self::from_storage(storage)?;\n\n\n\n if this.header.body_is_text() {\n\n let utf16_words = utf8_buffer_to_utf16be_words(this.get_body_ptr());\n\n let new_body_len = utf16_words.len() * 2;\n\n this.header.body_len = new_body_len as u16;\n\n this.inner = {\n\n let mut s = Vec::<u8>::with_capacity(HEADER_SIZE + new_body_len);\n\n s.extend_from_slice(&storage[..HEADER_SIZE]);\n\n s[HEADER_SIZE - 2..HEADER_SIZE]\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 19, "score": 89929.21179407669 }, { "content": "pub fn parse_widgets<'a>(storage: &'a [u8]) -> Result<Vec<Widget<'a>>> {\n\n parse_until(storage, Widget::from_storage, <[u8]>::is_empty).map(|x| x.0)\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 20, "score": 88467.74058162325 }, { "content": "pub trait TableAccessMutExt<'a, T>\n\nwhere\n\n Self: DerefMut<Target = T> + 'a,\n\n{\n\n fn access_mut(self) -> TableAccessMut<'a, T>;\n\n}\n\n\n\nimpl<'a, T> TableAccessMutExt<'a, T> for std::sync::MutexGuard<'a, T> 
{\n\n fn access_mut(self) -> TableAccessMut<'a, T> {\n\n TableAccessMut(TableRef::Guard(self))\n\n }\n\n}\n\n\n\nimpl<'a, T> TableAccessMutExt<'a, T> for &'a mut T {\n\n fn access_mut(self) -> TableAccessMut<'a, T> {\n\n TableAccessMut(TableRef::Ptr(self, PhantomData))\n\n }\n\n}\n", "file_path": "tools/butler/src/db/helpers/access.rs", "rank": 21, "score": 86527.50693113083 }, { "content": "struct MountPointScanner<'b, 'a: 'b> {\n\n loc: DirectoryLocation,\n\n access: &'b PhotoTableAccessMut<'a>,\n\n}\n\n\n\nimpl<'b, 'a: 'b> MountPointScanner<'b, 'a> {\n\n fn new(mp: &MountPointRecord, access: &'b PhotoTableAccessMut<'a>) -> Result<Self> {\n\n Ok(Self {\n\n loc: mp.into(),\n\n access,\n\n })\n\n }\n\n fn sorted_files(&self) -> Result<impl IntoIterator<Item = DirEntry>> {\n\n use std::os::unix::fs::DirEntryExt;\n\n let mut files = self\n\n .loc\n\n .path\n\n .read_dir()?\n\n .filter(is_file)\n\n .filter(is_supported_image)\n", "file_path": "tools/butler/src/commands/index/scanner.rs", "rank": 22, "score": 86062.73302197698 }, { "content": "pub fn mod_widgets<'a>(items: &'a [u8], mods: &'a [u8]) -> Result<Vec<Widget<'a>>> {\n\n let mut old_items = parse_widgets(items)?.into_iter().peekable();\n\n let (adds, dels) = parse_mods(mods)?;\n\n let mut adds = adds.into_iter().peekable();\n\n let mut dels = dels.into_iter().peekable();\n\n let mut new_items = vec![];\n\n\n\n let mut nneg = 0;\n\n let mut npos = 0;\n\n let mut prev_dt_base = 0u32;\n\n let mut ended = false;\n\n\n\n loop {\n\n let mut pushed = false;\n\n match (adds.peek(), dels.peek(), old_items.peek()) {\n\n (None, None, None) => ended = true,\n\n (_, Some(del), None) => err!(\"invalid state: del: {:?} old_item: None\", &del),\n\n (None, None, Some(_)) => {\n\n new_items.push(old_items.next().unwrap());\n\n pushed = true;\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 23, "score": 81852.59094058065 }, { "content": "pub fn map_file(file: fs::File) -> io::Result<(*const u8, usize, PlatformData)> {\n\n 
let file_handle = file.as_raw_handle();\n\n let length = try!(file.metadata()).len();\n\n\n\n if length > usize::max_value() as u64 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"file is larger than address space\"));\n\n }\n\n\n\n let mut platform_data = PlatformData {\n\n file: file,\n\n mapping_handle: ptr::null_mut(),\n\n };\n\n\n\n // Don't try to map anything if the file is empty.\n\n if length == 0 {\n\n return Ok((ptr::null(), 0, platform_data));\n\n }\n\n\n\n // Memory-mapping a file on Windows is a two-step process: first we create a file mapping\n\n // object, and then we create a view of that mapping in the virtual address space.\n", "file_path": "tools/butler/src/_vendors/filebuffer/windows.rs", "rank": 24, "score": 79617.35053148874 }, { "content": "pub fn map_file(file: fs::File) -> io::Result<(*const u8, usize, PlatformData)> {\n\n let fd = file.as_raw_fd();\n\n let length = file.metadata()?.len();\n\n\n\n if length > usize::max_value() as u64 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"file is larger than address space\"));\n\n }\n\n\n\n // Don't try to map anything if the file is empty.\n\n if length == 0 {\n\n return Ok((ptr::null(), 0, PlatformData));\n\n }\n\n\n\n let result = unsafe {\n\n libc::mmap(ptr::null_mut(), length as usize, libc::PROT_READ, libc::MAP_PRIVATE | libc::MAP_POPULATE | libc::MAP_NONBLOCK, fd, 0)\n\n };\n\n\n\n if result == libc::MAP_FAILED {\n\n Err(io::Error::last_os_error())\n\n } else {\n\n Ok((result as *const u8, length as usize, PlatformData))\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/unix.rs", "rank": 25, "score": 79617.35053148874 }, { "content": "pub trait TableEntryTrait<'b, 'a: 'b, T: Table> {\n\n type Patch: TableRecordPatch<'b, 'a, Table = T>;\n\n}\n\n\n\npub(super) type Patch<'b, 'a, T> = <TableEntry<'b, 'a, T> as TableEntryTrait<'b, 'a, T>>::Patch;\n\n\n\nimpl<'b, 'a: 'b, T: Table> TableEntry<'b, 'a, T> {\n\n pub(super) fn with_key<K>(k: K, ptr: &'b 
TableRef<'a, T, &'a mut T>) -> Self\n\n where\n\n K: TableKey<T>,\n\n {\n\n let handle = k.query_in(unsafe { ptr.as_mut() });\n\n Self { ptr, handle }\n\n }\n\n pub fn remove(self) -> Option<<T as Table>::Record> {\n\n let ptr = unsafe { self.ptr.as_mut() };\n\n self.handle.into_occupied().map(|x| {\n\n ptr.modified_flag().set();\n\n ptr.remove(x)\n\n })\n", "file_path": "tools/butler/src/db/helpers/entry.rs", "rank": 26, "score": 72188.31349266907 }, { "content": "#[derive(Debug)]\n\nstruct Header {\n\n ty: char,\n\n dt: Datetime,\n\n body_len: u16,\n\n}\n\n\n\nimpl Header {\n\n fn from_storage(storage: &[u8]) -> Result<(Header, &[u8])> {\n\n let a = storage;\n\n let (ty, a) = read_be!(u8char, a);\n\n let (dt, a) = Datetime::from_storage(a)?;\n\n let (body_len, a) = read_be!(u16, a);\n\n Ok((Self { ty, dt, body_len }, a))\n\n }\n\n #[inline]\n\n fn body_is_text(&self) -> bool {\n\n match self.ty {\n\n 'q' | 'm' => true,\n\n _ => false,\n\n }\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 27, "score": 68765.81651460963 }, { "content": "#[derive(Debug)]\n\nstruct Datetime {\n\n base: u32,\n\n offset: i8,\n\n}\n\n\n\nimpl PartialEq for Datetime {\n\n fn eq(&self, other: &Self) -> bool {\n\n (self.base, self.offset) == (other.base, other.offset)\n\n }\n\n}\n\n\n\nimpl PartialOrd for Datetime {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n (self.base, self.offset).partial_cmp(&(other.base, other.offset))\n\n }\n\n}\n\n\n\nimpl Datetime {\n\n fn from_storage(storage: &[u8]) -> Result<(Datetime, &[u8])> {\n\n let a = storage;\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 28, "score": 68765.81651460963 }, { "content": "#[derive(Parser)]\n\n#[clap(author, version, about, long_about = None)]\n\nstruct Cli {\n\n #[clap(short, long, parse(from_occurrences), help = \"Less verbosity\")]\n\n quiet: usize,\n\n\n\n #[clap(short, long, parse(from_occurrences), help = \"More verbosity\")]\n\n verbose: usize,\n\n\n\n #[clap(subcommand)]\n\n 
commands: Commands,\n\n}\n\n\n", "file_path": "tools/butler/src/commands/mod.rs", "rank": 29, "score": 68765.53999928666 }, { "content": "struct EncodeAsThumbnail {\n\n subdir: &'static str,\n\n height: u32,\n\n width: u32,\n\n}\n\n\n", "file_path": "tools/butler/src/commands/generate/generator.rs", "rank": 30, "score": 66916.45897542797 }, { "content": "struct EncodeAsOriginal {\n\n subdir: &'static str,\n\n}\n\n\n", "file_path": "tools/butler/src/commands/generate/generator.rs", "rank": 31, "score": 66916.45897542797 }, { "content": "#[allow(drop_bounds)]\n\npub trait TableRecordPatch<'b, 'a>: Drop + Sized {\n\n type Table: Table;\n\n fn new(rec: TableRecordMut<'a, Self::Table>, ptr: &'b TableRefMut<'a, Self::Table>) -> Self;\n\n fn commit(self) {\n\n drop(self);\n\n }\n\n}\n", "file_path": "tools/butler/src/db/helpers/patch.rs", "rank": 32, "score": 65951.28745286039 }, { "content": "#[test]\n\nfn make_resident() {\n\n let fbuffer = FileBuffer::open(\"src/lib.rs\").unwrap();\n\n\n\n // Touch the first page to make it resident.\n\n assert_eq!(&fbuffer[3..13], &b\"Filebuffer\"[..]);\n\n\n\n // Now at least that part should be resident.\n\n assert_eq!(fbuffer.resident_len(3, 10), 10);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 33, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn verify_round_up_to() {\n\n assert_eq!(1024, round_up_to(23, 1024));\n\n assert_eq!(1024, round_up_to(1024, 1024));\n\n assert_eq!(2048, round_up_to(1025, 1024));\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 34, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn open_file() {\n\n let fbuffer = FileBuffer::open(\"src/lib.rs\");\n\n assert!(fbuffer.is_ok());\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 35, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn drop_after_leak() {\n\n let mut bytes = &[0u8][..];\n\n assert_eq!(bytes[0], 0);\n\n {\n\n let fbuffer = 
FileBuffer::open(\"src/lib.rs\").unwrap();\n\n bytes = fbuffer.leak();\n\n }\n\n assert_eq!(&bytes[3..13], &b\"Filebuffer\"[..]);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 36, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn prefetch_is_not_harmful() {\n\n let fbuffer = FileBuffer::open(\"src/lib.rs\").unwrap();\n\n\n\n // It is impossible to test that this actually works without root access to instruct the kernel\n\n // to drop its caches, but at least we can verify that calling `prefetch` is not harmful.\n\n fbuffer.prefetch(0, fbuffer.len());\n\n\n\n // Reading from the file should still work as normal.\n\n assert_eq!(&fbuffer[3..13], &b\"Filebuffer\"[..]);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 37, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn verify_round_down_to() {\n\n assert_eq!(0, round_down_to(23, 1024));\n\n assert_eq!(1024, round_down_to(1024, 1024));\n\n assert_eq!(1024, round_down_to(1025, 1024));\n\n}\n\n\n\nimpl FileBuffer {\n\n /// Maps the file at `path` into memory.\n\n pub fn open<P: AsRef<Path>>(path: P) -> io::Result<FileBuffer> {\n\n // Open the `fs::File` so we get all of std's error handling for free, then use it to\n\n // extract the file descriptor. The file is closed again when `map_file` returns on\n\n // Unix-ish platforms, but `mmap` only requires the descriptor to be open for the `mmap`\n\n // call, so this is fine. On Windows, the file must be kept open for the lifetime of the\n\n // mapping, so `map_file` moves the file into the platform data.\n\n let mut open_opts = fs::OpenOptions::new();\n\n open_opts.read(true);\n\n\n\n // TODO: On Windows, set `share_mode()` to read-only. 
This requires the\n\n // `open_options_ext` feature that is currently unstable, but it is\n\n // required to ensure that a different process does not suddenly modify\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 38, "score": 65714.36517134965 }, { "content": "#[test]\n\nfn page_size_at_least_4096() {\n\n // There is no reason why the page size cannot be smaller, it is just that in practice there\n\n // is no platform with a smaller page size, so this tests that `get_page_size()` returns\n\n // a plausible value.\n\n assert!(get_page_size() >= 4096);\n\n}\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 39, "score": 64868.84871667337 }, { "content": "#[test]\n\nfn open_empty_file_is_fine() {\n\n FileBuffer::open(\"src/empty_file_for_testing.rs\").unwrap();\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 40, "score": 64061.99329794316 }, { "content": "#[test]\n\nfn empty_file_prefetch_is_fine() {\n\n let fbuffer = FileBuffer::open(\"src/empty_file_for_testing.rs\").unwrap();\n\n fbuffer.prefetch(0, 0);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 41, "score": 64061.99329794316 }, { "content": "#[test]\n\nfn fbuffer_can_be_moved_into_thread() {\n\n use std::thread;\n\n\n\n let fbuffer = FileBuffer::open(\"src/lib.rs\").unwrap();\n\n thread::spawn(move || {\n\n assert_eq!(&fbuffer[3..13], &b\"Filebuffer\"[..]);\n\n });\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 42, "score": 64061.99329794316 }, { "content": "#[test]\n\nfn empty_file_deref_is_fine() {\n\n let fbuffer = FileBuffer::open(\"src/empty_file_for_testing.rs\").unwrap();\n\n assert_eq!(fbuffer.iter().any(|_| true), false);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 43, "score": 64061.99329794316 }, { "content": " pub trait Accumulable {\n\n fn init() -> Self;\n\n fn accum(x: Self, y: Self) -> Self;\n\n }\n\n\n\n impl<T: 
Eq + std::hash::Hash + Clone> Accumulable for HashSet<T> {\n\n fn init() -> Self {\n\n HashSet::new()\n\n }\n\n fn accum(mut x: Self, y: Self) -> Self {\n\n x.extend(y);\n\n x\n\n }\n\n }\n\n\n\n impl<T> Accumulable for Vec<T> {\n\n fn init() -> Self {\n\n vec![]\n\n }\n\n fn accum(mut x: Self, mut y: Self) -> Self {\n", "file_path": "tools/butler/src/util.rs", "rank": 44, "score": 63770.50798653382 }, { "content": " pub trait FoldInto {\n\n type Target;\n\n\n\n fn fold_into(self) -> Self::Target;\n\n }\n\n\n\n impl<I, T> FoldInto for I\n\n where\n\n I: Iterator<Item = T>,\n\n T: Accumulable,\n\n {\n\n type Target = T;\n\n\n\n fn fold_into(self) -> T {\n\n self.fold(<T as Accumulable>::init(), <T as Accumulable>::accum)\n\n }\n\n }\n\n}\n\n\n\npub mod tabled {\n", "file_path": "tools/butler/src/util.rs", "rank": 45, "score": 63770.50798653382 }, { "content": "fn main() -> Result<()> {\n\n goto_work_directory()?;\n\n mpt_access_mut().initialize(DEFAULT_MOUNTPOINTS_DB_PATH)?;\n\n commands::handle_cli()?;\n\n mpt_access().finalize(DEFAULT_MOUNTPOINTS_DB_PATH)\n\n}\n", "file_path": "tools/butler/src/main.rs", "rank": 46, "score": 63295.031828091036 }, { "content": "#[test]\n\nfn empty_file_has_zero_resident_len() {\n\n let fbuffer = FileBuffer::open(\"src/empty_file_for_testing.rs\").unwrap();\n\n assert_eq!(fbuffer.resident_len(0, 0), 0);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 47, "score": 63291.20653336025 }, { "content": "#[test]\n\nfn fbuffer_can_be_shared_among_threads() {\n\n use std::sync;\n\n use std::thread;\n\n\n\n let fbuffer = FileBuffer::open(\"src/lib.rs\").unwrap();\n\n let buffer1 = sync::Arc::new(fbuffer);\n\n let buffer2 = buffer1.clone();\n\n thread::spawn(move || {\n\n assert_eq!(&buffer2[3..13], &b\"Filebuffer\"[..]);\n\n });\n\n assert_eq!(&buffer1[17..45], &b\"Fast and simple file reading\"[..]);\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 48, "score": 
63291.20653336025 }, { "content": "pub trait Canonicalize {\n\n fn resolve(self) -> CanonicalizedPath;\n\n}\n\n\n\nimpl<P: AsRef<Path>> Canonicalize for P {\n\n fn resolve(self) -> CanonicalizedPath {\n\n CanonicalizedPath::new(self)\n\n }\n\n}\n\n\n\nimpl Deref for CanonicalizedPath {\n\n type Target = Arc<Path>;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Hash, Clone)]\n\npub struct DirectoryLocation {\n\n pub mpid: Uuid,\n", "file_path": "tools/butler/src/locations/mod.rs", "rank": 49, "score": 62833.010394073994 }, { "content": "struct ClassifiedResult<'a> {\n\n existing_pids: HashSet<PID>,\n\n new_lphotos: Vec<LocalPhoto>,\n\n photo_record_diffs: Vec<PhotoRecordDiff<'a>>,\n\n}\n\n\n\nimpl<'a> ClassifiedResult<'a> {\n\n fn new() -> Self {\n\n Self {\n\n existing_pids: HashSet::new(),\n\n new_lphotos: vec![],\n\n photo_record_diffs: vec![],\n\n }\n\n }\n\n fn into_tuple(self) -> (HashSet<PID>, Vec<LocalPhoto>, Vec<PhotoRecordDiff<'a>>) {\n\n (\n\n self.existing_pids,\n\n self.new_lphotos,\n\n self.photo_record_diffs,\n\n )\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/commands/index/scanner.rs", "rank": 50, "score": 62582.6664124916 }, { "content": "pub trait Table {\n\n type PrimaryKey: Ord + Clone;\n\n type Record: TableRecord<Table = Self>;\n\n type Index: TableIndex<Table = Self>;\n\n\n\n fn insert(&mut self, rec: Self::Record) -> &mut Self::Record {\n\n self.index_mut().insert(&rec);\n\n self.treemap_mut()\n\n .entry(rec.primary_key().clone())\n\n .or_insert(rec)\n\n }\n\n fn remove(&mut self, slot: Occupied<'_, Self>) -> Self::Record {\n\n self.index_mut().remove(slot.get());\n\n slot.remove()\n\n }\n\n fn treemap_mut(&mut self) -> &mut BTreeMap<Self::PrimaryKey, Self::Record>;\n\n fn index_mut(&mut self) -> &mut Self::Index;\n\n fn modified_flag(&self) -> &AtomicFlag;\n\n}\n\n\n", "file_path": "tools/butler/src/db/helpers/table.rs", "rank": 51, "score": 61940.43364775347 
}, { "content": "#[derive(Tabled)]\n\nstruct PhotoRecordForDisplay<'a> {\n\n #[tabled(rename = \"PID\")]\n\n pid: &'a PID,\n\n #[tabled(rename = \"PATH\", display_with = \"display_location\")]\n\n location: &'a Arc<FileLocation>,\n\n #[tabled(rename = \"Status\", display_with = \"display_status\")]\n\n status: &'a PhotoRecordStatus,\n\n #[tabled(rename = \"Selected\", display_with = \"display_bool\")]\n\n selected: &'a bool,\n\n}\n\n\n\nimpl<'a> PhotoRecordForDisplay<'a> {\n\n fn new(rec: &'a PhotoRecord) -> Self {\n\n Self {\n\n pid: &rec.pid,\n\n location: &rec.location,\n\n status: &rec.status,\n\n selected: &rec.selected,\n\n }\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/commands/util/display.rs", "rank": 52, "score": 61724.66222416842 }, { "content": "fn goto_work_directory() -> Result<()> {\n\n let mut curdir = env::current_dir()?;\n\n loop {\n\n let try_config_path = curdir.join(\"config.json\");\n\n if try_config_path.exists() {\n\n break;\n\n };\n\n if let Some(p) = curdir.parent() {\n\n curdir = PathBuf::from(p);\n\n } else {\n\n panic!(\"cannot find root directory of project omoyde (the location that config.json lies in)\");\n\n }\n\n }\n\n env::set_current_dir(&curdir)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/butler/src/main.rs", "rank": 53, "score": 61476.34161176062 }, { "content": "pub trait TableRecord {\n\n type Table: Table;\n\n fn primary_key(&self) -> &<Self::Table as Table>::PrimaryKey;\n\n}\n\n\n", "file_path": "tools/butler/src/db/helpers/table.rs", "rank": 54, "score": 61089.624668398144 }, { "content": "pub trait TableIndex {\n\n type Table: Table;\n\n\n\n fn remove(&mut self, rec: &<Self::Table as Table>::Record);\n\n fn insert(&mut self, rec: &<Self::Table as Table>::Record);\n\n fn build(\n\n &mut self,\n\n treemap: &BTreeMap<<Self::Table as Table>::PrimaryKey, <Self::Table as Table>::Record>,\n\n ) {\n\n for rec in treemap.values() {\n\n self.insert(rec)\n\n }\n\n }\n\n}\n", "file_path": 
"tools/butler/src/db/helpers/index.rs", "rank": 55, "score": 61089.624668398144 }, { "content": " pub trait TableIO<'a> {\n\n fn load_from_path<P: AsRef<Path>>(&'a mut self, path: P) -> Result<()>;\n\n fn save_to_path<P: AsRef<Path>>(&self, path: P) -> Result<()>;\n\n }\n\n\n\n impl<'a, T: serde::de::DeserializeOwned + Serialize + Table> TableIO<'a> for T {\n\n fn load_from_path<P: AsRef<Path>>(&'a mut self, path: P) -> Result<()> {\n\n let path = path.as_ref();\n\n if !path.exists() {\n\n fs::create_dir_all(path.parent().unwrap())?;\n\n return Ok(());\n\n }\n\n let file = File::open(path)?;\n\n let reader = BufReader::new(file);\n\n let desered: T = bincode::deserialize_from(reader)\n\n .map_err(|e| anyhow!(\"Error reading {}: {}\", path.display(), e))?;\n\n mem::drop(mem::replace(self, desered));\n\n Ok(())\n\n }\n\n fn save_to_path<P: AsRef<Path>>(&self, path: P) -> Result<()> {\n", "file_path": "tools/butler/src/util.rs", "rank": 56, "score": 59627.47157169836 }, { "content": "fn init_logger(cli: &Cli) {\n\n use simplelog::*;\n\n\n\n let verbosity: i8 = {\n\n let default = 3i8;\n\n let verboser = cli.verbose as i8;\n\n let quieter = cli.quiet as i8;\n\n if verboser != 0 && quieter != 0 {\n\n Cli::command()\n\n .error(\n\n ErrorKind::ArgumentConflict,\n\n \"cannot use --quiet with --verbose\",\n\n )\n\n .exit()\n\n }\n\n default + verboser - quieter\n\n };\n\n\n\n CombinedLogger::init(vec![TermLogger::new(\n\n match verbosity {\n", "file_path": "tools/butler/src/commands/mod.rs", "rank": 57, "score": 57455.10094122236 }, { "content": "pub trait TableAccessExt<'a, T>\n\nwhere\n\n Self: Deref<Target = T> + 'a,\n\n{\n\n fn access(self) -> TableAccess<'a, T>;\n\n}\n\n\n\nimpl<'a, T> TableAccessExt<'a, T> for std::sync::MutexGuard<'a, T> {\n\n fn access(self) -> TableAccess<'a, T> {\n\n TableAccess(TableRef::Guard(self))\n\n }\n\n}\n\n\n\nimpl<'a, T> TableAccessExt<'a, T> for &'a T {\n\n fn access(self) -> TableAccess<'a, T> {\n\n TableAccess(TableRef::Ptr(\n\n 
unsafe { std::mem::transmute(self) },\n\n PhantomData,\n\n ))\n\n }\n", "file_path": "tools/butler/src/db/helpers/access.rs", "rank": 58, "score": 53805.04048649523 }, { "content": "pub trait TableKey<T: Table> {\n\n fn query_in<'a, 'b>(&'a self, table: &'b mut T) -> TableHandle<'b, T>;\n\n fn query_ref_in<'a, 'b>(&'a self, table: &'b mut T) -> Option<&'b <T as Table>::Record> {\n\n self.query_in(table).into_mut().map(|x| &*x)\n\n }\n\n}\n\n\n\npub enum TableRef<'a, T, M: 'a> {\n\n Ptr(*mut T, PhantomData<M>),\n\n Guard(std::sync::MutexGuard<'a, T>),\n\n}\n\n\n\nimpl<'b, 'a: 'b, M: 'a, T> TableRef<'a, T, M> {\n\n #[inline(always)]\n\n #[allow(mutable_transmutes)]\n\n pub(in crate::db) unsafe fn as_mut(&'b self) -> &'a mut T {\n\n use TableRef::*;\n\n match self {\n\n Ptr(x, _) => &mut **x,\n\n Guard(x) => std::mem::transmute(x.deref()),\n\n }\n\n }\n\n}\n\n\n\npub type TableRefMut<'a, T> = TableRef<'a, T, &'a mut T>;\n", "file_path": "tools/butler/src/db/helpers/table.rs", "rank": 59, "score": 53805.04048649523 }, { "content": "fn display_bool(val: &bool) -> String {\n\n match val {\n\n true => Paint::green(\"Yes\"),\n\n false => Paint::new(\"No\").dimmed(),\n\n }\n\n .to_string()\n\n}\n\n\n", "file_path": "tools/butler/src/commands/util/display.rs", "rank": 60, "score": 53409.10447529266 }, { "content": "#[inline]\n\nfn parse_until<'a, T, Parse, Predicate>(\n\n storage: &'a [u8],\n\n parse: Parse,\n\n stop: Predicate,\n\n) -> Result<(Vec<T>, &'a [u8])>\n\nwhere\n\n T: 'a,\n\n Parse: Fn(&'a [u8]) -> Result<(T, &'a [u8])>,\n\n Predicate: Fn(&[u8]) -> bool,\n\n{\n\n let mut buf = storage;\n\n let mut res = vec![];\n\n while !stop(buf) {\n\n let (item, new_buf) = parse(buf)?;\n\n buf = new_buf;\n\n res.push(item);\n\n }\n\n Ok((res, buf))\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 61, "score": 53013.10395910216 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n let crate_dir = env!(\"CARGO_MANIFEST_DIR\");\n\n let env_path = 
PathBuf::from(crate_dir).join(\".env\");\n\n\n\n println!(\"cargo:rerun-if-changed={}\", env_path.display());\n\n\n\n let reader = BufReader::new(File::open(env_path)?);\n\n for res in reader.lines() {\n\n let line = res?;\n\n println!(\"cargo:rustc-env={}\", line);\n\n\n\n if line.starts_with(\"OMOYDE_SYSTEM_WIDGET_ENCODING_RUST=\") {\n\n let (_, encoding) = line.split_once('=').unwrap();\n\n match encoding {\n\n \"utf8\" => (),\n\n \"utf16be\" => (),\n\n _ => panic!(\"unsupported encoding: {}\", encoding),\n\n };\n\n println!(\"cargo:rustc-cfg=storage_encoding=\\\"{}\\\"\", encoding);\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "lib/widget-core/build.rs", "rank": 62, "score": 53013.10395910216 }, { "content": "fn display_status(status: &PhotoRecordStatus) -> String {\n\n match status {\n\n Committed => Paint::green(\"committed\"),\n\n Uncommitted => Paint::new(\"untracked\").dimmed(),\n\n CommittedButModified => Paint::yellow(\"modified\"),\n\n CommittedButMissing => Paint::red(\"missing\"),\n\n }\n\n .to_string()\n\n}\n\n\n", "file_path": "tools/butler/src/commands/util/display.rs", "rank": 63, "score": 51966.47665948681 }, { "content": "fn display_location(loc: &Arc<FileLocation>) -> String {\n\n loc.filename.display().to_string()\n\n}\n\n\n", "file_path": "tools/butler/src/commands/util/display.rs", "rank": 64, "score": 49992.8339599832 }, { "content": "#[cfg(storage_encoding = \"utf16be\")]\n\n#[inline]\n\nfn utf8_buffer_to_utf16be_words(buf: &[u8]) -> Vec<u16> {\n\n String::from_utf8_lossy(buf).encode_utf16().collect()\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 65, "score": 49992.8339599832 }, { "content": "/// Rounds `size` up to the nearest multiple of `power_of_two`.\n\nfn round_up_to(size: usize, power_of_two: usize) -> usize {\n\n (size + (power_of_two - 1)) & !(power_of_two - 1)\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 66, "score": 48378.72297693799 }, { "content": "/// Rounds `size` down to 
the nearest multiple of `power_of_two`.\n\nfn round_down_to(size: usize, power_of_two: usize) -> usize {\n\n size & !(power_of_two - 1)\n\n}\n\n\n", "file_path": "tools/butler/src/_vendors/filebuffer/mod.rs", "rank": 67, "score": 48378.72297693799 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let path = args().nth(1).unwrap();\n\n let file = File::open(path)?;\n\n let mut buf = vec![];\n\n BufReader::new(file).read_to_end(&mut buf)?;\n\n display_widgets(&parse_widgets(&buf)?);\n\n Ok(())\n\n}\n", "file_path": "tools/widget-decode/src/main.rs", "rank": 68, "score": 47739.64696153048 }, { "content": "#[inline]\n\nfn split<T>(storage: &[u8]) -> Result<(&[u8], &[u8])> {\n\n let len = std::mem::size_of::<T>();\n\n checked_split!(storage, len)\n\n}\n\n\n\nmacro_rules! read_be {\n\n (u8char, $a: expr) => {{\n\n let (v, a) = split::<u8>($a)?;\n\n (char::from(v[0]), a)\n\n }};\n\n ($ty: tt, $a: expr) => {{\n\n let (v, a) = split::<$ty>($a)?;\n\n ($ty::from_be_bytes(v.try_into().unwrap()), a)\n\n }};\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 69, "score": 47739.64696153048 }, { "content": "fn is_file(file: &std::io::Result<DirEntry>) -> bool {\n\n file.as_ref()\n\n .ok()\n\n .map(|f| f.file_type().ok())\n\n .flatten()\n\n .map(|t| t.is_file())\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "tools/butler/src/commands/index/scanner.rs", "rank": 70, "score": 46358.116687769 }, { "content": "fn is_supported_image(file: &std::io::Result<DirEntry>) -> bool {\n\n file.as_ref()\n\n .ok()\n\n .map(|f| util::fs::is_supported_image(&f.path()))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "tools/butler/src/commands/index/scanner.rs", "rank": 71, "score": 45709.86891431996 }, { "content": "fn parse_mods(storage: &[u8]) -> Result<(Vec<Widget>, Vec<Datetime>)> {\n\n let (adds, rest) = parse_until(storage, Widget::from_utf8_storage, |buf| buf[0] == 0)?;\n\n let (dels, _) = parse_until(&rest[1..], Datetime::from_storage, 
<[u8]>::is_empty)?;\n\n Ok((adds, dels))\n\n}\n\n\n", "file_path": "lib/widget-core/src/lib.rs", "rank": 72, "score": 45220.69656298096 }, { "content": "class ClientFileTooOld(OmoydeException):\n", "file_path": "api/app/errors.py", "rank": 73, "score": 35879.128859944416 }, { "content": "export const E_CLIENT_FILE_TOO_OLD = \"E1001\"\n", "file_path": "web/src/services/errors.js", "rank": 74, "score": 34275.233217783345 }, { "content": " derive_select_fn() {\n\n let $selected = null\n\n return ($item) => {\n\n if ($item === $selected) return false\n\n if ($selected) $selected.classList.remove(\"active\")\n\n $item.classList.add(\"active\")\n\n $selected = $item\n\n return true\n\n }\n", "file_path": "scripts/static/picker/main.js", "rank": 75, "score": 32977.113288577784 }, { "content": " }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]\n\npub enum PhotoRecordStatus {\n\n Committed,\n\n CommittedButMissing,\n\n CommittedButModified,\n\n Uncommitted,\n\n}\n\n\n\npub use PhotoRecordStatus::*;\n\n\n\nimpl PhotoRecordStatus {\n\n pub fn handle_dirty_mark(&mut self, dirty: bool) {\n\n let old = self.clone();\n\n let new = match (old, dirty) {\n\n (Uncommitted, _) => Uncommitted,\n\n (Committed, true) => CommittedButModified,\n\n (Committed, false) => Committed,\n", "file_path": "tools/butler/src/db/photos/misc.rs", "rank": 80, "score": 17.039194153105345 }, { "content": "use widget_core;\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub struct FFIVec {\n\n len: usize,\n\n data: *const u8,\n\n storage: *mut Vec<u8>,\n\n}\n\n\n\nimpl FFIVec {\n\n unsafe fn raw_to_slice<'a>(ptr: *mut Self) -> &'a [u8] {\n\n let v = Box::from_raw(ptr);\n\n let slice = core::slice::from_raw_parts(v.data, v.len);\n\n std::mem::forget(v);\n\n slice\n\n }\n\n fn from_vec(vec: Vec<u8>) -> Box<Self> {\n\n let vec = Box::new(vec);\n\n Box::new(Self {\n", "file_path": "api/widget-pybind/src/lib.rs", "rank": 81, "score": 16.513119252885534 }, { "content": "use 
std::hash::{Hash, Hasher};\n\nuse std::lazy::SyncOnceCell;\n\n\n\nuse crate::prelude::*;\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Hash, Clone, PartialOrd, Ord)]\n\npub struct CanonicalizedPath(Arc<Path>);\n\n\n\nimpl CanonicalizedPath {\n\n pub fn new<P: AsRef<Path>>(path: P) -> Self {\n\n Self(path.as_ref().canonicalize().unwrap().into())\n\n }\n\n}\n\n\n\nimpl fmt::Display for CanonicalizedPath {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0.display())\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/locations/mod.rs", "rank": 82, "score": 15.935371748025384 }, { "content": "use crate::prelude::*;\n\nuse crate::util::sync::AtomicFlag;\n\n\n\nlazy_static! {\n\n pub static ref MOUNTPOINT_TABLE: Mutex<MountPointTable> = Mutex::new(MountPointTable::new());\n\n}\n\n\n\nimpl Table for MountPointTable {\n\n type PrimaryKey = CanonicalizedPath;\n\n type Record = MountPointRecord;\n\n type Index = MountPointTableIndex;\n\n\n\n fn index_mut(&mut self) -> &mut MountPointTableIndex {\n\n &mut self.index\n\n }\n\n\n\n fn treemap_mut(&mut self) -> &mut BTreeMap<Self::PrimaryKey, Self::Record> {\n\n &mut self.path2rec\n\n }\n\n\n", "file_path": "tools/butler/src/db/mounts/table.rs", "rank": 83, "score": 14.989004641057104 }, { "content": " use serde::de::Deserialize;\n\n use serde::ser::Serialize;\n\n use std::fmt;\n\n use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};\n\n\n\n #[derive(Debug, Default)]\n\n pub struct AtomicFlag(AtomicBool);\n\n\n\n impl AtomicFlag {\n\n pub fn new() -> Self {\n\n Self(AtomicBool::new(false))\n\n }\n\n\n\n pub fn set(&self) {\n\n self.0.store(true, Ordering::Relaxed);\n\n }\n\n\n\n pub fn get(&self) -> bool {\n\n self.0.load(Ordering::Relaxed)\n\n }\n", "file_path": "tools/butler/src/util.rs", "rank": 85, "score": 14.782545750365287 }, { "content": " rec.commit_time\n\n .map(|time| time.timestamp().to_string())\n\n .unwrap_or(\"none\".to_string())\n\n );\n\n }\n\n 
Ok(())\n\n }\n\n}\n\n\n\nimpl<'a> TableAccessMut<'a, PhotoTable> {\n\n pub fn take_diff<'b, 'c>(&'b mut self, diff: PhotoRecordDiff<'c>) -> PhotoRecordPatch<'b, 'a>\n\n where\n\n 'a: 'b,\n\n 'c: 'a,\n\n {\n\n self.entry(diff.pid).modify().unwrap().with_diff(diff)\n\n }\n\n pub fn insert_lphoto(&mut self, file: LocalPhoto) -> PID {\n\n let rec = PhotoRecord::new(0, file);\n\n self.insert(rec).pid\n\n }\n\n}\n", "file_path": "tools/butler/src/db/photos/access.rs", "rank": 86, "score": 14.052199450725974 }, { "content": "use crate::prelude::*;\n\nuse crate::util;\n\n\n\npub type FileHash = u64;\n\npub type PID = u32;\n\n\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\npub enum PhotoOrientation {\n\n D0,\n\n D90,\n\n D180,\n\n D270,\n\n}\n\n\n\nimpl From<u32> for PhotoOrientation {\n\n fn from(v: u32) -> Self {\n\n use PhotoOrientation::*;\n\n match v {\n\n 0 | 1 => D0,\n\n 3 => D180,\n", "file_path": "tools/butler/src/db/photos/misc.rs", "rank": 88, "score": 13.523240487726756 }, { "content": "use crate::prelude::*;\n\n\n\nuse super::misc::*;\n\n\n\nuse crate::_vendors::filebuffer;\n\nuse xxhash_rust::xxh3::xxh3_64;\n\n\n\n#[derive(Debug)]\n\npub struct LocalPhoto {\n\n pub location: Arc<FileLocation>,\n\n pub file_hash: Option<FileHash>,\n\n pub metadata: PhotoMetadata,\n\n mmap: Option<filebuffer::FileBuffer>,\n\n}\n\n\n\nimpl LocalPhoto {\n\n pub fn new(location: FileLocation) -> Result<Self> {\n\n let metadata = PhotoMetadata::from_path(location.filepath())?;\n\n Ok(Self {\n\n location: location.into(),\n", "file_path": "tools/butler/src/db/photos/records.rs", "rank": 89, "score": 13.397382125918616 }, { "content": "impl PhotoMetadata {\n\n pub fn fix_from(&mut self, other: &Self) {\n\n if self.etime.is_some() {\n\n println!(\"already fixed\");\n\n return;\n\n }\n\n self.etime = other.etime;\n\n }\n\n pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {\n\n use util::exif::{read_datetime, read_dims, read_orientation};\n\n\n\n let 
filepath = path.as_ref();\n\n let metadata = filepath.metadata()?;\n\n\n\n let file = File::open(&filepath)?;\n\n let mut reader = BufReader::new(file);\n\n\n\n let exif_reader = exif::Reader::new();\n\n let exif = exif_reader.read_from_container(&mut reader);\n\n\n", "file_path": "tools/butler/src/db/photos/misc.rs", "rank": 90, "score": 12.991596163965493 }, { "content": " pub fn set_alias(self, alias: Option<String>) -> Self {\n\n self.rec.alias = alias;\n\n self\n\n }\n\n pub fn set_alias_with<F>(self, f: F) -> Self\n\n where\n\n F: FnOnce(&mut Option<String>),\n\n {\n\n f(&mut self.rec.alias);\n\n self\n\n }\n\n}\n", "file_path": "tools/butler/src/db/mounts/patch.rs", "rank": 91, "score": 12.959393592107615 }, { "content": " K: Ord + Clone + std::hash::Hash,\n\n{\n\n pub fn retain<F, P>(&'b mut self, mut f: F)\n\n where\n\n TableEntry<'b, 'a, T>: TableEntryTrait<'b, 'a, T, Patch = P>,\n\n P: TableRecordPatch<'b, 'a, Table = T>,\n\n F: FnMut(&K, &mut P) -> bool,\n\n {\n\n let mut to_remove = HashSet::new();\n\n let map = unsafe { self.0.as_mut() }.treemap_mut();\n\n for (key, value) in map.iter_mut() {\n\n let mut patch = Patch::<'_, '_, T>::new(value, &self.0);\n\n if !f(&key, &mut patch) {\n\n to_remove.insert(key.clone());\n\n }\n\n patch.commit();\n\n }\n\n\n\n if to_remove.is_empty() {\n\n return;\n", "file_path": "tools/butler/src/db/helpers/access.rs", "rank": 92, "score": 12.765092927331338 }, { "content": " }\n\n}\n\n\n\nimpl<'b, 'a: 'b, T: Table> TableEntry<'b, 'a, T>\n\nwhere\n\n Self: TableEntryTrait<'b, 'a, T>,\n\n{\n\n pub fn or_insert_with<F>(self, f: F) -> Patch<'b, 'a, T>\n\n where\n\n F: FnOnce() -> <T as Table>::Record,\n\n {\n\n let this = match self.modify() {\n\n Ok(x) => return x,\n\n Err(x) => x,\n\n };\n\n let rec = f();\n\n let rec = unsafe { this.ptr.as_mut() }.insert(rec);\n\n unsafe { this.ptr.as_mut() }.modified_flag().set();\n\n <Patch<'b, 'a, T>>::new(rec, this.ptr)\n\n }\n\n pub fn modify(self) -> std::result::Result<Patch<'b, 
'a, T>, Self> {\n\n match self.handle.0 {\n\n Some(Entry::Occupied(x)) => Ok(<Patch<'b, 'a, T>>::new(x.into_mut(), self.ptr)),\n\n _ => Err(self),\n\n }\n\n }\n\n}\n", "file_path": "tools/butler/src/db/helpers/entry.rs", "rank": 93, "score": 12.739031820509524 }, { "content": "use crate::prelude::*;\n\n\n\nimpl<'b, 'a: 'b> TableEntryTrait<'b, 'a, MountPointTable> for TableEntry<'b, 'a, MountPointTable> {\n\n type Patch = MountPointRecordPatch<'b, 'a>;\n\n}\n\n\n\nimpl<'b, 'a: 'b> TableAccessMut<'a, MountPointTable> {\n\n pub fn insert_or_update(&'b mut self, path: CanonicalizedPath, alias: Option<String>) {\n\n self.entry(path.clone())\n\n .or_insert_with(|| MountPointRecord::new(path, None))\n\n .set_alias_with(|x| {\n\n if alias.is_some() {\n\n *x = alias\n\n }\n\n })\n\n .commit();\n\n }\n\n}\n\n\n", "file_path": "tools/butler/src/db/mounts/access.rs", "rank": 94, "score": 12.633755110314537 }, { "content": "}\n\n\n\nimpl<'a, T: Table> From<Option<TableHandle<'a, T>>> for TableHandle<'a, T> {\n\n fn from(v: Option<TableHandle<'a, T>>) -> Self {\n\n match v {\n\n None => Self(None),\n\n Some(x) => x,\n\n }\n\n }\n\n}\n\n\n\npub type TableRecordMut<'a, T> = &'a mut <T as Table>::Record;\n", "file_path": "tools/butler/src/db/helpers/handle.rs", "rank": 95, "score": 12.548489529254406 }, { "content": "impl PhotoTable {\n\n pub fn new() -> Self {\n\n Self {\n\n modified: AtomicFlag::new(),\n\n counter: AtomicCounter::new(0),\n\n pid2rec: BTreeMap::new(),\n\n index: PhotoTableIndex::default(),\n\n }\n\n }\n\n}\n\n\n\nlazy_static! 
{\n\n pub static ref PHOTO_TABLE: Mutex<PhotoTable> = Mutex::new(PhotoTable::new());\n\n}\n", "file_path": "tools/butler/src/db/photos/table.rs", "rank": 96, "score": 12.523745982161964 }, { "content": " }\n\n\n\n #[derive(Debug)]\n\n pub struct AtomicCounter(AtomicU32);\n\n\n\n impl AtomicCounter {\n\n pub fn new(v: u32) -> Self {\n\n Self(AtomicU32::new(v))\n\n }\n\n\n\n #[inline(always)]\n\n pub fn get_and_incr(&self) -> u32 {\n\n self.0.fetch_add(1, Ordering::Relaxed)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn get(&self) -> u32 {\n\n self.0.load(Ordering::Relaxed)\n\n }\n\n }\n", "file_path": "tools/butler/src/util.rs", "rank": 97, "score": 12.455365182415221 }, { "content": "use crate::prelude::*;\n\n\n\npub struct MountPointRecordPatch<'b, 'a: 'b> {\n\n rec: TableRecordMut<'a, MountPointTable>,\n\n _ptr: &'b TableRefMut<'a, MountPointTable>,\n\n}\n\n\n\nimpl<'b, 'a: 'b> Drop for MountPointRecordPatch<'b, 'a> {\n\n fn drop(&mut self) {}\n\n}\n\n\n\nimpl<'b, 'a: 'b> TableRecordPatch<'b, 'a> for MountPointRecordPatch<'b, 'a> {\n\n type Table = MountPointTable;\n\n fn new(rec: TableRecordMut<'a, Self::Table>, ptr: &'b TableRefMut<'a, Self::Table>) -> Self {\n\n Self { rec, _ptr: ptr }\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl<'b, 'a: 'b> MountPointRecordPatch<'b, 'a> {\n", "file_path": "tools/butler/src/db/mounts/patch.rs", "rank": 98, "score": 11.952800144584161 }, { "content": "\n\n#[derive(Clone, Debug)]\n\npub enum PhotoQuery {\n\n PID(PID),\n\n FileLocation(Arc<FileLocation>),\n\n}\n\n\n\nimpl TableKey<PhotoTable> for PhotoQuery {\n\n fn query_in<'a, 'b>(&'a self, table: &'b mut PhotoTable) -> TableHandle<'b, PhotoTable> {\n\n use PhotoQuery::*;\n\n match self {\n\n PID(x) => x.query_in(table),\n\n FileLocation(x) => x.query_in(table),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Arc<FileLocation>> for PhotoQuery {\n\n fn from(v: Arc<FileLocation>) -> Self {\n\n PhotoQuery::FileLocation(v)\n", "file_path": "tools/butler/src/db/photos/keys.rs", "rank": 99, "score": 
11.608834970368033 } ]
Rust
examples/truck_loading.rs
cuviper/RsGenetic
dfbf22008072920f21f1f7c07c1bc7af689bc1f4
extern crate rand; extern crate rsgenetic; use rsgenetic::sim::*; use rsgenetic::sim::seq::Simulator; use rsgenetic::sim::select::*; use rsgenetic::pheno::*; use rand::Rng; type TruckIndex = usize; type PackageSize = i32; type Scheme = Vec<(TruckIndex, PackageSize)>; type SchemeFitness = i32; const NUM_TRUCKS: usize = 5; const CAPACITY: i32 = 10; const PACKAGES: &[i32] = &[3, 8, 2, 7, 6, 1, 3]; struct LoadingScheme { scheme: Scheme, } impl Phenotype<SchemeFitness> for LoadingScheme { fn fitness(&self) -> SchemeFitness { let mut ret: i32 = 0; let mut trucks: Vec<PackageSize> = vec![0; NUM_TRUCKS]; for load in self.scheme.clone() { trucks[load.0] += load.1; } for load in trucks { let space_left = CAPACITY - load; if space_left < 0 { return i32::min_value(); } if space_left == CAPACITY { ret += 1000; } else { ret -= space_left; } } ret } fn crossover(&self, other: &LoadingScheme) -> LoadingScheme { let mut rng = ::rand::thread_rng(); let crossover_indices = ( rng.gen::<usize>() % PACKAGES.len(), rng.gen::<usize>() % PACKAGES.len(), ); let mut crossed_over: Scheme = vec![(0, 0); PACKAGES.len()]; for i in 0..crossover_indices.0 { crossed_over[i] = self.scheme[i]; } for i in crossover_indices.0..crossover_indices.1 { crossed_over[i] = other.scheme[i]; } for i in crossover_indices.1..PACKAGES.len() { crossed_over[i] = self.scheme[i]; } LoadingScheme { scheme: crossed_over, } } fn mutate(&self) -> LoadingScheme { let mut rng = ::rand::thread_rng(); LoadingScheme { scheme: self.scheme .iter() .map(|&(_, size)| (rng.gen::<usize>() % NUM_TRUCKS, size)) .collect(), } } } impl Clone for LoadingScheme { fn clone(&self) -> LoadingScheme { LoadingScheme { scheme: self.scheme.clone(), } } } fn main() { let mut population: Vec<LoadingScheme> = Vec::with_capacity(300); let mut rng = ::rand::thread_rng(); for _ in 0..300 { let mut pheno: Scheme = Vec::with_capacity(PACKAGES.len()); for package in PACKAGES { let index = rng.gen::<usize>() % NUM_TRUCKS; pheno.push((index, *package)); } 
population.push(LoadingScheme { scheme: pheno }); } #[allow(deprecated)] let mut s = Simulator::builder(&mut population) .set_selector(Box::new(MaximizeSelector::new(10))) .set_max_iters(100) .build(); s.run(); let result = s.get().unwrap(); let time = s.time(); println!("Execution time: {} ns.", time.unwrap()); println!( "Result: {:?} | Fitness: {}.", result.scheme, result.fitness() ); let mut trucks: Vec<_> = vec![0; NUM_TRUCKS]; for &(index, size) in &result.scheme { trucks[index] += size; } println!("Load per truck: {:?}.", trucks); }
extern crate rand; extern crate rsgenetic; use rsgenetic::sim::*; use rsgenetic::sim::seq::Simulator; use rsgenetic::sim::select::*; use rsgenetic::pheno::*; use rand::Rng; type TruckIndex = usize; type PackageSize = i32; type Scheme = Vec<(TruckIndex, PackageSize)>; type SchemeFitness = i32; const NUM_TRUCKS: usize = 5; const CAPACITY: i32 = 10; const PACKAGES: &[i32] = &[3, 8, 2, 7, 6, 1, 3]; struct LoadingScheme { scheme: Scheme, } impl Phenotype<SchemeFitness> for LoadingScheme { fn fitness(&self) -> SchemeFitness { let mut ret: i32 = 0; let mut trucks: Vec<PackageSize> = vec![0; NUM_TRUCKS]; for load in self.scheme.clone() { trucks[load.0] += load.1; } for load in trucks { let space_left = CAPACITY - load; if space_left < 0 { return i32::min_value(); } if space_left == CAPACITY { ret += 1000; } else { ret -= space_left; } } ret } fn crossover(&self, other: &LoadingScheme) -> LoadingScheme { let mut rng = ::rand::thread_rng(); let crossover_indices = ( rng.gen::<usize>() % PACKAGES.len(), rng.gen::<usize>() % PACKAGES.len(), ); let mut crossed_over: Scheme = vec![(0, 0); PACKAGES.len()];
fn mutate(&self) -> LoadingScheme { let mut rng = ::rand::thread_rng(); LoadingScheme { scheme: self.scheme .iter() .map(|&(_, size)| (rng.gen::<usize>() % NUM_TRUCKS, size)) .collect(), } } } impl Clone for LoadingScheme { fn clone(&self) -> LoadingScheme { LoadingScheme { scheme: self.scheme.clone(), } } } fn main() { let mut population: Vec<LoadingScheme> = Vec::with_capacity(300); let mut rng = ::rand::thread_rng(); for _ in 0..300 { let mut pheno: Scheme = Vec::with_capacity(PACKAGES.len()); for package in PACKAGES { let index = rng.gen::<usize>() % NUM_TRUCKS; pheno.push((index, *package)); } population.push(LoadingScheme { scheme: pheno }); } #[allow(deprecated)] let mut s = Simulator::builder(&mut population) .set_selector(Box::new(MaximizeSelector::new(10))) .set_max_iters(100) .build(); s.run(); let result = s.get().unwrap(); let time = s.time(); println!("Execution time: {} ns.", time.unwrap()); println!( "Result: {:?} | Fitness: {}.", result.scheme, result.fitness() ); let mut trucks: Vec<_> = vec![0; NUM_TRUCKS]; for &(index, size) in &result.scheme { trucks[index] += size; } println!("Load per truck: {:?}.", trucks); }
for i in 0..crossover_indices.0 { crossed_over[i] = self.scheme[i]; } for i in crossover_indices.0..crossover_indices.1 { crossed_over[i] = other.scheme[i]; } for i in crossover_indices.1..PACKAGES.len() { crossed_over[i] = self.scheme[i]; } LoadingScheme { scheme: crossed_over, } }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let mut population = (-300..300).map(|i| MyData { x: f64::from(i) }).collect();\n\n let mut s = Simulator::builder(&mut population)\n\n .set_selector(Box::new(StochasticSelector::new(10)))\n\n .set_max_iters(50)\n\n .build();\n\n s.run();\n\n let result = s.get().unwrap();\n\n let time = s.time();\n\n println!(\"Execution time: {} ns.\", time.unwrap());\n\n println!(\"Expected result: (-3, 10).\");\n\n println!(\"Result: ({}, {}).\", result.x, result.fitness().f);\n\n}\n", "file_path": "examples/max_parabole.rs", "rank": 12, "score": 40827.2641411667 }, { "content": "fn main() {\n\n let mut population: Vec<MyPhenotype> = Vec::with_capacity(300);\n\n for i in 0..150 {\n\n population.push(MyPhenotype { variant: MyVariant::Variant1, value: i });\n\n population.push(MyPhenotype { variant: MyVariant::Variant2, value: i })\n\n }\n\n #[allow(deprecated)]\n\n let mut s = Simulator::builder(&mut population)\n\n .set_selector(Box::new(MaximizeSelector::new(10)))\n\n .set_max_iters(100)\n\n .build();\n\n s.run();\n\n let result = s.get().unwrap();\n\n let time = s.time();\n\n println!(\"Execution time: {} ns.\", time.unwrap());\n\n println!(\n\n \"Result: {:?} | Fitness: {}.\",\n\n result,\n\n result.fitness()\n\n );\n\n}\n", "file_path": "examples/enum_phenotype.rs", "rank": 13, "score": 40827.2641411667 }, { "content": "struct MyFitness {\n\n f: f64,\n\n}\n\n\n\nimpl Eq for MyFitness {}\n\n\n\nimpl PartialEq for MyFitness {\n\n fn eq(&self, other: &MyFitness) -> bool {\n\n (self.f - other.f).abs() < 0.0001\n\n }\n\n}\n\n\n\nimpl PartialOrd for MyFitness {\n\n fn partial_cmp(&self, other: &MyFitness) -> Option<Ordering> {\n\n self.f.partial_cmp(&other.f)\n\n }\n\n}\n\n\n\nimpl Ord for MyFitness {\n\n fn cmp(&self, other: &MyFitness) -> Ordering {\n", "file_path": "examples/max_parabole.rs", "rank": 14, "score": 40079.15294496935 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct MyPhenotype {\n\n variant: MyVariant,\n\n value: 
i32,\n\n}\n\n\n", "file_path": "examples/enum_phenotype.rs", "rank": 15, "score": 40079.15294496935 }, { "content": "struct MyData {\n\n x: f64,\n\n}\n\n\n\nimpl Phenotype<MyFitness> for MyData {\n\n fn fitness(&self) -> MyFitness {\n\n // Calculate the function here, because it's what we wish to maximize.\n\n MyFitness {\n\n f: 10.0 - ((self.x + 3.0) * (self.x + 3.0)),\n\n }\n\n }\n\n\n\n fn crossover(&self, other: &MyData) -> MyData {\n\n // We take the average for crossover.\n\n MyData {\n\n x: (self.x + other.x) / 2.0,\n\n }\n\n }\n\n\n\n fn mutate(&self) -> MyData {\n", "file_path": "examples/max_parabole.rs", "rank": 16, "score": 40079.15294496935 }, { "content": "fn main() {\n\n let mut population = (-300..300).map(|i| MyData { x: f64::from(i) }).collect();\n\n let mut s = Simulator::builder(&mut population)\n\n .set_selector(Box::new(StochasticSelector::new(10)))\n\n .set_max_iters(50)\n\n .build();\n\n while let StepResult::Success = s.checked_step() {\n\n let result = s.get().unwrap();\n\n println!(\n\n \"Intermediate result: ({}, {}).\",\n\n result.x,\n\n result.fitness().f\n\n );\n\n }\n\n let result = s.get().unwrap();\n\n let time = s.time();\n\n println!(\"Execution time: {} ns.\", time.unwrap());\n\n println!(\"Expected result: (-3, 10).\");\n\n println!(\"Result: ({}, {}).\", result.x, result.fitness().f);\n\n}\n", "file_path": "examples/max_parabole_steps.rs", "rank": 17, "score": 39613.6934330872 }, { "content": "struct MyFitness {\n\n f: f64,\n\n}\n\n\n\nimpl Eq for MyFitness {}\n\n\n\nimpl PartialEq for MyFitness {\n\n fn eq(&self, other: &MyFitness) -> bool {\n\n (self.f - other.f).abs() < 0.0001\n\n }\n\n}\n\n\n\nimpl PartialOrd for MyFitness {\n\n fn partial_cmp(&self, other: &MyFitness) -> Option<Ordering> {\n\n self.f.partial_cmp(&other.f)\n\n }\n\n}\n\n\n\nimpl Ord for MyFitness {\n\n fn cmp(&self, other: &MyFitness) -> Ordering {\n", "file_path": "examples/max_parabole_steps.rs", "rank": 18, "score": 38870.44939432382 }, { "content": 
"struct MyData {\n\n x: f64,\n\n}\n\n\n\nimpl Phenotype<MyFitness> for MyData {\n\n fn fitness(&self) -> MyFitness {\n\n // Calculate the function here, because it's what we wish to maximize.\n\n MyFitness {\n\n f: 10.0 - ((self.x + 3.0) * (self.x + 3.0)),\n\n }\n\n }\n\n\n\n fn crossover(&self, other: &MyData) -> MyData {\n\n // We take the average for crossover.\n\n MyData {\n\n x: (self.x + other.x) / 2.0,\n\n }\n\n }\n\n\n\n fn mutate(&self) -> MyData {\n", "file_path": "examples/max_parabole_steps.rs", "rank": 19, "score": 38870.44939432382 }, { "content": "//! from using primitive types as fitness values.\n\n//!\n\n//! Implemented types:\n\n//!\n\n//! * `i8`\n\n//! * `i16`\n\n//! * `i32`\n\n//! * `i64`\n\n//! * `u8`\n\n//! * `u16`\n\n//! * `u32`\n\n//! * `u64`\n\n//! * `usize`\n\n\n\nuse pheno::Fitness;\n\n\n\nmacro_rules! implement_fitness_int {\n\n ( $($t:ty),* ) => {\n\n $(\n\n impl Fitness for $t {\n", "file_path": "src/sim/types.rs", "rank": 20, "score": 26279.955250010928 }, { "content": " fn zero() -> $t {\n\n 0\n\n }\n\n\n\n fn abs_diff(&self, other: &$t) -> $t {\n\n if self > other {\n\n self - other\n\n } else {\n\n other - self\n\n }\n\n }\n\n }\n\n )*\n\n }\n\n}\n\n\n\nimplement_fitness_int!(i8, i16, i32, i64, u8, u16, u32, u64, usize);\n", "file_path": "src/sim/types.rs", "rank": 21, "score": 26275.183672659445 }, { "content": "// file: types.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions 
and\n\n// limitations under the License.\n\n\n\n//! This module provides implementations of the `Fitness` trait for\n\n//! some primitive types, such as `i32`, `i64` etcetera.\n\n//! This is because Rust does not allow programmers to implement\n\n//! a foreign trait for a foreign type, which would stop you as a library user\n", "file_path": "src/sim/types.rs", "rank": 22, "score": 26272.720795183803 }, { "content": "### Implementing the `Phenotype` trait\n\n\n\nNote that we use an integer type as the fitness type parameter\n\nto make this example more simple. Replace it with your custom type\n\nif needed. In this example, we try to find individuals with\n\ntwo integer components that sum to a target value.\n\n\n\nThis example is far-fetched, but simplified to show how\n\neasy it is to define new individuals and implement\n\nthe `Phenotype` trait.\n\n\n\n```rust\n\nuse rsgenetic::pheno::*;\n\n\n\nconst TARGET: i32 = 100;\n\n\n\n#[derive(Copy, Clone)]\n\nstruct MyPheno {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n\nimpl Phenotype<i32> for MyPheno {\n\n // How fit is this individual?\n\n fn fitness(&self) -> i32 {\n\n TARGET - (self.x + self.y)\n\n }\n\n\n\n // Have two individuals create a new individual\n\n fn crossover(&self, other: &MyPheno) -> MyPheno {\n\n MyPheno {\n\n x: self.x,\n\n y: other.y,\n\n }\n\n }\n\n\n\n // Mutate an individual, changing its state\n\n fn mutate(&self) -> MyPheno {\n\n MyPheno {\n\n x: self.x + 1,\n\n y: self.y - 1,\n\n }\n\n }\n\n}\n\n```\n\n\n\n### Creating and running a `Simulator`\n\n\n\n```rust\n\nuse rsgenetic::sim::*;\n\nuse rsgenetic::sim::seq::Simulator;\n\nuse rsgenetic::sim::select::*;\n\n\n\n// (Assuming the above definition of `MyPheno` is in scope)\n\n// [ ... 
]\n\n\n\nfn main() {\n\n let mut population = (0..100).map(|i| MyPheno { x: i, y: 100 - i }).collect();\n\n let mut s = Simulator::builder(&mut population)\n\n .set_selector(Box::new(StochasticSelector::new(10)))\n\n .set_max_iters(50)\n\n .build();\n\n s.run();\n\n let result = s.get().unwrap(); // The best individual\n\n}\n\n```\n\n\n\nSee the `examples` directory in the repository for more elaborate examples.\n\n\n", "file_path": "README.md", "rank": 23, "score": 20.712159793784597 }, { "content": "extern crate rand;\n\nextern crate rsgenetic;\n\n\n\nuse rsgenetic::sim::*;\n\nuse rsgenetic::sim::seq::Simulator;\n\nuse rsgenetic::sim::select::*;\n\nuse rsgenetic::pheno::*;\n\nuse rand::distributions::{IndependentSample, Range};\n\nuse std::cmp::Ordering;\n\n\n", "file_path": "examples/max_parabole_steps.rs", "rank": 24, "score": 16.799977199340795 }, { "content": "# RsGenetic\n\n[![Build Status](https://travis-ci.org/m-decoster/RsGenetic.svg?branch=master)](https://travis-ci.org/m-decoster/RsGenetic)\n\n[![Crates Version](https://img.shields.io/crates/v/rsgenetic.svg)](https://crates.io/crates/rsgenetic/)\n\n[![License MIT](https://img.shields.io/badge/License-MIT-blue.svg)](./LICENSE)\n\n[![License Apache](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](./LICENSE)\n\n\n\n## Summary and Features\n\nRsGenetic is a framework for executing genetic algorithms in Rust. It is designed to have a simple but modular API.\n\n\n\n## Examples and Documentation\n\nDocumentation is available [here](https://docs.rs/rsgenetic/1.7.0/rsgenetic/). \n\n\n\n### Implementing the `Fitness` trait\n\n\n\nNote that, if your fitness type is an integer type, you\n\ndo not need to write a wrapper struct around this integer. 
See\n\nthe `types` module documentation for more details.\n\n\n\n```rust\n\nuse rsgenetic::pheno::*;\n\nuse std::cmp::Ordering;\n\n\n\n#[derive(Eq, PartialEq, PartialOrd, Ord)]\n\nstruct MyFitness {\n\n value: i32,\n\n}\n\n\n\nimpl Fitness for MyFitness {\n\n // The zero value for our custom type\n\n fn zero() -> MyFitness {\n\n MyFitness { value: 0 }\n\n }\n\n\n\n // The absolute difference between two instances\n\n fn abs_diff(&self, other: &MyFitness) -> MyFitness {\n\n MyFitness {\n\n value: (self.value - other.value).abs()\n\n }\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 25, "score": 15.569143688185003 }, { "content": "// file: max_parabole.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This simple example shows how to use a simulator\n\n//! that finds the maximum of the function f(x) = 10-(x+3)^2 (which is (-3,10)).\n\nextern crate rand;\n\nextern crate rsgenetic;\n\n\n\nuse rsgenetic::sim::*;\n\nuse rsgenetic::sim::seq::Simulator;\n\nuse rsgenetic::sim::select::*;\n\nuse rsgenetic::pheno::*;\n\nuse rand::distributions::{IndependentSample, Range};\n\nuse std::cmp::Ordering;\n\n\n", "file_path": "examples/max_parabole.rs", "rank": 26, "score": 13.162410156030518 }, { "content": " return Err(format!(\n\n \"Invalid parameter `participants`: {}. 
Should be larger than \\\n\n zero and less than the population size.\",\n\n self.participants\n\n ));\n\n }\n\n\n\n let mut result: Parents<&T> = Vec::new();\n\n let mut rng = ::rand::thread_rng();\n\n for _ in 0..(self.count / 2) {\n\n let mut tournament: Vec<&T> = Vec::with_capacity(self.participants);\n\n for _ in 0..self.participants {\n\n let index = rng.gen_range::<usize>(0, population.len());\n\n tournament.push(&population[index]);\n\n }\n\n tournament.sort_by(|x, y| y.fitness().cmp(&x.fitness()));\n\n result.push((tournament[0], tournament[1]));\n\n }\n\n Ok(result)\n\n }\n", "file_path": "src/sim/select/tournament.rs", "rank": 27, "score": 12.14065505536938 }, { "content": "where\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n /// Kill off phenotypes using stochastic universal sampling.\n\n fn kill_off(&mut self, count: usize) {\n\n let ratio = self.population.len() / count;\n\n let mut i = ::rand::thread_rng().gen_range::<usize>(0, self.population.len());\n\n for _ in 0..count {\n\n self.population.swap_remove(i);\n\n i += ratio;\n\n i %= self.population.len();\n\n }\n\n }\n\n}\n\n\n\n/// A `Builder` for the `Simulator` type.\n\n#[derive(Debug)]\n\npub struct SimulatorBuilder<'a, T, F>\n\nwhere\n", "file_path": "src/sim/seq.rs", "rank": 28, "score": 11.814402145349261 }, { "content": "\n\nimpl<T, F> Selector<T, F> for StochasticSelector\n\nwhere\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n fn select<'a>(&self, population: &'a [T]) -> Result<Parents<&'a T>, String> {\n\n if self.count == 0 || self.count % 2 != 0 || self.count >= population.len() {\n\n return Err(format!(\n\n \"Invalid parameter `count`: {}. 
Should be larger than zero, a \\\n\n multiple of two and less than the population size.\",\n\n self.count\n\n ));\n\n }\n\n\n\n let ratio = population.len() / self.count;\n\n let mut result: Parents<&T> = Vec::new();\n\n let mut i = ::rand::thread_rng().gen_range::<usize>(0, population.len());\n\n let mut selected = 0;\n\n while selected < self.count {\n", "file_path": "src/sim/select/stochastic.rs", "rank": 29, "score": 10.268015255628255 }, { "content": " // Shift x with a random number.\n\n // (This RNG code should reside somewhere else, not in this function, but it's just an\n\n // example).\n\n\n\n // Because we don't want to have too big mutations, we limit the range to -1, +1.\n\n // Smaller values can cause slower convergence, but larger values may cause completely\n\n // wrong values.\n\n let between = Range::new(-1.0, 1.0);\n\n let mut rng = rand::thread_rng();\n\n let offset = between.ind_sample(&mut rng);\n\n MyData { x: self.x + offset }\n\n }\n\n}\n\n\n\nimpl Clone for MyData {\n\n fn clone(&self) -> MyData {\n\n MyData { x: self.x }\n\n }\n\n}\n\n\n", "file_path": "examples/max_parabole.rs", "rank": 30, "score": 10.199497676879243 }, { "content": " // Shift x with a random number.\n\n // (This RNG code should reside somewhere else, not in this function, but it's just an\n\n // example).\n\n\n\n // Because we don't want to have too big mutations, we limit the range to -1, +1.\n\n // Smaller values can cause slower convergence, but larger values may cause completely\n\n // wrong values.\n\n let between = Range::new(-1.0, 1.0);\n\n let mut rng = rand::thread_rng();\n\n let offset = between.ind_sample(&mut rng);\n\n MyData { x: self.x + offset }\n\n }\n\n}\n\n\n\nimpl Clone for MyData {\n\n fn clone(&self) -> MyData {\n\n MyData { x: self.x }\n\n }\n\n}\n\n\n", "file_path": "examples/max_parabole_steps.rs", "rank": 31, "score": 10.199497676879247 }, { "content": "//! 
define how crossover and mutation work, present a fitness function, choose some settings\n\n//! and this library takes care of the rest.\n\n//!\n\n//! # Installation\n\n//!\n\n//! You can use this library by adding the following lines to your `Cargo.toml` file:\n\n//!\n\n//! ```ignore\n\n//! [dependencies]\n\n//! rsgenetic = \"^1.7.0\"\n\n//! ```\n\n//!\n\n//! and adding `extern crate rsgenetic;` to your crate root.\n\n//!\n\n//! # Features\n\n//! ## Available Simulators\n\n//!\n\n//! There is currently only one, sequential, simulator. This simulator will run\n\n//! the genetic algorithm on a single thread.\n\n//!\n", "file_path": "src/lib.rs", "rank": 32, "score": 9.759106360977505 }, { "content": "//!\n\n//! ```\n\n//! use rsgenetic::pheno::*;\n\n//! use std::cmp::Ordering;\n\n//!\n\n//! #[derive(Eq, PartialEq, PartialOrd, Ord)]\n\n//! struct MyFitness {\n\n//! value: i32,\n\n//! }\n\n//!\n\n//! impl Fitness for MyFitness {\n\n//! // The zero value for our custom type\n\n//! fn zero() -> MyFitness {\n\n//! MyFitness { value: 0 }\n\n//! }\n\n//!\n\n//! // The absolute difference between two instances\n\n//! fn abs_diff(&self, other: &MyFitness) -> MyFitness {\n\n//! MyFitness {\n\n//! 
value: (self.value - other.value).abs()\n", "file_path": "src/lib.rs", "rank": 33, "score": 9.222618797268563 }, { "content": "// file: enum_phenotype.rs\n\n//\n\n// Copyright 2015-2018 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This example shows how to provide several crossover or mutation implementations\n\n//! for a single `Phenotype`.\n\n//! \n\n//! This example was created in reference to [issue 30](https://github.com/m-decoster/RsGenetic/issues/30).\n\nextern crate rsgenetic;\n\n\n\nuse rsgenetic::sim::*;\n\nuse rsgenetic::sim::seq::Simulator;\n\nuse rsgenetic::sim::select::*;\n\nuse rsgenetic::pheno::*;\n\n\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "examples/enum_phenotype.rs", "rank": 34, "score": 9.014580876717027 }, { "content": "//! // [ ... ]\n\n//!\n\n//! fn main() {\n\n//! let mut population = (0..100).map(|i| MyPheno { x: i, y: 100 - i }).collect();\n\n//! let mut s = Simulator::builder(&mut population)\n\n//! .set_selector(Box::new(StochasticSelector::new(10)))\n\n//! .set_max_iters(50)\n\n//! .build();\n\n//! s.run();\n\n//! let result = s.get().unwrap(); // The best individual\n\n//! }\n\n//! ```\n\n//!\n\n//! 
See the `examples` directory in the repository for more elaborate examples.\n\n\n\n#![deny(missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts,\n\n trivial_numeric_casts, unsafe_code, unstable_features, unused_import_braces,\n\n unused_qualifications)]\n\n\n\nextern crate rand;\n", "file_path": "src/lib.rs", "rank": 35, "score": 8.574640681715142 }, { "content": "//! obtain by calling `Simulator::builder()`.\n\n\n\nuse pheno::Phenotype;\n\nuse pheno::Fitness;\n\nuse rand::Rng;\n\nuse super::*;\n\nuse super::select::*;\n\nuse super::iterlimit::*;\n\nuse super::earlystopper::*;\n\nuse std::time::Instant;\n\nuse std::marker::PhantomData;\n\n\n\n/// A sequential implementation of `::sim::Simulation`.\n\n/// The genetic algorithm is run in a single thread.\n\n#[derive(Debug)]\n\npub struct Simulator<'a, T, F>\n\nwhere\n\n T: 'a + Phenotype<F>,\n\n F: Fitness,\n\n{\n", "file_path": "src/sim/seq.rs", "rank": 36, "score": 7.462986190166096 }, { "content": "extern crate rayon;\n\n\n\n/// Contains the definition of a Phenotype.\n\npub mod pheno;\n\n/// Contains implementations of Simulators, which can run genetic algorithms.\n\npub mod sim;\n\n/// Contains code used by unit tests.\n\n#[cfg(test)]\n\nmod test;\n", "file_path": "src/lib.rs", "rank": 37, "score": 7.225363558104565 }, { "content": "//! }\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! ## Implementing the `Phenotype` trait\n\n//!\n\n//! Note that we use an integer type as the fitness type parameter\n\n//! to make this example more simple. Replace it with your custom type\n\n//! if needed. In this example, we try to find individuals with\n\n//! two integer components that sum to a target value.\n\n//!\n\n//! This example is far-fetched, but simplified to show how\n\n//! easy it is to define new individuals and implement\n\n//! the `Phenotype` trait.\n\n//!\n\n//! ```\n\n//! use rsgenetic::pheno::*;\n\n//!\n\n//! 
const TARGET: i32 = 100;\n", "file_path": "src/lib.rs", "rank": 38, "score": 7.151974197990227 }, { "content": " /// Update the `EarlyStopper` with a new fitness value.\n\n pub fn update(&mut self, fitness: F) {\n\n if self.previous.abs_diff(&fitness) < self.delta {\n\n self.previous = fitness;\n\n self.iter_limit.inc();\n\n } else {\n\n self.iter_limit.reset();\n\n }\n\n }\n\n\n\n /// Returns whether the `Simulator` should stop.\n\n pub fn reached(&self) -> bool {\n\n self.iter_limit.reached()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::EarlyStopper;\n\n use test::MyFitness;\n", "file_path": "src/sim/earlystopper.rs", "rank": 39, "score": 6.746479476197257 }, { "content": " ///\n\n /// The `Simulator` will stop running after this number of iterations.\n\n ///\n\n /// Returns itself for chaining purposes.\n\n pub fn set_max_iters(mut self, i: u64) -> Self {\n\n self.sim.iter_limit = IterLimit::new(i);\n\n self\n\n }\n\n\n\n /// Set early stopping. If for `n_iters` iterations, the change in the highest fitness\n\n /// is smaller than `delta`, the simulator will stop running.\n\n ///\n\n /// Returns itself for chaining purposes.\n\n pub fn set_early_stop(mut self, delta: F, n_iters: u64) -> Self {\n\n self.sim.earlystopper = Some(EarlyStopper::new(delta, n_iters));\n\n self\n\n }\n\n}\n\n\n\nimpl<'a, T, F> Builder<Simulator<'a, T, F>> for SimulatorBuilder<'a, T, F>\n", "file_path": "src/sim/seq.rs", "rank": 40, "score": 6.743827811737146 }, { "content": "//!\n\n//! // Mutate an individual, changing its state\n\n//! fn mutate(&self) -> MyPheno {\n\n//! MyPheno {\n\n//! x: self.x + 1,\n\n//! y: self.y - 1,\n\n//! }\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! ## Creating and running a `Simulator`\n\n//!\n\n//! ```ignore\n\n//!\n\n//! use rsgenetic::sim::*;\n\n//! use rsgenetic::sim::seq::Simulator;\n\n//! use rsgenetic::sim::select::*;\n\n//!\n\n//! 
// (Assuming the above definition of `MyPheno` is in scope)\n", "file_path": "src/lib.rs", "rank": 41, "score": 6.738399835937505 }, { "content": "// file: stochastic.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse pheno::{Fitness, Phenotype};\n\nuse super::*;\n\nuse rand::Rng;\n\n\n", "file_path": "src/sim/select/stochastic.rs", "rank": 42, "score": 6.597308277116825 }, { "content": "// file: tournament.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse pheno::{Fitness, Phenotype};\n\nuse super::*;\n\nuse rand::Rng;\n\n\n", "file_path": "src/sim/select/tournament.rs", "rank": 43, "score": 6.597308277116825 }, { "content": "//!\n\n//! #[derive(Copy, Clone)]\n\n//! struct MyPheno {\n\n//! x: i32,\n\n//! y: i32,\n\n//! }\n\n//!\n\n//! 
impl Phenotype<i32> for MyPheno {\n\n//! // How fit is this individual?\n\n//! fn fitness(&self) -> i32 {\n\n//! TARGET - (self.x + self.y)\n\n//! }\n\n//!\n\n//! // Have two individuals create a new individual\n\n//! fn crossover(&self, other: &MyPheno) -> MyPheno {\n\n//! MyPheno {\n\n//! x: self.x,\n\n//! y: other.y,\n\n//! }\n\n//! }\n", "file_path": "src/lib.rs", "rank": 44, "score": 6.3434859333726425 }, { "content": "/// Runs several tournaments, and selects best performing phenotypes from each tournament.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct TournamentSelector {\n\n count: usize,\n\n participants: usize,\n\n}\n\n\n\nimpl TournamentSelector {\n\n /// Create and return a tournament selector.\n\n ///\n\n /// Such a selector runs `count / 2` tournaments, each with `participants` participants.\n\n /// From each tournament, the best 2 phenotypes are selected, yielding\n\n /// `count` parents.\n\n ///\n\n /// * `count`: must be larger than zero, a multiple of two and less than the population size.\n\n /// * `participants`: must be larger than one and less than the population size.\n\n #[deprecated(note=\"The `TournamentSelector` requires at least 2 participants. This is not enforced\n\n by the `new` function. 
You should use `new_checked` instead.\",\n\n since=\"1.7.11\")]\n\n pub fn new(count: usize, participants: usize) -> TournamentSelector {\n", "file_path": "src/sim/select/tournament.rs", "rank": 45, "score": 6.225022872325465 }, { "content": " T: 'a + Phenotype<F>,\n\n F: Fitness,\n\n{\n\n sim: Simulator<'a, T, F>,\n\n}\n\n\n\nimpl<'a, T, F> SimulatorBuilder<'a, T, F>\n\nwhere\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n /// Set the selector of the resulting `Simulator`.\n\n ///\n\n /// Returns itself for chaining purposes.\n\n pub fn set_selector(mut self, sel: Box<Selector<T, F>>) -> Self {\n\n self.sim.selector = sel;\n\n self\n\n }\n\n\n\n /// Set the maximum number of iterations of the resulting `Simulator`.\n", "file_path": "src/sim/seq.rs", "rank": 46, "score": 6.108951100214021 }, { "content": "\n\n/// Selects best performing phenotypes from the population.\n\n#[derive(Clone, Copy, Debug)]\n\n#[deprecated(note=\"The `MaximizeSelector` has bad performance due to sorting. For better performance with potentially different results, \\\n\n use the `UnstableMaximizeSelector`.\",\n\n since=\"1.7.7\")]\n\npub struct MaximizeSelector {\n\n count: usize,\n\n}\n\n\n\nimpl MaximizeSelector {\n\n /// Create and return a maximizing selector.\n\n ///\n\n /// Such a selector selects only the `count` best performing phenotypes\n\n /// as parents.\n\n ///\n\n /// * `count`: must be larger than zero, a multiple of two and less than the population size.\n\n pub fn new(count: usize) -> MaximizeSelector {\n\n MaximizeSelector { count: count }\n\n }\n", "file_path": "src/sim/select/max.rs", "rank": 47, "score": 5.962393625423227 }, { "content": "/// Selects phenotypes at random, starting from a random index and taking equidistant jumps.\n\n///\n\n/// Commonly known as *Stochastic Universal Sampling*.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct StochasticSelector {\n\n count: usize,\n\n}\n\n\n\nimpl StochasticSelector {\n\n /// Create and return a stochastic selector.\n\n 
///\n\n /// Such a selector selects elements using stochastic universal sampling,\n\n /// yielding parents with low, medium and high fitness values. In total,\n\n /// `count` parents are selected.\n\n ///\n\n /// * `count`: must be larger than zero, a multiple of 2 and less than the population size.\n\n pub fn new(count: usize) -> StochasticSelector {\n\n StochasticSelector { count: count }\n\n }\n\n}\n", "file_path": "src/sim/select/stochastic.rs", "rank": 48, "score": 5.924080711946562 }, { "content": "/// Selects best performing phenotypes from the population.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct UnstableMaximizeSelector {\n\n count: usize,\n\n}\n\n\n\nimpl UnstableMaximizeSelector {\n\n /// Create and return a maximizing selector with unstable parallel sorting.\n\n ///\n\n /// Such a selector selects only the `count` best performing phenotypes\n\n /// as parents.\n\n ///\n\n /// * `count`: must be larger than zero, a multiple of two and less than the population size.\n\n pub fn new(count: usize) -> UnstableMaximizeSelector {\n\n UnstableMaximizeSelector { count: count }\n\n }\n\n}\n\n\n\nimpl<T, F> Selector<T, F> for UnstableMaximizeSelector\n\nwhere\n", "file_path": "src/sim/select/max_unstable.rs", "rank": 49, "score": 5.13309965744843 }, { "content": "\n\n /// Reset the number of iterations to zero.\n\n pub fn reset(&mut self) {\n\n self.cur = 0;\n\n }\n\n\n\n /// Get the current number of iterations.\n\n pub fn get(&self) -> u64 {\n\n self.cur\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::IterLimit;\n\n\n\n #[test]\n\n fn test_iter_limit_reset() {\n\n let mut limit = IterLimit::new(5);\n\n for _ in 0..4 {\n", "file_path": "src/sim/iterlimit.rs", "rank": 50, "score": 5.015723805220119 }, { "content": "}\n\n\n\nimpl<T, F> Selector<T, F> for MaximizeSelector\n\nwhere\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n fn select<'a>(&self, population: &'a [T]) -> Result<Parents<&'a T>, String> {\n\n if self.count == 0 || self.count % 2 != 0 
|| self.count * 2 >= population.len() {\n\n return Err(format!(\n\n \"Invalid parameter `count`: {}. Should be larger than zero, a \\\n\n multiple of two and less than half the population size.\",\n\n self.count\n\n ));\n\n }\n\n\n\n let mut borrowed: Vec<&T> = population.iter().collect();\n\n borrowed.sort_by(|x, y| y.fitness().cmp(&x.fitness()));\n\n let mut index = 0;\n\n let mut result: Parents<&T> = Vec::new();\n", "file_path": "src/sim/select/max.rs", "rank": 51, "score": 4.748371297026048 }, { "content": "\n\n impl MyFitness {\n\n fn new(f: i64) -> MyFitness {\n\n MyFitness { f: f }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_early_stopper_reset() {\n\n let mut stopper = EarlyStopper::new(MyFitness::new(10), 5);\n\n for _ in 0..4 {\n\n stopper.update(MyFitness::new(1));\n\n }\n\n assert_eq!(stopper.reached(), false);\n\n stopper.update(MyFitness::new(20));\n\n assert_eq!(stopper.reached(), false);\n\n }\n\n\n\n #[test]\n\n fn test_early_stopper_reached() {\n\n let mut stopper = EarlyStopper::new(MyFitness::new(10), 5);\n\n for _ in 0..5 {\n\n stopper.update(MyFitness::new(1));\n\n }\n\n assert!(stopper.reached());\n\n }\n\n}\n", "file_path": "src/sim/earlystopper.rs", "rank": 52, "score": 4.463501958348168 }, { "content": "use std::cmp;\n\n\n\n#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]\n\npub struct MyFitness {\n\n pub f: i64,\n\n}\n\n\n\nimpl Fitness for MyFitness {\n\n fn zero() -> Self {\n\n MyFitness { f: 0 }\n\n }\n\n\n\n fn abs_diff(&self, other: &Self) -> Self {\n\n MyFitness {\n\n f: (self.f - other.f).abs(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n", "file_path": "src/test.rs", "rank": 53, "score": 4.41906919936639 }, { "content": " population: &'a mut Vec<T>,\n\n iter_limit: IterLimit,\n\n selector: Box<Selector<T, F>>,\n\n earlystopper: Option<EarlyStopper<F>>,\n\n duration: Option<NanoSecond>,\n\n error: Option<String>,\n\n phantom: PhantomData<&'a T>,\n\n}\n\n\n\nimpl<'a, T, F> Simulation<'a, T, F> for Simulator<'a, T, 
F>\n\nwhere\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n type B = SimulatorBuilder<'a, T, F>;\n\n\n\n /// Create builder.\n\n #[allow(deprecated)]\n\n fn builder(population: &'a mut Vec<T>) -> SimulatorBuilder<'a, T, F> {\n\n SimulatorBuilder {\n", "file_path": "src/sim/seq.rs", "rank": 54, "score": 4.24223077913617 }, { "content": " max: u64,\n\n /// Current number of iterations.\n\n cur: u64,\n\n}\n\n\n\nimpl IterLimit {\n\n /// Create a new iteration limiter.\n\n pub fn new(max: u64) -> IterLimit {\n\n IterLimit { max: max, cur: 0 }\n\n }\n\n\n\n /// Increment the number of iterations.\n\n pub fn inc(&mut self) {\n\n self.cur += 1;\n\n }\n\n\n\n /// Check if the maximum has been reached.\n\n pub fn reached(&self) -> bool {\n\n self.cur >= self.max\n\n }\n", "file_path": "src/sim/iterlimit.rs", "rank": 55, "score": 4.233965615280045 }, { "content": " let mut population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n let mut s = seq::Simulator::builder(&mut population)\n\n .set_selector(Box::new(selector))\n\n .build();\n\n s.kill_off(10);\n\n assert_eq!(s.population.len(), 90);\n\n }\n\n\n\n #[test]\n\n fn test_max_iters() {\n\n let selector = MaximizeSelector::new(2);\n\n let mut population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n let mut s = seq::Simulator::builder(&mut population)\n\n .set_selector(Box::new(selector))\n\n .set_max_iters(2)\n\n .build();\n\n s.run();\n\n assert!(s.iterations() <= 2);\n\n }\n\n\n", "file_path": "src/sim/seq.rs", "rank": 56, "score": 3.4960352325814177 }, { "content": " #[test]\n\n fn test_early_stopping() {\n\n let selector = MaximizeSelector::new(2);\n\n let mut population: Vec<Test> = (0..100).map(|_| Test { f: 0 }).collect();\n\n let mut s = seq::Simulator::builder(&mut population)\n\n .set_selector(Box::new(selector))\n\n .set_early_stop(MyFitness { f: 10 }, 5)\n\n .set_max_iters(10)\n\n .build();\n\n s.run();\n\n assert!(s.iterations() <= 5);\n\n }\n\n\n\n #[test]\n\n fn 
test_selector_error_propagate() {\n\n let selector = MaximizeSelector::new(0);\n\n let mut population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n let mut s = seq::Simulator::builder(&mut population)\n\n .set_selector(Box::new(selector))\n\n .build();\n", "file_path": "src/sim/seq.rs", "rank": 57, "score": 3.427859818529518 }, { "content": "where\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n fn build(self) -> Simulator<'a, T, F> {\n\n self.sim\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(deprecated)]\n\nmod tests {\n\n use sim::*;\n\n use sim::select::*;\n\n use test::Test;\n\n use test::MyFitness;\n\n\n\n #[test]\n\n fn test_kill_off_count() {\n\n let selector = MaximizeSelector::new(2);\n", "file_path": "src/sim/seq.rs", "rank": 58, "score": 3.2458257625050573 }, { "content": "//! Each of the selection algorithms provided has a parameter `count`, which indicates the\n\n//! number of selected parents.\n\n\n\nmod max;\n\nmod max_unstable;\n\nmod tournament;\n\nmod stochastic;\n\n\n\nuse pheno::{Fitness, Phenotype};\n\nuse std::fmt::Debug;\n\n\n\n#[allow(deprecated)]\n\npub use self::max::MaximizeSelector;\n\npub use self::max_unstable::UnstableMaximizeSelector;\n\npub use self::tournament::TournamentSelector;\n\npub use self::stochastic::StochasticSelector;\n\n\n\n/// `Parents` come in a `Vec` of two `T`'s.\n\npub type Parents<T> = Vec<(T, T)>;\n\n\n\n/// A `Selector` can select `Parents` for a new iteration of a `Simulation`.\n", "file_path": "src/sim/select/mod.rs", "rank": 59, "score": 3.188644435549688 }, { "content": " /// * `StepResult::Failure` when an error occurred. Check the result of `get()`.\n\n /// * `StepResult::Done` on convergence or reaching the maximum iterations.\n\n ///\n\n /// Be careful to check for failures when running `step()` in a loop,\n\n /// to avoid infinite loops. 
To run the simulation until convergence or until\n\n /// reaching a maximum number of iterations, consider using `run()` instead:\n\n #[deprecated(note=\"To encourage checking the `StepResult` while maintaining backwards \\\n\n compatibility, this function has been deprecated in favour of `checked_step`.\",\n\n since=\"1.7.0\")]\n\n fn step(&mut self) -> StepResult;\n\n /// Make one step in the simulation. This function returns a `StepResult`:\n\n ///\n\n /// * `StepResult::Success` when a step was successful, but the simulation is not done.\n\n /// * `StepResult::Failure` when an error occurred. Check the result of `get()`.\n\n /// * `StepResult::Done` on convergence or reaching the maximum iterations.\n\n ///\n\n /// Unlike `step`, this function will panic if it is called after a failure.\n\n /// To avoid this panic, match `StepResult` for `StepResult::Failure` and exit gracefully.\n\n fn checked_step(&mut self) -> StepResult;\n\n /// Get the result of the latest step or of a complete run.\n", "file_path": "src/sim/mod.rs", "rank": 60, "score": 3.182580981245344 }, { "content": " index += 2;\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use sim::select::*;\n\n use pheno::*;\n\n use test::Test;\n\n\n\n #[test]\n\n fn test_count_zero() {\n\n let selector = UnstableMaximizeSelector::new(0);\n\n let population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n assert!(selector.select(&population).is_err());\n\n }\n\n\n\n #[test]\n", "file_path": "src/sim/select/max_unstable.rs", "rank": 61, "score": 3.038358758869565 }, { "content": " while index < self.count {\n\n result.push((borrowed[index], borrowed[index + 1]));\n\n index += 2;\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use sim::select::*;\n\n use pheno::*;\n\n use test::Test;\n\n\n\n #[test]\n\n fn test_count_zero() {\n\n let selector = MaximizeSelector::new(0);\n\n let population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n 
assert!(selector.select(&population).is_err());\n\n }\n", "file_path": "src/sim/select/max.rs", "rank": 62, "score": 2.9565504993415748 }, { "content": "// file: earlystopper.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse super::iterlimit::*;\n\nuse pheno::Fitness;\n\n\n\n/// Used for early stopping.\n", "file_path": "src/sim/earlystopper.rs", "rank": 63, "score": 2.9415024255436935 }, { "content": "// file: max.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse pheno::{Fitness, Phenotype};\n\nuse super::*;\n\nuse rayon::prelude::*;\n\n\n", "file_path": "src/sim/select/max_unstable.rs", "rank": 64, "score": 2.9415024255436935 }, { "content": " ///\n\n /// This function will either return the best performing individual,\n\n /// or an error string 
indicating what went wrong.\n\n fn get(&'a self) -> SimResult<'a, T>;\n\n /// Get the number of nanoseconds spent running, or `None` in case of an overflow.\n\n ///\n\n /// When `Self` is `par::Simulator`, i.e. a parallel simulator is used,\n\n /// the duration is the average duration of all child simulators.\n\n fn time(&self) -> Option<NanoSecond>;\n\n /// Get the number of iterations the `Simulator` has executed so far.\n\n ///\n\n /// When `Self` is `par::Simulator`, i.e. a parallel simulator is used,\n\n /// this returns the number of iterations made by the parallel simulator itself.\n\n fn iterations(&self) -> u64;\n\n /// Get the current population.\n\n ///\n\n /// Using this function clones the population out of the `Simulation`, so use\n\n /// it sparingly.\n\n fn population(&self) -> Vec<T>;\n\n}\n", "file_path": "src/sim/mod.rs", "rank": 65, "score": 2.8741338972790498 }, { "content": " T: Phenotype<F>,\n\n F: Fitness,\n\n T: Send,\n\n T: Sync,\n\n{\n\n fn select<'a>(&self, population: &'a [T]) -> Result<Parents<&'a T>, String> {\n\n if self.count == 0 || self.count % 2 != 0 || self.count * 2 >= population.len() {\n\n return Err(format!(\n\n \"Invalid parameter `count`: {}. 
Should be larger than zero, a \\\n\n multiple of two and less than half the population size.\",\n\n self.count\n\n ));\n\n }\n\n\n\n let mut borrowed: Vec<&T> = population.iter().collect();\n\n borrowed.par_sort_unstable_by(|x, y| y.fitness().cmp(&x.fitness()));\n\n let mut index = 0;\n\n let mut result: Parents<&T> = Vec::new();\n\n while index < self.count {\n\n result.push((borrowed[index], borrowed[index + 1]));\n", "file_path": "src/sim/select/max_unstable.rs", "rank": 66, "score": 2.8600821915749055 }, { "content": " result.push((\n\n &population[i],\n\n &population[(i + ratio - 1) % population.len()],\n\n ));\n\n i += ratio - 1;\n\n i %= population.len();\n\n selected += 2;\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use sim::select::*;\n\n use test::Test;\n\n\n\n #[test]\n\n fn test_count_zero() {\n\n let selector = StochasticSelector::new(0);\n", "file_path": "src/sim/select/stochastic.rs", "rank": 67, "score": 2.796899784654765 }, { "content": " }\n\n };\n\n // Create children from the selected parents and mutate them.\n\n children = parents\n\n .iter()\n\n .map(|&(a, b)| a.crossover(b).mutate())\n\n .collect();\n\n }\n\n // Kill off parts of the population at random to make room for the children\n\n self.kill_off(children.len());\n\n self.population.append(&mut children);\n\n\n\n if let Some(ref mut stopper) = self.earlystopper {\n\n let highest_fitness = self.population\n\n .iter()\n\n .max_by_key(|x| x.fitness())\n\n .unwrap()\n\n .fitness();\n\n stopper.update(highest_fitness);\n\n }\n", "file_path": "src/sim/seq.rs", "rank": 68, "score": 2.7895049035022406 }, { "content": " s.run();\n\n assert!(s.get().is_err());\n\n }\n\n\n\n #[test]\n\n fn test_population_get() {\n\n let selector = MaximizeSelector::new(0);\n\n let mut population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n let population_len = population.len();\n\n let s = seq::Simulator::builder(&mut population)\n\n .set_selector(Box::new(selector))\n\n 
.build();\n\n let gotten_population = s.population();\n\n assert!(gotten_population.len() == population_len);\n\n }\n\n}\n", "file_path": "src/sim/seq.rs", "rank": 69, "score": 2.7730028631360417 }, { "content": "// file: max.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#![allow(deprecated)]\n\n\n\nuse pheno::{Fitness, Phenotype};\n\nuse super::*;\n", "file_path": "src/sim/select/max.rs", "rank": 70, "score": 2.713052681902802 }, { "content": "// file: test.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n// This is a private module containing code used in\n\n// several tests across the library.\n\n\n\nuse pheno::*;\n", "file_path": "src/test.rs", "rank": 71, "score": 2.701926317557729 }, { "content": " self.partial_cmp(other).unwrap_or(Ordering::Equal)\n\n 
}\n\n}\n\n\n\nimpl Fitness for MyFitness {\n\n fn zero() -> MyFitness {\n\n MyFitness { f: 0.0 }\n\n }\n\n\n\n fn abs_diff(&self, other: &MyFitness) -> MyFitness {\n\n MyFitness {\n\n f: (self.f - other.f).abs(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/max_parabole.rs", "rank": 72, "score": 2.681461163321555 }, { "content": " self.partial_cmp(other).unwrap_or(Ordering::Equal)\n\n }\n\n}\n\n\n\nimpl Fitness for MyFitness {\n\n fn zero() -> MyFitness {\n\n MyFitness { f: 0.0 }\n\n }\n\n\n\n fn abs_diff(&self, other: &MyFitness) -> MyFitness {\n\n MyFitness {\n\n f: (self.f - other.f).abs(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/max_parabole_steps.rs", "rank": 73, "score": 2.681461163321555 }, { "content": "/// A `Builder` can create new instances of an object.\n\n/// For this library, only `Simulation` objects use this `Builder`.\n\npub trait Builder<T: ?Sized> {\n\n /// Return the result.\n\n fn build(self) -> T\n\n where\n\n T: Sized;\n\n}\n\n\n\n/// Simulation run time is defined in nanoseconds.\n\npub type NanoSecond = i64;\n\n/// The result of a simulation, containing the best phenotype\n\n/// or an error message.\n\npub type SimResult<'a, T> = Result<&'a T, &'a str>;\n\n\n\n/// The result of running a single step.\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum StepResult {\n\n /// The step was successful, but the simulation has not finished.\n\n Success,\n\n /// The step was not successful.\n\n Failure,\n", "file_path": "src/sim/mod.rs", "rank": 74, "score": 2.6703863216329298 }, { "content": "pub struct Test {\n\n pub f: i64,\n\n}\n\n\n\nimpl Phenotype<MyFitness> for Test {\n\n fn fitness(&self) -> MyFitness {\n\n MyFitness { f: self.f.abs() }\n\n }\n\n\n\n fn crossover(&self, t: &Test) -> Test {\n\n Test {\n\n f: cmp::min(self.f, t.f),\n\n }\n\n }\n\n\n\n fn mutate(&self) -> Test {\n\n if self.f < 0 {\n\n Test { f: self.f + 1 }\n\n } else if self.f > 0 {\n\n Test { f: self.f - 1 }\n\n } else {\n\n *self\n\n }\n\n }\n\n}\n", 
"file_path": "src/test.rs", "rank": 75, "score": 2.537667664644937 }, { "content": "// file: mod.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse pheno::{Fitness, Phenotype};\n\n\n\npub mod seq;\n\npub mod select;\n\npub mod types;\n\nmod iterlimit;\n\nmod earlystopper;\n\n\n\n/// A `Builder` can create new instances of an object.\n\n/// For this library, only `Simulation` objects use this `Builder`.\n", "file_path": "src/sim/mod.rs", "rank": 76, "score": 2.5355515443091 }, { "content": " TournamentSelector {\n\n count: count,\n\n participants: participants,\n\n }\n\n }\n\n\n\n /// Create and return a tournament selector.\n\n ///\n\n /// Such a selector runs `count / 2` tournaments, each with `participants` participants.\n\n /// From each tournament, the best 2 phenotypes are selected, yielding\n\n /// `count` parents.\n\n ///\n\n /// * `count`: must be larger than zero, a multiple of two and less than the population size.\n\n /// * `participants`: must be larger than one and less than the population size.\n\n pub fn new_checked(count: usize, participants: usize) -> Result<TournamentSelector, String> {\n\n if count == 0 || count % 2 != 0 || participants < 2 {\n\n Err(String::from(\"count must be larger than zero and a multiple of two; participants must be larger than one\",),)\n\n } else {\n\n Ok(TournamentSelector {\n\n count: count,\n", "file_path": 
"src/sim/select/tournament.rs", "rank": 77, "score": 2.5225789247519224 }, { "content": "}\n\n\n\n#[cfg(test)]\n\n#[allow(deprecated)]\n\nmod tests {\n\n use sim::select::*;\n\n use test::Test;\n\n\n\n #[test]\n\n fn test_count_zero() {\n\n let selector = TournamentSelector::new(0, 1);\n\n let population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n assert!(selector.select(&population).is_err());\n\n }\n\n\n\n #[test]\n\n fn test_participants_zero() {\n\n let selector = TournamentSelector::new(2, 0);\n\n let population: Vec<Test> = (0..100).map(|i| Test { f: i }).collect();\n\n assert!(selector.select(&population).is_err());\n", "file_path": "src/sim/select/tournament.rs", "rank": 78, "score": 2.495030346410954 }, { "content": "// file: seq.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Contains a sequential implementation of `::sim::Simulation`,\n\n//! called a `Simulator`.\n\n//!\n\n//! 
To use a `Simulator`, you need a `SimulatorBuilder`, which you can\n", "file_path": "src/sim/seq.rs", "rank": 79, "score": 2.2753063726801925 }, { "content": " match self.error {\n\n Some(ref e) => Err(e),\n\n None => Ok(self.population.iter().max_by_key(|x| x.fitness()).unwrap()),\n\n }\n\n }\n\n\n\n fn iterations(&self) -> u64 {\n\n self.iter_limit.get()\n\n }\n\n\n\n fn time(&self) -> Option<NanoSecond> {\n\n self.duration\n\n }\n\n\n\n fn population(&self) -> Vec<T> {\n\n self.population.clone()\n\n }\n\n}\n\n\n\nimpl<'a, T, F> Simulator<'a, T, F>\n", "file_path": "src/sim/seq.rs", "rank": 80, "score": 2.2698784828424214 }, { "content": "// file: mod.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! The selection module provides a trait that can be implemented\n\n//! to implement new selection algorithms. This module also provides a couple\n\n//! 
of useful selection algorithms.\n\n//!\n", "file_path": "src/sim/select/mod.rs", "rank": 81, "score": 2.251974683050576 }, { "content": "/// A `Fitness` value is used to determine the quality of a `Phenotype`.\n\n/// `Fitness` values should have an ordering.\n\n///\n\n/// **Make sure the following statement holds:**\n\n/// A `Phenotype` with a `Fitness` value of `f1` performs better than\n\n/// another `Phenotype` with a `Fitness` value of `f2` iff `f1 > f2`.\n\npub trait Fitness: Ord + Eq {\n\n /// Get the zero value of this `Fitness` value.\n\n /// The internal value should be 0.\n\n fn zero() -> Self;\n\n /// Get the absolute difference between two `Fitness` values.\n\n fn abs_diff(&self, other: &Self) -> Self;\n\n}\n\n\n", "file_path": "src/pheno.rs", "rank": 82, "score": 2.2129440859642497 }, { "content": "// file: max_parabole_steps.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This simple example shows how to use a simulator\n\n//! that finds the maximum of the function f(x) = 10-(x+3)^2 (which is (-3,10)).\n\n//! This example is the same as the `max_parabole` example, but it runs in steps\n\n//! 
and prints out intermediate results.\n", "file_path": "examples/max_parabole_steps.rs", "rank": 83, "score": 2.2067179571050257 }, { "content": " if self.error.is_some() {\n\n panic!(\"Attempt to step a Simulator after an error!\")\n\n } else {\n\n self.step()\n\n }\n\n }\n\n\n\n #[allow(deprecated)]\n\n fn run(&mut self) -> RunResult {\n\n // Loop until Failure or Done.\n\n loop {\n\n match self.step() {\n\n StepResult::Success => {}\n\n StepResult::Failure => return RunResult::Failure,\n\n StepResult::Done => return RunResult::Done,\n\n }\n\n }\n\n }\n\n\n\n fn get(&'a self) -> SimResult<'a, T> {\n", "file_path": "src/sim/seq.rs", "rank": 84, "score": 2.1454980043231258 }, { "content": " limit.inc();\n\n }\n\n assert_eq!(limit.reached(), false);\n\n limit.reset();\n\n assert_eq!(limit.reached(), false);\n\n }\n\n\n\n #[test]\n\n fn test_iter_limit_reached() {\n\n let mut limit = IterLimit::new(5);\n\n for _ in 0..5 {\n\n limit.inc();\n\n }\n\n assert!(limit.reached());\n\n for _ in 0..10 {\n\n limit.inc();\n\n }\n\n assert!(limit.reached());\n\n assert_eq!(limit.get(), 15);\n\n }\n\n}\n", "file_path": "src/sim/iterlimit.rs", "rank": 85, "score": 2.1454980043231258 }, { "content": "\n\n self.iter_limit.inc();\n\n self.duration = match self.duration {\n\n Some(x) => {\n\n let elapsed = time_start.elapsed();\n\n let y = elapsed.as_secs() as NanoSecond * 1_000_000_000\n\n + u64::from(elapsed.subsec_nanos()) as NanoSecond;\n\n Some(x + y)\n\n }\n\n None => None,\n\n };\n\n\n\n StepResult::Success // Not done yet, but successful\n\n } else {\n\n StepResult::Done\n\n }\n\n }\n\n\n\n #[allow(deprecated)]\n\n fn checked_step(&mut self) -> StepResult {\n", "file_path": "src/sim/seq.rs", "rank": 86, "score": 2.1454980043231258 }, { "content": " );\n\n return StepResult::Failure;\n\n }\n\n\n\n let should_stop = match self.earlystopper {\n\n Some(ref x) => self.iter_limit.reached() || x.reached(),\n\n None => self.iter_limit.reached(),\n\n };\n\n\n\n if !should_stop {\n\n 
time_start = Instant::now();\n\n\n\n let mut children: Vec<T>;\n\n {\n\n // Perform selection\n\n let parents = match self.selector.select(self.population) {\n\n Ok(parents) => parents,\n\n Err(e) => {\n\n self.error = Some(e);\n\n return StepResult::Failure;\n", "file_path": "src/sim/seq.rs", "rank": 87, "score": 2.1260359396975534 }, { "content": "//! ## Available Selection Types\n\n//!\n\n//! There are currently four selection types available:\n\n//!\n\n//! * Maximize\n\n//! * Tournament\n\n//! * Stochastic\n\n//!\n\n//! There is a short explanation for each of these below. For more information, look at the\n\n//! documentation of individual selectors.\n\n//!\n\n//! ### Maximize\n\n//!\n\n//! Maximize takes 1 parameter: the count. This is half the number of parents\n\n//! that will be selected. Selection happens by taking the top `count` individuals,\n\n//! ranked by fitness. The resulting number of parents is `count`.\n\n//!\n\n//! ### Tournament\n\n//!\n\n//! Tournament takes 2 parameters: the number of tournaments (`count`) and `participators`,\n", "file_path": "src/lib.rs", "rank": 88, "score": 2.125643124539526 }, { "content": "// file: pheno.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n/// A `Fitness` value is used to determine the quality of a `Phenotype`.\n\n/// `Fitness` values should have an ordering.\n\n///\n\n/// **Make sure the following 
statement holds:**\n\n/// A `Phenotype` with a `Fitness` value of `f1` performs better than\n\n/// another `Phenotype` with a `Fitness` value of `f2` iff `f1 > f2`.\n", "file_path": "src/pheno.rs", "rank": 89, "score": 2.121450648097877 }, { "content": " sim: Simulator {\n\n population: population,\n\n iter_limit: IterLimit::new(100),\n\n selector: Box::new(MaximizeSelector::new(3)),\n\n earlystopper: None,\n\n duration: Some(0),\n\n error: None,\n\n phantom: PhantomData::default(),\n\n },\n\n }\n\n }\n\n\n\n fn step(&mut self) -> StepResult {\n\n let time_start;\n\n\n\n if self.population.is_empty() {\n\n self.error = Some(\n\n \"Tried to run a simulator without a population, or the \\\n\n population was empty.\"\n\n .to_string(),\n", "file_path": "src/sim/seq.rs", "rank": 90, "score": 2.0881521916630277 }, { "content": " participants: participants,\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl<T, F> Selector<T, F> for TournamentSelector\n\nwhere\n\n T: Phenotype<F>,\n\n F: Fitness,\n\n{\n\n fn select<'a>(&self, population: &'a [T]) -> Result<Parents<&'a T>, String> {\n\n if self.count == 0 || self.count % 2 != 0 || self.count * 2 >= population.len() {\n\n return Err(format!(\n\n \"Invalid parameter `count`: {}. 
Should be larger than zero, a \\\n\n multiple of two and less than half the population size.\",\n\n self.count\n\n ));\n\n }\n\n if self.participants == 0 || self.participants >= population.len() {\n", "file_path": "src/sim/select/tournament.rs", "rank": 91, "score": 2.0355482884677736 }, { "content": "#[derive(Copy, Clone, Debug)]\n\npub struct EarlyStopper<F: Fitness> {\n\n /// Minimum difference required for early stopping.\n\n delta: F,\n\n /// Previously recorded fitness value.\n\n previous: F,\n\n /// The number of iterations before stopping early.\n\n iter_limit: IterLimit,\n\n}\n\n\n\nimpl<F: Fitness> EarlyStopper<F> {\n\n /// Create a new `EarlyStopper`.\n\n pub fn new(delta: F, n_iters: u64) -> EarlyStopper<F> {\n\n EarlyStopper {\n\n delta: delta,\n\n previous: F::zero(),\n\n iter_limit: IterLimit::new(n_iters),\n\n }\n\n }\n\n\n", "file_path": "src/sim/earlystopper.rs", "rank": 92, "score": 1.775834081743425 }, { "content": "// file: iterlimit.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n/// An iteration limiter.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct IterLimit {\n\n /// Maximum number of iterations allowed.\n", "file_path": "src/sim/iterlimit.rs", "rank": 93, "score": 1.5886326033788496 }, { "content": "// file: lib.rs\n\n//\n\n// Copyright 2015-2017 The RsGenetic Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the 
\"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// \thttp://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! # `RsGenetic`\n\n//!\n\n//! `RsGenetic` provides a simple framework for genetic algorithms.\n\n//! You need to provide the definition of a Phenotype (also known as an Individual),\n", "file_path": "src/lib.rs", "rank": 94, "score": 1.5548893450233092 }, { "content": "## Note\n\n\n\nThis library is currently in maintenance mode. There have been some indications that the API\n\nneeds an update to be more flexible, which would require an incrementation of the major version number (#23, #30).\n\nUnfortunately, I currently do not have the time to implement such a redesign. I will however continue to reply to issues\n\nand merge pull requests, but features might not be implemented by me, depending on their size.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nContributions are always welcome. Take a look at the issues for any enhancements that need to be\n\ndone or bugs that need to be fixed. 
If you encounter any bugs while using the library, feel free to\n\nopen an issue and/or fix the bug, and submit pull requests.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any\n\nadditional terms or conditions.\n", "file_path": "README.md", "rank": 95, "score": 1.23906889579718 } ]
Rust
src/bin/cmd/wallet_tests.rs
icook/grin
5915580ab372dd85c0cc615d9df1c9aa6329f0f0
#[cfg(test)] mod wallet_tests { use chrono; use clap; use grin_util as util; use grin_wallet; use serde; use grin_wallet::test_framework::{self, LocalWalletClient, WalletProxy}; use clap::{App, ArgMatches}; use grin_util::Mutex; use std::sync::Arc; use std::thread; use std::time::Duration; use std::{env, fs}; use grin_config::GlobalWalletConfig; use grin_core::global; use grin_core::global::ChainTypes; use grin_keychain::ExtKeychain; use grin_wallet::{LMDBBackend, WalletBackend, WalletConfig, WalletInst, WalletSeed}; use super::super::wallet_args; fn clean_output_dir(test_dir: &str) { let _ = fs::remove_dir_all(test_dir); } fn setup(test_dir: &str) { util::init_test_logger(); clean_output_dir(test_dir); global::set_mining_mode(ChainTypes::AutomatedTesting); } pub fn config_command_wallet( dir_name: &str, wallet_name: &str, ) -> Result<(), grin_wallet::Error> { let mut current_dir; let mut default_config = GlobalWalletConfig::default(); current_dir = env::current_dir().unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); current_dir.push(dir_name); current_dir.push(wallet_name); let _ = fs::create_dir_all(current_dir.clone()); let mut config_file_name = current_dir.clone(); config_file_name.push("grin-wallet.toml"); if config_file_name.exists() { return Err(grin_wallet::ErrorKind::ArgumentError( "grin-wallet.toml already exists in the target directory. 
Please remove it first" .to_owned(), ))?; } default_config.update_paths(&current_dir); default_config .write_to_file(config_file_name.to_str().unwrap()) .unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); println!( "File {} configured and created", config_file_name.to_str().unwrap(), ); Ok(()) } pub fn initial_setup_wallet(dir_name: &str, wallet_name: &str) -> WalletConfig { let mut current_dir; current_dir = env::current_dir().unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); current_dir.push(dir_name); current_dir.push(wallet_name); let _ = fs::create_dir_all(current_dir.clone()); let mut config_file_name = current_dir.clone(); config_file_name.push("grin-wallet.toml"); GlobalWalletConfig::new(config_file_name.to_str().unwrap()) .unwrap() .members .unwrap() .wallet } fn get_wallet_subcommand<'a>( wallet_dir: &str, wallet_name: &str, args: ArgMatches<'a>, ) -> ArgMatches<'a> { match args.subcommand() { ("wallet", Some(wallet_args)) => { if let ("init", Some(init_args)) = wallet_args.subcommand() { if init_args.is_present("here") { let _ = config_command_wallet(wallet_dir, wallet_name); } } wallet_args.to_owned() } _ => ArgMatches::new(), } } fn instantiate_wallet( mut wallet_config: WalletConfig, node_client: LocalWalletClient, passphrase: &str, account: &str, ) -> Result<Arc<Mutex<WalletInst<LocalWalletClient, ExtKeychain>>>, grin_wallet::Error> { wallet_config.chain_type = None; let _ = WalletSeed::from_file(&wallet_config, passphrase)?; let mut db_wallet = LMDBBackend::new(wallet_config.clone(), passphrase, node_client)?; db_wallet.set_parent_key_id_by_name(account)?; info!("Using LMDB Backend for wallet"); Ok(Arc::new(Mutex::new(db_wallet))) } fn execute_command( app: &App, test_dir: &str, wallet_name: &str, client: &LocalWalletClient, arg_vec: Vec<&str>, ) -> Result<String, grin_wallet::Error> { let args = app.clone().get_matches_from(arg_vec); let args = get_wallet_subcommand(test_dir, wallet_name, args.clone()); let 
mut config = initial_setup_wallet(test_dir, wallet_name); config.chain_type = None; wallet_args::wallet_command(&args, config.clone(), client.clone()) } fn command_line_test_impl(test_dir: &str) -> Result<(), grin_wallet::Error> { setup(test_dir); let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir); let chain = wallet_proxy.chain.clone(); let yml = load_yaml!("../grin.yml"); let app = App::from_yaml(yml); let arg_vec = vec!["grin", "wallet", "-p", "password", "init", "-h"]; let client1 = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone()); execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone()).is_err()); let client1 = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone()); let config1 = initial_setup_wallet(test_dir, "wallet1"); let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; wallet_proxy.add_wallet("wallet1", client1.get_send_instance(), wallet1.clone()); let client2 = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone()); execute_command(&app, test_dir, "wallet2", &client2, arg_vec.clone())?; let config2 = initial_setup_wallet(test_dir, "wallet2"); let wallet2 = instantiate_wallet(config2.clone(), client2.clone(), "password", "default")?; wallet_proxy.add_wallet("wallet2", client2.get_send_instance(), wallet2.clone()); thread::spawn(move || { if let Err(e) = wallet_proxy.run() { error!("Wallet Proxy error: {}", e); } }); let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "mining", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_1", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_1", ]; execute_command(&app, test_dir, "wallet2", &client2, 
arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet2", &client2, arg_vec).is_err()); let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_2", ]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "account"]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "account"]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; Ok(()) })?; let mut bh = 10u64; let _ = test_framework::award_blocks_to_wallet(&chain, wallet1.clone(), bh as usize); let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "info"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let file_name = format!("{}/tx1.part_tx", test_dir); let response_file_name = format!("{}/tx1.part_tx.response", test_dir); let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "file", "-d", &file_name, "-g", "Love, Yeast", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "account_1", "receive", "-i", &file_name, "-g", "Thanks, Yeast!", ]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet2", &client2, arg_vec).is_err()); let arg_vec = vec![ "grin", "wallet", "-p", "password", "finalize", "-i", &response_file_name, ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, 
None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize); Ok(()) })?; let _ = test_framework::award_blocks_to_wallet(&chain, wallet1.clone(), 10); bh += 10; let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "info"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "account_1", "info", ]; execute_command(&app, test_dir, "wallet2", &client1, arg_vec)?; let wallet2 = instantiate_wallet(config2.clone(), client2.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet2.clone(), |api| { api.set_active_account("account_1")?; let (_, wallet1_info) = api.retrieve_summary_info(true, 1)?; assert_eq!(wallet1_info.last_confirmed_height, bh); assert_eq!(wallet1_info.amount_currently_spendable, 10_000_000_000); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "file", "-d", &file_name, "-g", "Love, Yeast, Smallest", "-s", "smallest", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "receive", "-i", &file_name, "-g", "Thanks, Yeast!", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone())?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "finalize", "-i", &response_file_name, ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize + 1); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "self", "-d", "mining", "-g", "Self love", "-o", "75", "-s", "smallest", "10", ]; execute_command(&app, test_dir, "wallet1", 
&client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize + 2); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "file", "-d", &file_name, "-g", "Ain't sending", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "check_repair"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "txs"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "outputs", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; thread::sleep(Duration::from_millis(200)); Ok(()) } #[test] fn wallet_command_line() { let test_dir = "target/test_output/command_line"; if let Err(e) = command_line_test_impl(test_dir) { panic!("Libwallet Error: {} - {}", e, e.backtrace().unwrap()); } } }
#[cfg(test)] mod wallet_tests { use chrono; use clap; use grin_util as util; use grin_wallet; use serde; use grin_wallet::test_framework::{self, LocalWalletClient, WalletProxy}; use clap::{App, ArgMatches}; use grin_util::Mutex; use std::sync::Arc; use std::thread; use std::time::Duration; use std::{env, fs}; use grin_config::GlobalWalletConfig; use grin_core::global; use grin_core::global::ChainTypes; use grin_keychain::ExtKeychain; use grin_wallet::{LMDBBackend, WalletBackend, WalletConfig, WalletInst, WalletSeed}; use super::super::wallet_args; fn clean_output_dir(test_dir: &str) { let _ = fs::remove_dir_all(test_dir); } fn setup(test_dir: &str) { util::init_test_logger(); clean_output_dir(test_dir); global::set_mining_mode(ChainTypes::AutomatedTesting); } pub fn config_command_wallet( dir_name: &str, wallet_name: &str, ) -> Result<(), grin_wallet::Error> { let mut current_dir; let mut default_config = GlobalWalletConfig::default(); current_dir = env::current_dir().unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); current_dir.push(dir_name); current_dir.push(wallet_name); let _ = fs::create_dir_all(current_dir.clone()); let mut config_file_name = current_dir.clone(); config_file_name.push("grin-wallet.toml"); if config_file_name.exists() { return Err(grin_wallet::ErrorKind::ArgumentError( "grin-wallet.toml already exists in the target directory. 
Please remove it first" .to_owned(), ))?; } default_config.update_paths(&current_dir); default_config .write_to_file(config_file_name.to_str().unwrap()) .unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); println!( "File {} configured and created", config_file_name.to_str().unwrap(), ); Ok(()) } pub fn initial_setup_wallet(dir_name: &str, wallet_name: &str) -> WalletConfig { let mut current_dir; current_dir = env::current_dir().unwrap_or_else(|e| { panic!("Error creating config file: {}", e); }); current_dir.push(dir_name); current_dir.push(wallet_name); let _ = fs::create_dir_all(current_dir.clone()); let mut config_file_name = current_dir.clone(); config_file_name.push("grin-wallet.toml"); GlobalWalletConfig::new(config_file_name.to_str().unwrap()) .unwrap() .members .unwrap() .wallet } fn get_wallet_subcommand<'a>( wallet_dir: &str, wallet_name: &str, args: ArgMatches<'a>, ) -> ArgMatches<'a> { match args.subcommand() { ("wallet", Some(wallet_args)) => { if let ("init", Some(init_args)) = wallet_args.subcommand() { if init_args.is_present("here") { let _ = config_command_wallet(wallet_dir, wallet_name); } } wallet_args.to_owned() } _ => ArgMatches::new(), } } fn instantiate_wallet( mut wallet_config: WalletConfig, node_client: LocalWalletClient, passphrase: &str, account: &str, ) -> Result<Arc<Mutex<WalletInst<LocalWalletClient, ExtKeychain>>>, grin_wallet::Error> { wallet_config.chain_type = None; let _ = WalletSeed::from_file(&wallet_config, passphrase)?; let mut db_wallet = LMDBBackend::new(wallet_config.clone(), passphrase, node_client)?; db_wallet.set_parent_key_id_by_name(account)?; info!("Using LMDB Backend for wallet"); Ok(Arc::new(Mutex::new(db_wallet))) } fn execute_command( app: &App, test_dir: &str, wallet_name: &str, client: &LocalWalletClient, arg_vec: Vec<&str>, ) -> Result<String, grin_wallet::Error> { let args = app.clone().get_matches_from(arg_vec); let args = get_wallet_subcommand(test_dir, wallet_name, args.clone()); let 
mut config = initial_setup_wallet(test_dir, wallet_name); config.chain_type = None; wallet_args::wallet_command(&args, config.clone(), client.clone()) } fn command_line_test_impl(test_dir: &str) -> Result<(), grin_wallet::Error> { setup(test_dir); let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir); let chain = wallet_proxy.chain.clone(); let yml = load_yaml!("../grin.yml"); let app = App::from_yaml(yml); let arg_vec = vec!["grin", "wallet", "-p", "password", "init", "-h"]; let client1 = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone()); execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone()).is_err()); let client1 = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone()); let config1 = initial_setup_wallet(test_dir, "wallet1"); let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; wallet_proxy.add_wallet("wallet1", client1.get_send_instance(), wallet1.clone()); let client2 = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone()); execute_command(&app, test_dir, "wallet2", &client2, arg_vec.clone())?; let config2 = initial_setup_wallet(test_dir, "wallet2"); let wallet2 = instantiate_wallet(config2.clone(), client2.clone(), "password", "default")?; wallet_proxy.add_wallet("wallet2", client2.get_send_instance(), wallet2.clone()); thread::spawn(move || { if let Err(e) = wallet_proxy.run() { error!("Wallet Proxy error: {}", e); } }); let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "mining", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_1", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_1", ]; execute_command(&app, test_dir, "wallet2", &client2, 
arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet2", &client2, arg_vec).is_err()); let arg_vec = vec![ "grin", "wallet", "-p", "password", "account", "-c", "account_2", ]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "account"]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "account"]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec)?; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; Ok(()) })?; let mut bh = 10u64; let _ = test_framework::award_blocks_to_wallet(&chain, wallet1.clone(), bh as usize); let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "info"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let file_name = format!("{}/tx1.part_tx", test_dir); let response_file_name = format!("{}/tx1.part_tx.response", test_dir); let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "file", "-d", &file_name, "-g", "Love, Yeast", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "account_1", "receive", "-i", &file_name, "-g", "Thanks, Yeast!", ]; execute_command(&app, test_dir, "wallet2", &client2, arg_vec.clone())?; assert!(execute_command(&app, test_dir, "wallet2", &client2, arg_vec).is_err()); let arg_vec = vec![ "grin", "wallet", "-p", "password", "finalize", "-i", &response_file_name, ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, 
None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize); Ok(()) })?; let _ = test_framework::award_blocks_to_wallet(&chain, wallet1.clone(), 10); bh += 10; let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "info"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "account_1", "info", ]; execute_command(&app, test_dir, "wallet2", &client1, arg_vec)?; let wallet2 = instantiate_wallet(config2.clone(), client2.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet2.clone(), |api| { api.set_active_account("account_1")?; let (_, wallet1_info) = api.retrieve_summary_info(true, 1)?; assert_eq!(wallet1_info.last_confirmed_height, bh); assert_eq!(wallet1_info.amount_currently_spendable, 10_000_000_000); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mini
e_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "check_repair"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec!["grin", "wallet", "-p", "password", "-a", "mining", "txs"]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "outputs", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; thread::sleep(Duration::from_millis(200)); Ok(()) } #[test] fn wallet_command_line() { let test_dir = "target/test_output/command_line"; if let Err(e) = command_line_test_impl(test_dir) { panic!("Libwallet Error: {} - {}", e, e.backtrace().unwrap()); } } }
ng", "send", "-m", "file", "-d", &file_name, "-g", "Love, Yeast, Smallest", "-s", "smallest", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "receive", "-i", &file_name, "-g", "Thanks, Yeast!", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec.clone())?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "finalize", "-i", &response_file_name, ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize + 1); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "self", "-d", "mining", "-g", "Self love", "-o", "75", "-s", "smallest", "10", ]; execute_command(&app, test_dir, "wallet1", &client1, arg_vec)?; bh += 1; let wallet1 = instantiate_wallet(config1.clone(), client1.clone(), "password", "default")?; grin_wallet::controller::owner_single_use(wallet1.clone(), |api| { api.set_active_account("mining")?; let (refreshed, txs) = api.retrieve_txs(true, None, None)?; assert!(refreshed); assert_eq!(txs.len(), bh as usize + 2); Ok(()) })?; let arg_vec = vec![ "grin", "wallet", "-p", "password", "-a", "mining", "send", "-m", "file", "-d", &file_name, "-g", "Ain't sending", "10", ]; execut
random
[ { "content": "/// Returns a list of account to BIP32 path mappings\n\npub fn accounts<T: ?Sized, C, K>(wallet: &mut T) -> Result<Vec<AcctPathMapping>, Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tOk(wallet.acct_path_iter().collect())\n\n}\n\n\n", "file_path": "wallet/src/libwallet/internal/keys.rs", "rank": 0, "score": 524528.4183088264 }, { "content": "/// Check whether seed file exists\n\npub fn wallet_seed_exists(config: &WalletConfig) -> Result<(), Error> {\n\n\tlet res = WalletSeed::seed_file_exists(&config)?;\n\n\tOk(res)\n\n}\n\n\n", "file_path": "wallet/src/command.rs", "rank": 1, "score": 512140.1242666476 }, { "content": "pub fn recover(config: &WalletConfig, args: RecoverArgs) -> Result<(), Error> {\n\n\tif args.recovery_phrase.is_none() {\n\n\t\tlet res = WalletSeed::from_file(config, &args.passphrase);\n\n\t\tif let Err(e) = res {\n\n\t\t\terror!(\"Error loading wallet seed (check password): {}\", e);\n\n\t\t\treturn Err(e);\n\n\t\t}\n\n\t\tlet _ = res.unwrap().show_recovery_phrase();\n\n\t} else {\n\n\t\tlet res = WalletSeed::recover_from_phrase(\n\n\t\t\t&config,\n\n\t\t\t&args.recovery_phrase.as_ref().unwrap(),\n\n\t\t\t&args.passphrase,\n\n\t\t);\n\n\t\tif let Err(e) = res {\n\n\t\t\terror!(\"Error recovering seed - {}\", e);\n\n\t\t\treturn Err(e);\n\n\t\t}\n\n\t}\n\n\tOk(())\n\n}\n\n\n\n/// Arguments for listen command\n\npub struct ListenArgs {\n\n\tpub method: String,\n\n}\n\n\n", "file_path": "wallet/src/command.rs", "rank": 2, "score": 497379.6450439276 }, { "content": "fn collect_chain_outputs<T, C, K>(wallet: &mut T) -> Result<Vec<OutputResult>, Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tlet batch_size = 1000;\n\n\tlet mut start_index = 1;\n\n\tlet mut result_vec: Vec<OutputResult> = vec![];\n\n\tloop {\n\n\t\tlet (highest_index, last_retrieved_index, outputs) = wallet\n\n\t\t\t.w2n_client()\n\n\t\t\t.get_outputs_by_pmmr_index(start_index, 
batch_size)?;\n\n\t\twarn!(\n\n\t\t\t\"Checking {} outputs, up to index {}. (Highest index: {})\",\n\n\t\t\toutputs.len(),\n\n\t\t\thighest_index,\n\n\t\t\tlast_retrieved_index,\n\n\t\t);\n\n\n\n\t\tresult_vec.append(&mut identify_utxo_outputs(wallet, outputs.clone())?);\n\n\n\n\t\tif highest_index == last_retrieved_index {\n\n\t\t\tbreak;\n\n\t\t}\n\n\t\tstart_index = last_retrieved_index + 1;\n\n\t}\n\n\tOk(result_vec)\n\n}\n\n\n", "file_path": "wallet/src/libwallet/internal/restore.rs", "rank": 3, "score": 496930.78302160534 }, { "content": "/// Adds an new parent account path with a given label\n\npub fn new_acct_path<T: ?Sized, C, K>(wallet: &mut T, label: &str) -> Result<Identifier, Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tlet label = label.to_owned();\n\n\tif let Some(_) = wallet.acct_path_iter().find(|l| l.label == label) {\n\n\t\treturn Err(ErrorKind::AccountLabelAlreadyExists(label.clone()).into());\n\n\t}\n\n\n\n\t// We're always using paths at m/k/0 for parent keys for output derivations\n\n\t// so find the highest of those, then increment (to conform with external/internal\n\n\t// derivation chains in BIP32 spec)\n\n\n\n\tlet highest_entry = wallet.acct_path_iter().max_by(|a, b| {\n\n\t\t<u32>::from(a.path.to_path().path[0]).cmp(&<u32>::from(b.path.to_path().path[0]))\n\n\t});\n\n\n\n\tlet return_id = {\n", "file_path": "wallet/src/libwallet/internal/keys.rs", "rank": 4, "score": 478171.92802772997 }, { "content": "pub fn listen(config: &WalletConfig, args: &ListenArgs, g_args: &GlobalArgs) -> Result<(), Error> {\n\n\tlet mut params = HashMap::new();\n\n\tparams.insert(\"api_listen_addr\".to_owned(), config.api_listen_addr());\n\n\tif let Some(t) = g_args.tls_conf.as_ref() {\n\n\t\tparams.insert(\"certificate\".to_owned(), t.certificate.clone());\n\n\t\tparams.insert(\"private_key\".to_owned(), t.private_key.clone());\n\n\t}\n\n\tlet adapter = match args.method.as_str() {\n\n\t\t\"http\" => 
HTTPWalletCommAdapter::new(),\n\n\t\t\"keybase\" => KeybaseWalletCommAdapter::new(),\n\n\t\t_ => NullWalletCommAdapter::new(),\n\n\t};\n\n\n\n\tlet res = adapter.listen(\n\n\t\tparams,\n\n\t\tconfig.clone(),\n\n\t\t&g_args.password.clone().unwrap(),\n\n\t\t&g_args.account,\n\n\t\tg_args.node_api_secret.clone(),\n\n\t);\n\n\tif let Err(e) = res {\n\n\t\treturn Err(ErrorKind::LibWallet(e.kind(), e.cause_string()).into());\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/command.rs", "rank": 5, "score": 478139.4252514676 }, { "content": "/// Update the stored transaction (this update needs to happen when the TX is finalised)\n\npub fn update_stored_tx<T: ?Sized, C, K>(wallet: &mut T, slate: &Slate) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\t// finalize command\n\n\tlet tx_vec = updater::retrieve_txs(wallet, None, Some(slate.id), None)?;\n\n\tlet mut tx = None;\n\n\t// don't want to assume this is the right tx, in case of self-sending\n\n\tfor t in tx_vec {\n\n\t\tif t.tx_type == TxLogEntryType::TxSent {\n\n\t\t\ttx = Some(t.clone());\n\n\t\t\tbreak;\n\n\t\t}\n\n\t}\n\n\tlet tx = match tx {\n\n\t\tSome(t) => t,\n\n\t\tNone => return Err(ErrorKind::TransactionDoesntExist(slate.id.to_string()))?,\n\n\t};\n", "file_path": "wallet/src/libwallet/internal/tx.rs", "rank": 6, "score": 477063.31800595333 }, { "content": "/// Restore a wallet\n\npub fn restore<T, C, K>(wallet: &mut T) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\t// Don't proceed if wallet_data has anything in it\n\n\tlet is_empty = wallet.iter().next().is_none();\n\n\tif !is_empty {\n\n\t\terror!(\"Not restoring. 
Please back up and remove existing db directory first.\");\n\n\t\treturn Ok(());\n\n\t}\n\n\n\n\twarn!(\"Starting restore.\");\n\n\n\n\tlet result_vec = collect_chain_outputs(wallet)?;\n\n\n\n\twarn!(\n\n\t\t\"Identified {} wallet_outputs as belonging to this wallet\",\n\n\t\tresult_vec.len(),\n", "file_path": "wallet/src/libwallet/internal/restore.rs", "rank": 7, "score": 474769.0283361067 }, { "content": "/// Check / repair wallet contents\n\n/// assume wallet contents have been freshly updated with contents\n\n/// of latest block\n\npub fn check_repair<T, C, K>(wallet: &mut T) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\t// First, get a definitive list of outputs we own from the chain\n\n\twarn!(\"Starting wallet check.\");\n\n\tlet chain_outs = collect_chain_outputs(wallet)?;\n\n\twarn!(\n\n\t\t\"Identified {} wallet_outputs as belonging to this wallet\",\n\n\t\tchain_outs.len(),\n\n\t);\n\n\n\n\t// Now, get all outputs owned by this wallet (regardless of account)\n\n\tlet wallet_outputs = {\n\n\t\tlet res = updater::retrieve_outputs(&mut *wallet, true, None, None)?;\n\n\t\tres\n\n\t};\n\n\n", "file_path": "wallet/src/libwallet/internal/restore.rs", "rank": 8, "score": 469020.26066963706 }, { "content": "pub fn init(g_args: &GlobalArgs, args: InitArgs) -> Result<(), Error> {\n\n\tWalletSeed::init_file(&args.config, args.list_length, &args.password)?;\n\n\tinfo!(\"Wallet seed file created\");\n\n\tlet client_n = HTTPNodeClient::new(\n\n\t\t&args.config.check_node_api_http_addr,\n\n\t\tg_args.node_api_secret.clone(),\n\n\t);\n\n\tlet _: LMDBBackend<HTTPNodeClient, keychain::ExtKeychain> =\n\n\t\tLMDBBackend::new(args.config.clone(), &args.password, client_n)?;\n\n\tinfo!(\"Wallet database backend created\");\n\n\tOk(())\n\n}\n\n\n\n/// Argument for recover\n\npub struct RecoverArgs {\n\n\tpub recovery_phrase: Option<String>,\n\n\tpub passphrase: String,\n\n}\n\n\n", "file_path": 
"wallet/src/command.rs", "rank": 9, "score": 462756.51951419906 }, { "content": "/// dispatch a db wallet\n\npub fn create_wallet<C, K>(dir: &str, n_client: C) -> Arc<Mutex<dyn WalletInst<C, K>>>\n\nwhere\n\n\tC: NodeClient + 'static,\n\n\tK: keychain::Keychain + 'static,\n\n{\n\n\tlet mut wallet_config = WalletConfig::default();\n\n\twallet_config.data_file_dir = String::from(dir);\n\n\tlet _ = WalletSeed::init_file(&wallet_config, 32, \"\");\n\n\tlet mut wallet = LMDBBackend::new(wallet_config.clone(), \"\", n_client)\n\n\t\t.unwrap_or_else(|e| panic!(\"Error creating wallet: {:?} Config: {:?}\", e, wallet_config));\n\n\twallet.open_with_credentials().unwrap_or_else(|e| {\n\n\t\tpanic!(\n\n\t\t\t\"Error initializing wallet: {:?} Config: {:?}\",\n\n\t\t\te, wallet_config\n\n\t\t)\n\n\t});\n\n\tArc::new(Mutex::new(wallet))\n\n}\n", "file_path": "wallet/src/test_framework/mod.rs", "rank": 10, "score": 447079.3201307168 }, { "content": "/// Get next available key in the wallet for a given parent\n\npub fn next_available_key<T: ?Sized, C, K>(wallet: &mut T) -> Result<Identifier, Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tlet child = wallet.next_child()?;\n\n\tOk(child)\n\n}\n\n\n", "file_path": "wallet/src/libwallet/internal/keys.rs", "rank": 11, "score": 439822.83917837986 }, { "content": "fn request_with_retry(url: &str) -> Result<Vec<String>, api::Error> {\n\n\tlet mut tries = 0;\n\n\tloop {\n\n\t\tlet res = api::client::get::<Vec<String>>(url, None);\n\n\t\tif res.is_ok() {\n\n\t\t\treturn res;\n\n\t\t}\n\n\t\tif tries > 5 {\n\n\t\t\treturn res;\n\n\t\t}\n\n\t\ttries += 1;\n\n\t\tthread::sleep(time::Duration::from_millis(500));\n\n\t}\n\n}\n", "file_path": "api/tests/rest.rs", "rank": 12, "score": 438973.0474135498 }, { "content": "/// Decode base58-encoded string into a byte vector\n\npub fn from(data: &str) -> Result<Vec<u8>, Error> {\n\n\t// 11/15 is just over log_256(58)\n\n\tlet mut scratch = vec![0u8; 1 + 
data.len() * 11 / 15];\n\n\t// Build in base 256\n\n\tfor d58 in data.bytes() {\n\n\t\t// Compute \"X = X * 58 + next_digit\" in base 256\n\n\t\tif d58 as usize > BASE58_DIGITS.len() {\n\n\t\t\treturn Err(Error::BadByte(d58));\n\n\t\t}\n\n\t\tlet mut carry = match BASE58_DIGITS[d58 as usize] {\n\n\t\t\tSome(d58) => d58 as u32,\n\n\t\t\tNone => {\n\n\t\t\t\treturn Err(Error::BadByte(d58));\n\n\t\t\t}\n\n\t\t};\n\n\t\tfor d256 in scratch.iter_mut().rev() {\n\n\t\t\tcarry += *d256 as u32 * 58;\n\n\t\t\t*d256 = carry as u8;\n\n\t\t\tcarry /= 256;\n\n\t\t}\n", "file_path": "keychain/src/base58.rs", "rank": 13, "score": 435511.1020866492 }, { "content": "/// Decode a base58check-encoded string\n\npub fn from_check(data: &str) -> Result<Vec<u8>, Error> {\n\n\tlet mut ret: Vec<u8> = from(data)?;\n\n\tif ret.len() < 4 {\n\n\t\treturn Err(Error::TooShort(ret.len()));\n\n\t}\n\n\tlet ck_start = ret.len() - 4;\n\n\tlet expected = sha256d_hash(&ret[..ck_start]);\n\n\tlet expected = into_le_low_u32(&expected);\n\n\tlet actual = LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]);\n\n\tif expected != actual {\n\n\t\treturn Err(Error::BadChecksum(expected, actual));\n\n\t}\n\n\n\n\tret.truncate(ck_start);\n\n\tOk(ret)\n\n}\n\n\n", "file_path": "keychain/src/base58.rs", "rank": 14, "score": 429197.38670035463 }, { "content": "/// Converts a mnemonic to entropy\n\npub fn to_entropy(mnemonic: &str) -> Result<Vec<u8>, Error> {\n\n\tlet words: Vec<String> = mnemonic.split_whitespace().map(|s| s.into()).collect();\n\n\n\n\tlet sizes: [usize; 5] = [12, 15, 18, 21, 24];\n\n\tif !sizes.contains(&words.len()) {\n\n\t\treturn Err(Error::InvalidLength(words.len()));\n\n\t}\n\n\n\n\t// u11 vector of indexes for each word\n\n\tlet mut indexes: Vec<u16> = r#try!(words.iter().map(|x| search(x)).collect());\n\n\tlet checksum_bits = words.len() / 3;\n\n\tlet mask = ((1 << checksum_bits) - 1) as u8;\n\n\tlet last = indexes.pop().unwrap();\n\n\tlet checksum = (last as u8) & mask;\n\n\n\n\tlet 
datalen = ((11 * words.len()) - checksum_bits) / 8 - 1;\n\n\tlet mut entropy: Vec<u8> = vec![0; datalen];\n\n\t// set the last byte to the data part of the last word\n\n\tentropy.push((last >> checksum_bits) as u8);\n\n\t// start setting bits from this index\n", "file_path": "keychain/src/mnemonic.rs", "rank": 15, "score": 429197.3867003546 }, { "content": "/// Matches any output with a potential spending input, eliminating them\n\n/// from the Vec. Provides a simple way to cut-through a block or aggregated\n\n/// transaction. The elimination is stable with respect to the order of inputs\n\n/// and outputs.\n\npub fn cut_through(inputs: &mut Vec<Input>, outputs: &mut Vec<Output>) -> Result<(), Error> {\n\n\t// assemble output commitments set, checking they're all unique\n\n\tlet mut out_set = HashSet::new();\n\n\tlet all_uniq = { outputs.iter().all(|o| out_set.insert(o.commitment())) };\n\n\tif !all_uniq {\n\n\t\treturn Err(Error::AggregationError);\n\n\t}\n\n\n\n\tlet in_set = inputs\n\n\t\t.iter()\n\n\t\t.map(|inp| inp.commitment())\n\n\t\t.collect::<HashSet<_>>();\n\n\n\n\tlet to_cut_through = in_set.intersection(&out_set).collect::<HashSet<_>>();\n\n\n\n\t// filter and sort\n\n\tinputs.retain(|inp| !to_cut_through.contains(&inp.commitment()));\n\n\toutputs.retain(|out| !to_cut_through.contains(&out.commitment()));\n\n\tinputs.sort();\n\n\toutputs.sort();\n\n\tOk(())\n\n}\n\n\n", "file_path": "core/src/core/transaction.rs", "rank": 16, "score": 427131.7571497024 }, { "content": "/// Call the wallet API to create a coinbase output for the given block_fees.\n\n/// Will retry based on default \"retry forever with backoff\" behavior.\n\npub fn create_coinbase(dest: &str, block_fees: &BlockFees) -> Result<CbData, Error> {\n\n\tlet url = format!(\"{}/v1/wallet/foreign/build_coinbase\", dest);\n\n\tmatch single_create_coinbase(&url, &block_fees) {\n\n\t\tErr(e) => {\n\n\t\t\terror!(\n\n\t\t\t\t\"Failed to get coinbase from {}. 
Run grin wallet listen?\",\n\n\t\t\t\turl\n\n\t\t\t);\n\n\t\t\terror!(\"Underlying Error: {}\", e.cause().unwrap());\n\n\t\t\terror!(\"Backtrace: {}\", e.backtrace().unwrap());\n\n\t\t\tErr(e)?\n\n\t\t}\n\n\t\tOk(res) => Ok(res),\n\n\t}\n\n}\n\n\n", "file_path": "wallet/src/node_clients/http.rs", "rank": 17, "score": 425687.43579649535 }, { "content": "/// Various tests on accounts within the same wallet\n\nfn accounts_test_impl(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\t// define recipient wallet, add to proxy\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n", "file_path": "wallet/tests/accounts.rs", "rank": 18, "score": 419153.9490482664 }, { "content": "/// Send a json object to the keybase process. 
Type `keybase chat api --help` for a list of available methods.\n\nfn api_send(payload: &str) -> Result<Value, Error> {\n\n\tlet mut proc = Command::new(\"keybase\");\n\n\tproc.args(&[\"chat\", \"api\", \"-m\", &payload]);\n\n\tlet output = proc.output().expect(\"No output\");\n\n\tif !output.status.success() {\n\n\t\terror!(\n\n\t\t\t\"keybase api fail: {} {}\",\n\n\t\t\tString::from_utf8_lossy(&output.stdout),\n\n\t\t\tString::from_utf8_lossy(&output.stderr)\n\n\t\t);\n\n\t\tErr(ErrorKind::GenericError(\"keybase api fail\".to_owned()))?\n\n\t} else {\n\n\t\tlet response: Value =\n\n\t\t\tfrom_str(from_utf8(&output.stdout).expect(\"Bad output\")).expect(\"Bad output\");\n\n\t\tlet err_msg = format!(\"{}\", response[\"error\"][\"message\"]);\n\n\t\tif err_msg.len() > 0 && err_msg != \"null\" {\n\n\t\t\terror!(\"api_send got error: {}\", err_msg);\n\n\t\t}\n\n\n\n\t\tOk(response)\n\n\t}\n\n}\n\n\n", "file_path": "wallet/src/adapters/keybase.rs", "rank": 19, "score": 418289.85654769046 }, { "content": "///\n\nfn cancel_tx_log_entry<T, C, K>(wallet: &mut T, output: &OutputData) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tlet parent_key_id = output.key_id.parent_path();\n\n\tlet updated_tx_entry = if output.tx_log_entry.is_some() {\n\n\t\tlet entries = updater::retrieve_txs(\n\n\t\t\twallet,\n\n\t\t\toutput.tx_log_entry.clone(),\n\n\t\t\tNone,\n\n\t\t\tSome(&parent_key_id),\n\n\t\t)?;\n\n\t\tif entries.len() > 0 {\n\n\t\t\tlet mut entry = entries[0].clone();\n\n\t\t\tmatch entry.tx_type {\n\n\t\t\t\tTxLogEntryType::TxSent => entry.tx_type = TxLogEntryType::TxSentCancelled,\n\n\t\t\t\tTxLogEntryType::TxReceived => entry.tx_type = TxLogEntryType::TxReceivedCancelled,\n\n\t\t\t\t_ => {}\n", "file_path": "wallet/src/libwallet/internal/restore.rs", "rank": 20, "score": 415007.41317965946 }, { "content": "/// self send impl\n\nfn file_exchange_test_impl(test_dir: &str) -> Result<(), libwallet::Error> 
{\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n\n\t\t\terror!(\"Wallet Proxy error: {}\", e);\n\n\t\t}\n\n\t});\n", "file_path": "wallet/tests/file.rs", "rank": 21, "score": 413474.7249442304 }, { "content": "/// test to see if database files exist in the current directory. 
If so,\n\n/// use a DB backend for all operations\n\npub fn wallet_db_exists(config: WalletConfig) -> bool {\n\n\tlet db_path = path::Path::new(&config.data_file_dir).join(DB_DIR);\n\n\tdb_path.exists()\n\n}\n\n\n", "file_path": "wallet/src/lmdb_wallet.rs", "rank": 22, "score": 412276.98497320287 }, { "content": "/// Test rolling back transactions and outputs when a transaction is never\n\n/// posted to a chain\n\nfn tx_rollback(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\t// define recipient wallet, add to proxy\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n", "file_path": "wallet/tests/transaction.rs", "rank": 23, "score": 411788.5953480528 }, { "content": "/// Aggregate a vec of txs into a multi-kernel tx with cut_through.\n\npub fn aggregate(mut txs: Vec<Transaction>) -> Result<Transaction, Error> {\n\n\t// convenience short-circuiting\n\n\tif txs.is_empty() {\n\n\t\treturn Ok(Transaction::empty());\n\n\t} else if txs.len() == 1 {\n\n\t\treturn Ok(txs.pop().unwrap());\n\n\t}\n\n\n\n\tlet mut inputs: 
Vec<Input> = vec![];\n\n\tlet mut outputs: Vec<Output> = vec![];\n\n\tlet mut kernels: Vec<TxKernel> = vec![];\n\n\n\n\t// we will sum these together at the end to give us the overall offset for the\n\n\t// transaction\n\n\tlet mut kernel_offsets: Vec<BlindingFactor> = vec![];\n\n\n\n\tfor mut tx in txs {\n\n\t\t// we will sum these later to give a single aggregate offset\n\n\t\tkernel_offsets.push(tx.offset);\n\n\n", "file_path": "core/src/core/transaction.rs", "rank": 24, "score": 410971.25728907745 }, { "content": "/// self send impl\n\nfn self_send_test_impl(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n\n\t\t\terror!(\"Wallet Proxy error: {}\", e);\n\n\t\t}\n\n\t});\n\n\n\n\t// few values to keep things shorter\n", "file_path": "wallet/tests/self_send.rs", "rank": 25, "score": 408154.01370818715 }, { "content": "/// Helper function to easily issue a HTTP POST request with the provided JSON\n\n/// object as body on a given URL that returns nothing. 
Handles request\n\n/// building, JSON serialization, and response code\n\n/// checking.\n\npub fn post_no_ret<IN>(url: &str, api_secret: Option<String>, input: &IN) -> Result<(), Error>\n\nwhere\n\n\tIN: Serialize,\n\n{\n\n\tlet req = create_post_request(url, api_secret, input)?;\n\n\tsend_request(req)?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "api/src/client.rs", "rank": 26, "score": 406087.54286203644 }, { "content": "/// Exercises the Transaction API fully with a test NodeClient operating\n\n/// directly on a chain instance\n\n/// Callable with any type of wallet\n\nfn basic_transaction_api(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\t// define recipient wallet, add to proxy\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n", "file_path": "wallet/tests/transaction.rs", "rank": 27, "score": 405393.79585873534 }, { "content": "/// Helper function to easily issue a HTTP POST request with the provided JSON\n\n/// object as body on a given URL that returns a JSON object. 
Handles request\n\n/// building, JSON serialization and deserialization, and response code\n\n/// checking.\n\npub fn post<IN, OUT>(url: &str, api_secret: Option<String>, input: &IN) -> Result<OUT, Error>\n\nwhere\n\n\tIN: Serialize,\n\n\tfor<'de> OUT: Deserialize<'de>,\n\n{\n\n\tlet req = create_post_request(url, api_secret, input)?;\n\n\thandle_request(req)\n\n}\n\n\n", "file_path": "api/src/client.rs", "rank": 28, "score": 404034.34988801216 }, { "content": "/// Helper function to easily issue a HTTP GET request against a given URL that\n\n/// returns a JSON object. Handles request building, JSON deserialization and\n\n/// response code checking.\n\npub fn get<'a, T>(url: &'a str, api_secret: Option<String>) -> Result<T, Error>\n\nwhere\n\n\tfor<'de> T: Deserialize<'de>,\n\n{\n\n\thandle_request(build_request(url, \"GET\", api_secret, None)?)\n\n}\n\n\n", "file_path": "api/src/client.rs", "rank": 29, "score": 401922.9031208374 }, { "content": "/// Get all unread messages from a specific channel/topic and mark as read.\n\nfn read_from_channel(channel: &str, topic: &str) -> Result<Vec<String>, Error> {\n\n\tlet payload = to_string(&json!({\n\n\t\t\"method\": \"read\",\n\n\t\t\"params\": {\n\n\t\t\t\"options\": {\n\n\t\t\t\t\"channel\": {\n\n\t\t\t\t\t\t\"name\": channel, \"topic_type\": \"dev\", \"topic_name\": topic\n\n\t\t\t\t\t},\n\n\t\t\t\t\t\"unread_only\": true, \"peek\": false\n\n\t\t\t\t},\n\n\t\t\t}\n\n\t\t}\n\n\t))\n\n\t.unwrap();\n\n\n\n\tlet response = api_send(&payload);\n\n\tif let Ok(res) = response {\n\n\t\tlet mut unread: Vec<String> = Vec::new();\n\n\t\tfor msg in res[\"result\"][\"messages\"]\n\n\t\t\t.as_array()\n", "file_path": "wallet/src/adapters/keybase.rs", "rank": 30, "score": 400302.5271313152 }, { "content": "/// self send impl\n\nfn file_repost_test_impl(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: 
WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n\n\t\t\terror!(\"Wallet Proxy error: {}\", e);\n\n\t\t}\n\n\t});\n", "file_path": "wallet/tests/repost.rs", "rank": 31, "score": 400080.58707971184 }, { "content": "/// Instantiate wallet Foreign API for a single-use (command line) call\n\n/// Return a function containing a loaded API context to call\n\npub fn foreign_single_use<F, T: ?Sized, C, K>(wallet: Arc<Mutex<T>>, f: F) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tF: FnOnce(&mut APIForeign<T, C, K>) -> Result<(), Error>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tf(&mut APIForeign::new(wallet.clone()))?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/controller.rs", "rank": 32, "score": 397729.78541597945 }, { "content": "/// Instantiate wallet Owner API for a single-use (command line) call\n\n/// Return a function containing a loaded API context to call\n\npub fn owner_single_use<F, T: ?Sized, C, K>(wallet: Arc<Mutex<T>>, f: F) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tF: FnOnce(&mut APIOwner<T, C, K>) -> Result<(), Error>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tf(&mut APIOwner::new(wallet.clone()))?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/controller.rs", 
"rank": 33, "score": 397729.78541597945 }, { "content": "/// Update the sync head so we can keep syncing from where we left off.\n\nfn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch<'_>) -> Result<(), Error> {\n\n\tlet tip = Tip::from_header(bh);\n\n\tbatch\n\n\t\t.save_sync_head(&tip)\n\n\t\t.map_err(|e| ErrorKind::StoreErr(e, \"pipe save sync head\".to_owned()))?;\n\n\tdebug!(\"sync head {} @ {}\", bh.hash(), bh.height);\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 34, "score": 396715.8920621893 }, { "content": "/// Utility function to handle forks. From the forked block, jump backward\n\n/// to find to fork root. Rewind the txhashset to the root and apply all the\n\n/// forked blocks prior to the one being processed to set the txhashset in\n\n/// the expected state.\n\npub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {\n\n\t// extending a fork, first identify the block where forking occurred\n\n\t// keeping the hashes of blocks along the fork\n\n\tlet mut fork_hashes = vec![];\n\n\tlet mut current = ext.batch.get_previous_header(&b.header)?;\n\n\twhile current.height > 0 && !ext.is_on_current_chain(&current).is_ok() {\n\n\t\tfork_hashes.push(current.hash());\n\n\t\tcurrent = ext.batch.get_previous_header(&current)?;\n\n\t}\n\n\tfork_hashes.reverse();\n\n\n\n\tlet forked_header = current;\n\n\n\n\t// Rewind the txhashset state back to the block where we forked from the most work chain.\n\n\text.rewind(&forked_header)?;\n\n\n\n\t// Now re-apply all blocks on this fork.\n\n\tfor h in fork_hashes {\n\n\t\tlet fb = ext\n\n\t\t\t.batch\n", "file_path": "chain/src/pipe.rs", "rank": 35, "score": 393178.9518146547 }, { "content": "/// Process block header as part of \"header first\" block propagation.\n\n/// We validate the header but we do not store it or update header head based\n\n/// on this. 
We will update these once we get the block back after requesting\n\n/// it.\n\npub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tdebug!(\n\n\t\t\"pipe: process_block_header: {} at {}\",\n\n\t\theader.hash(),\n\n\t\theader.height,\n\n\t); // keep this\n\n\n\n\tcheck_header_known(header, ctx)?;\n\n\tvalidate_header(header, ctx)?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 36, "score": 388825.35824130627 }, { "content": "fn restore_wallet(base_dir: &str, wallet_dir: &str) -> Result<(), libwallet::Error> {\n\n\tlet source_seed = format!(\"{}/{}/wallet.seed\", base_dir, wallet_dir);\n\n\tlet dest_dir = format!(\"{}/{}_restore\", base_dir, wallet_dir);\n\n\tfs::create_dir_all(dest_dir.clone())?;\n\n\tlet dest_seed = format!(\"{}/wallet.seed\", dest_dir);\n\n\tfs::copy(source_seed, dest_seed)?;\n\n\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(base_dir);\n\n\tlet client = LocalWalletClient::new(wallet_dir, wallet_proxy.tx.clone());\n\n\n\n\tlet wallet = test_framework::create_wallet(&dest_dir, client.clone());\n\n\n\n\twallet_proxy.add_wallet(wallet_dir, client.get_send_instance(), wallet.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n\n\t\t\terror!(\"Wallet Proxy error: {}\", e);\n\n\t\t}\n\n\t});\n", "file_path": "wallet/tests/restore.rs", "rank": 37, "score": 387000.6890179956 }, { "content": "/// unwraps the inner option by converting the none case to a not found error\n\npub fn option_to_not_found<T>(res: Result<Option<T>, Error>, field_name: &str) -> Result<T, Error> {\n\n\tmatch res {\n\n\t\tOk(None) => Err(Error::NotFoundErr(field_name.to_owned())),\n\n\t\tOk(Some(o)) => Ok(o),\n\n\t\tErr(e) => Err(e),\n\n\t}\n\n}\n\n\n", "file_path": "store/src/lmdb.rs", "rank": 38, "score": 385440.0985181521 }, { "content": "/// Runs the block processing pipeline, 
including validation and finding a\n\n/// place for the new block in the chain.\n\n/// Returns new head if chain head updated.\n\npub fn process_block(b: &Block, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {\n\n\t// TODO should just take a promise for a block with a full header so we don't\n\n\t// spend resources reading the full block when its header is invalid\n\n\n\n\tdebug!(\n\n\t\t\"pipe: process_block {} at {} [in/out/kern: {}/{}/{}]\",\n\n\t\tb.hash(),\n\n\t\tb.header.height,\n\n\t\tb.inputs().len(),\n\n\t\tb.outputs().len(),\n\n\t\tb.kernels().len(),\n\n\t);\n\n\n\n\t// Check if we have already processed this block previously.\n\n\tcheck_known(b, ctx)?;\n\n\n\n\t// Delay hitting the db for current chain head until we know\n\n\t// this block is not already known.\n\n\tlet head = ctx.batch.head()?;\n\n\tlet is_next = b.header.prev_hash == head.last_block_h;\n", "file_path": "chain/src/pipe.rs", "rank": 39, "score": 382530.5016225636 }, { "content": "/// Issue a new transaction to the provided sender by spending some of our\n\n/// wallet\n\npub fn create_send_tx<T: ?Sized, C, K>(\n\n\twallet: &mut T,\n\n\tamount: u64,\n\n\tminimum_confirmations: u64,\n\n\tmax_outputs: usize,\n\n\tnum_change_outputs: usize,\n\n\tselection_strategy_is_use_all: bool,\n\n\tparent_key_id: &Identifier,\n\n\tmessage: Option<String>,\n\n) -> Result<\n\n\t(\n\n\t\tSlate,\n\n\t\tContext,\n\n\t\timpl FnOnce(&mut T, &Transaction) -> Result<(), Error>,\n\n\t),\n\n\tError,\n\n>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n", "file_path": "wallet/src/libwallet/internal/tx.rs", "rank": 40, "score": 382347.8302295385 }, { "content": "/// Update the header head if this header has most work.\n\nfn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {\n\n\tlet header_head = ctx.batch.header_head()?;\n\n\tif has_more_work(&bh, &header_head) {\n\n\t\tlet tip = 
Tip::from_header(bh);\n\n\t\tctx.batch\n\n\t\t\t.save_header_head(&tip)\n\n\t\t\t.map_err(|e| ErrorKind::StoreErr(e, \"pipe save header head\".to_owned()))?;\n\n\n\n\t\tdebug!(\n\n\t\t\t\"pipe: header_head updated to {} at {}\",\n\n\t\t\ttip.last_block_h, tip.height\n\n\t\t);\n\n\n\n\t\tOk(Some(tip))\n\n\t} else {\n\n\t\tOk(None)\n\n\t}\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 41, "score": 381019.4120596526 }, { "content": "fn clean_old_unconfirmed<T: ?Sized, C, K>(wallet: &mut T, height: u64) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\tif height < 50 {\n\n\t\treturn Ok(());\n\n\t}\n\n\tlet mut ids_to_del = vec![];\n\n\tfor out in wallet.iter() {\n\n\t\tif out.status == OutputStatus::Unconfirmed\n\n\t\t\t&& out.height > 0\n\n\t\t\t&& out.height < height - 50\n\n\t\t\t&& out.is_coinbase\n\n\t\t{\n\n\t\t\tids_to_del.push(out.key_id.clone())\n\n\t\t}\n\n\t}\n\n\tlet mut batch = wallet.batch()?;\n\n\tfor id in ids_to_del {\n\n\t\tbatch.delete(&id)?;\n\n\t}\n\n\tbatch.commit()?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/libwallet/internal/updater.rs", "rank": 42, "score": 379309.15559292893 }, { "content": "/// Combined trait to allow dynamic wallet dispatch\n\npub trait WalletInst<C, K>: WalletBackend<C, K> + Send + Sync + 'static\n\nwhere\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n}\n\nimpl<T, C, K> WalletInst<C, K> for T\n\nwhere\n\n\tT: WalletBackend<C, K> + Send + Sync + 'static,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n}\n\n\n", "file_path": "wallet/src/libwallet/types.rs", "rank": 43, "score": 378394.6866306419 }, { "content": "/// Decode a hex string into bytes.\n\npub fn from_hex(hex_str: String) -> Result<Vec<u8>, num::ParseIntError> {\n\n\tif hex_str.len() % 2 == 1 {\n\n\t\t// TODO: other way to instantiate a ParseIntError?\n\n\t\tlet err = (\"QQQ\").parse::<u64>();\n\n\t\tif let Err(e) = err {\n\n\t\t\treturn Err(e);\n\n\t\t}\n\n\t}\n\n\tlet hex_trim = if 
&hex_str[..2] == \"0x\" {\n\n\t\thex_str[2..].to_owned()\n\n\t} else {\n\n\t\thex_str.clone()\n\n\t};\n\n\tsplit_n(&hex_trim.trim()[..], 2)\n\n\t\t.iter()\n\n\t\t.map(|b| u8::from_str_radix(b, 16))\n\n\t\t.collect::<Result<Vec<u8>, _>>()\n\n}\n\n\n", "file_path": "util/src/hex.rs", "rank": 44, "score": 376919.5782967451 }, { "content": "/// Build up 2 wallets, perform a few transactions on them\n\n/// Then attempt to restore them in separate directories and check contents are the same\n\nfn setup_restore(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\t// define recipient wallet, add to proxy\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// wallet 2 will use another account\n\n\twallet::controller::owner_single_use(wallet2.clone(), |api| {\n\n\t\tapi.create_account_path(\"account1\")?;\n", "file_path": "wallet/tests/restore.rs", "rank": 45, "score": 369812.5771107719 }, { "content": "fn perform_restore(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\trestore_wallet(test_dir, \"wallet1\")?;\n\n\tcompare_wallet_restore(\n\n\t\ttest_dir,\n\n\t\t\"wallet1\",\n\n\t\t&ExtKeychain::derive_key_id(2, 0, 0, 0, 
0),\n\n\t)?;\n\n\trestore_wallet(test_dir, \"wallet2\")?;\n\n\tcompare_wallet_restore(\n\n\t\ttest_dir,\n\n\t\t\"wallet2\",\n\n\t\t&ExtKeychain::derive_key_id(2, 0, 0, 0, 0),\n\n\t)?;\n\n\tcompare_wallet_restore(\n\n\t\ttest_dir,\n\n\t\t\"wallet2\",\n\n\t\t&ExtKeychain::derive_key_id(2, 1, 0, 0, 0),\n\n\t)?;\n\n\tcompare_wallet_restore(\n\n\t\ttest_dir,\n", "file_path": "wallet/tests/restore.rs", "rank": 46, "score": 369801.2851282533 }, { "content": "/// Validates the proof of work of a given header, and that the proof of work\n\n/// satisfies the requirements of the header.\n\npub fn verify_size(bh: &BlockHeader) -> Result<(), Error> {\n\n\tlet mut ctx = global::create_pow_context::<u64>(\n\n\t\tbh.height,\n\n\t\tbh.pow.edge_bits(),\n\n\t\tbh.pow.proof.nonces.len(),\n\n\t\tMAX_SOLS,\n\n\t)?;\n\n\tctx.set_header_nonce(bh.pre_pow(), None, false)?;\n\n\tctx.verify(&bh.pow.proof)\n\n}\n\n\n", "file_path": "core/src/pow.rs", "rank": 47, "score": 368306.46974825463 }, { "content": "/// Reads multiple serialized items into a Vec.\n\npub fn read_multi<T>(reader: &mut dyn Reader, count: u64) -> Result<Vec<T>, Error>\n\nwhere\n\n\tT: Readable,\n\n{\n\n\t// Very rudimentary check to ensure we do not overflow anything\n\n\t// attempting to read huge amounts of data.\n\n\t// Probably better than checking if count * size overflows a u64 though.\n\n\tif count > 1_000_000 {\n\n\t\treturn Err(Error::TooLargeReadErr);\n\n\t}\n\n\n\n\tlet res: Vec<T> = IteratingReader::new(reader, count).collect();\n\n\tif res.len() as u64 != count {\n\n\t\treturn Err(Error::CountError);\n\n\t}\n\n\tOk(res)\n\n}\n\n\n", "file_path": "core/src/ser.rs", "rank": 48, "score": 367890.86239667644 }, { "content": "/// Returns the index of a word in the wordlist\n\npub fn search(word: &str) -> Result<u16, Error> {\n\n\tlet w = word.to_string();\n\n\tmatch WORDS.binary_search(&w) {\n\n\t\tOk(index) => Ok(index as u16),\n\n\t\tErr(_) => Err(Error::BadWord(w)),\n\n\t}\n\n}\n\n\n", "file_path": 
"keychain/src/mnemonic.rs", "rank": 49, "score": 366412.01077531034 }, { "content": "/// Makes a single request to the wallet API to create a new coinbase output.\n\nfn single_create_coinbase(url: &str, block_fees: &BlockFees) -> Result<CbData, Error> {\n\n\tlet res = api::client::post(url, None, block_fees).context(ErrorKind::GenericError(\n\n\t\t\"Posting create coinbase\".to_string(),\n\n\t))?;\n\n\tOk(res)\n\n}\n", "file_path": "wallet/src/node_clients/http.rs", "rank": 50, "score": 365948.7414338507 }, { "content": "/// Various tests on accounts within the same wallet\n\nfn check_repair_impl(test_dir: &str) -> Result<(), libwallet::Error> {\n\n\tsetup(test_dir);\n\n\t// Create a new proxy to simulate server and wallet responses\n\n\tlet mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);\n\n\tlet chain = wallet_proxy.chain.clone();\n\n\n\n\t// Create a new wallet test client, and set its queues to communicate with the\n\n\t// proxy\n\n\tlet client1 = LocalWalletClient::new(\"wallet1\", wallet_proxy.tx.clone());\n\n\tlet wallet1 = test_framework::create_wallet(&format!(\"{}/wallet1\", test_dir), client1.clone());\n\n\twallet_proxy.add_wallet(\"wallet1\", client1.get_send_instance(), wallet1.clone());\n\n\n\n\tlet client2 = LocalWalletClient::new(\"wallet2\", wallet_proxy.tx.clone());\n\n\t// define recipient wallet, add to proxy\n\n\tlet wallet2 = test_framework::create_wallet(&format!(\"{}/wallet2\", test_dir), client2.clone());\n\n\twallet_proxy.add_wallet(\"wallet2\", client2.get_send_instance(), wallet2.clone());\n\n\n\n\t// Set the wallet proxy listener running\n\n\tthread::spawn(move || {\n\n\t\tif let Err(e) = wallet_proxy.run() {\n", "file_path": "wallet/tests/check.rs", "rank": 51, "score": 364825.4142562954 }, { "content": "/// Attempt to deaggregate a multi-kernel transaction based on multiple\n\n/// transactions\n\npub fn deaggregate(mk_tx: Transaction, txs: Vec<Transaction>) -> Result<Transaction, Error> 
{\n\n\tlet mut inputs: Vec<Input> = vec![];\n\n\tlet mut outputs: Vec<Output> = vec![];\n\n\tlet mut kernels: Vec<TxKernel> = vec![];\n\n\n\n\t// we will subtract these at the end to give us the overall offset for the\n\n\t// transaction\n\n\tlet mut kernel_offsets = vec![];\n\n\n\n\tlet tx = aggregate(txs)?;\n\n\n\n\tfor mk_input in mk_tx.body.inputs {\n\n\t\tif !tx.body.inputs.contains(&mk_input) && !inputs.contains(&mk_input) {\n\n\t\t\tinputs.push(mk_input);\n\n\t\t}\n\n\t}\n\n\tfor mk_output in mk_tx.body.outputs {\n\n\t\tif !tx.body.outputs.contains(&mk_output) && !outputs.contains(&mk_output) {\n\n\t\t\toutputs.push(mk_output);\n\n\t\t}\n", "file_path": "core/src/core/transaction.rs", "rank": 52, "score": 364126.03609566187 }, { "content": "/// Starts a new unit of work to extend (or rewind) the chain with additional\n\n/// blocks. Accepts a closure that will operate within that unit of work.\n\n/// The closure has access to an Extension object that allows the addition\n\n/// of blocks to the txhashset and the checking of the current tree roots.\n\n///\n\n/// The unit of work is always discarded (always rollback) as this is read-only.\n\npub fn extending_readonly<'a, F, T>(trees: &'a mut TxHashSet, inner: F) -> Result<T, Error>\n\nwhere\n\n\tF: FnOnce(&mut Extension<'_>) -> Result<T, Error>,\n\n{\n\n\tlet commit_index = trees.commit_index.clone();\n\n\tlet batch = commit_index.batch()?;\n\n\n\n\t// We want to use the current head of the most work chain unless\n\n\t// we explicitly rewind the extension.\n\n\tlet header = batch.head_header()?;\n\n\n\n\ttrace!(\"Starting new txhashset (readonly) extension.\");\n\n\n\n\tlet res = {\n\n\t\tlet mut extension = Extension::new(trees, &batch, header);\n\n\t\textension.force_rollback();\n\n\n\n\t\t// TODO - header_mmr may be out ahead via the header_head\n\n\t\t// TODO - do we need to handle this via an explicit rewind on the header_mmr?\n\n\n", "file_path": "chain/src/txhashset/txhashset.rs", "rank": 53, "score": 
362843.4487337724 }, { "content": "/// Convenience function when the seed list is immediately known. Mostly used\n\n/// for tests.\n\npub fn predefined_seeds(addrs_str: Vec<String>) -> Box<dyn Fn() -> Vec<SocketAddr> + Send> {\n\n\tBox::new(move || {\n\n\t\taddrs_str\n\n\t\t\t.iter()\n\n\t\t\t.map(|s| s.parse().unwrap())\n\n\t\t\t.collect::<Vec<_>>()\n\n\t})\n\n}\n\n\n", "file_path": "servers/src/grin/seed.rs", "rank": 54, "score": 362770.9717510004 }, { "content": "/// Check that the api secret file exists and is valid\n\nfn check_api_secret_file(chain_type: &global::ChainTypes) -> Result<(), ConfigError> {\n\n\tlet grin_path = get_grin_path(chain_type)?;\n\n\tlet mut api_secret_path = grin_path.clone();\n\n\tapi_secret_path.push(API_SECRET_FILE_NAME);\n\n\tif !api_secret_path.exists() {\n\n\t\tinit_api_secret(&api_secret_path)\n\n\t} else {\n\n\t\tcheck_api_secret(&api_secret_path)\n\n\t}\n\n}\n\n\n", "file_path": "config/src/config.rs", "rank": 55, "score": 357419.50349513325 }, { "content": "pub fn amount_from_hr_string(amount: &str) -> Result<u64, Error> {\n\n\t// no i18n yet, make sure we use '.' 
as the separator\n\n\tif amount.find(',').is_some() {\n\n\t\treturn Err(Error::InvalidAmountString);\n\n\t}\n\n\tlet (grins, ngrins) = match amount.find('.') {\n\n\t\tNone => (parse_grins(amount)?, 0),\n\n\t\tSome(pos) => {\n\n\t\t\tlet (gs, tail) = amount.split_at(pos);\n\n\t\t\t(parse_grins(gs)?, parse_ngrins(&tail[1..])?)\n\n\t\t}\n\n\t};\n\n\tOk(grins * GRIN_BASE + ngrins)\n\n}\n\n\n", "file_path": "core/src/core.rs", "rank": 56, "score": 356149.946994156 }, { "content": "/// Converts entropy to a mnemonic\n\npub fn from_entropy(entropy: &Vec<u8>) -> Result<String, Error> {\n\n\tlet sizes: [usize; 5] = [16, 20, 24, 28, 32];\n\n\tlet length = entropy.len();\n\n\tif !sizes.contains(&length) {\n\n\t\treturn Err(Error::InvalidLength(length));\n\n\t}\n\n\n\n\tlet checksum_bits = length / 4;\n\n\tlet mask = ((1 << checksum_bits) - 1) as u8;\n\n\n\n\tlet mut hash = [0; 32];\n\n\tlet mut sha2sum = Sha256::default();\n\n\tsha2sum.input(&entropy.clone());\n\n\thash.copy_from_slice(sha2sum.result().as_slice());\n\n\n\n\tlet checksum = (hash[0] >> 8 - checksum_bits) & mask;\n\n\n\n\tlet nwords = (length * 8 + checksum_bits) / 11;\n\n\tlet mut indexes: Vec<u16> = vec![0; nwords];\n\n\tlet mut loc: usize = 0;\n", "file_path": "keychain/src/mnemonic.rs", "rank": 57, "score": 355263.6662886192 }, { "content": "/// Adds a block with a given reward to the chain and mines it\n\npub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbData) {\n\n\tlet prev = chain.head_header().unwrap();\n\n\tlet next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());\n\n\tlet out_bin = util::from_hex(reward.output).unwrap();\n\n\tlet kern_bin = util::from_hex(reward.kernel).unwrap();\n\n\tlet output = ser::deserialize(&mut &out_bin[..]).unwrap();\n\n\tlet kernel = ser::deserialize(&mut &kern_bin[..]).unwrap();\n\n\tlet mut b = 
core::core::Block::new(\n\n\t\t&prev,\n\n\t\ttxs.into_iter().cloned().collect(),\n\n\t\tnext_header_info.clone().difficulty,\n\n\t\t(output, kernel),\n\n\t)\n\n\t.unwrap();\n\n\tb.header.timestamp = prev.timestamp + Duration::seconds(60);\n\n\tb.header.pow.secondary_scaling = next_header_info.secondary_scaling;\n\n\tchain.set_txhashset_roots(&mut b).unwrap();\n\n\tpow::pow_size(\n\n\t\t&mut b.header,\n\n\t\tnext_header_info.difficulty,\n\n\t\tglobal::proofsize(),\n\n\t\tglobal::min_edge_bits(),\n\n\t)\n\n\t.unwrap();\n\n\tchain.process_block(b, chain::Options::MINE).unwrap();\n\n\tchain.validate(false).unwrap();\n\n}\n\n\n", "file_path": "wallet/src/test_framework/mod.rs", "rank": 58, "score": 353314.92610138893 }, { "content": "/// Receive a transaction, modifying the slate accordingly (which can then be\n\n/// sent back to sender for posting)\n\npub fn receive_tx<T: ?Sized, C, K>(\n\n\twallet: &mut T,\n\n\tslate: &mut Slate,\n\n\tparent_key_id: &Identifier,\n\n\tmessage: Option<String>,\n\n) -> Result<(), Error>\n\nwhere\n\n\tT: WalletBackend<C, K>,\n\n\tC: NodeClient,\n\n\tK: Keychain,\n\n{\n\n\t// create an output using the amount in the slate\n\n\tlet (_, mut context, receiver_create_fn) =\n\n\t\tselection::build_recipient_output_with_slate(wallet, slate, parent_key_id.clone())?;\n\n\n\n\t// fill public keys\n\n\tlet _ = slate.fill_round_1(\n\n\t\twallet.keychain(),\n\n\t\t&mut context.sec_key,\n\n\t\t&context.sec_nonce,\n", "file_path": "wallet/src/libwallet/internal/tx.rs", "rank": 59, "score": 347922.4582822772 }, { "content": "/// Utility function to serialize a writeable directly in memory using a\n\n/// Vec<u8>.\n\npub fn ser_vec<W: Writeable>(thing: &W) -> Result<Vec<u8>, Error> {\n\n\tlet mut vec = vec![];\n\n\tserialize(&mut vec, thing)?;\n\n\tOk(vec)\n\n}\n\n\n", "file_path": "core/src/ser.rs", "rank": 60, "score": 343750.25316191826 }, { "content": "/// Mines a genesis block using the internal miner\n\npub fn mine_genesis_block() -> Result<Block, 
Error> {\n\n\tlet mut gen = genesis::genesis_dev();\n\n\tif global::is_user_testing_mode() || global::is_automated_testing_mode() {\n\n\t\tgen = genesis::genesis_dev();\n\n\t\tgen.header.timestamp = Utc::now();\n\n\t}\n\n\n\n\t// total_difficulty on the genesis header *is* the difficulty of that block\n\n\tlet genesis_difficulty = gen.header.pow.total_difficulty;\n\n\n\n\tlet sz = global::min_edge_bits();\n\n\tlet proof_size = global::proofsize();\n\n\n\n\tpow_size(&mut gen.header, genesis_difficulty, proof_size, sz)?;\n\n\tOk(gen)\n\n}\n\n\n", "file_path": "core/src/pow.rs", "rank": 61, "score": 343710.8693105809 }, { "content": "// Check if we already know about this block for various reasons\n\n// from cheapest to most expensive (delay hitting the db until last).\n\nfn check_known(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tcheck_known_head(&block.header, ctx)?;\n\n\tcheck_known_orphans(&block.header, ctx)?;\n\n\tcheck_known_store(&block.header, ctx)?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 62, "score": 342440.4682242519 }, { "content": "fn validate_block(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tlet prev = ctx.batch.get_previous_header(&block.header)?;\n\n\tblock\n\n\t\t.validate(&prev.total_kernel_offset, ctx.verifier_cache.clone())\n\n\t\t.map_err(|e| ErrorKind::InvalidBlockProof(e))?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 63, "score": 342434.85106382816 }, { "content": "/// Creates temporary file with name created by adding `temp_suffix` to `path`.\n\n/// Applies writer function to it and renames temporary file into original specified by `path`.\n\npub fn save_via_temp_file<F, P, E>(\n\n\tpath: P,\n\n\ttemp_suffix: E,\n\n\tmut writer: F,\n\n) -> Result<(), std::io::Error>\n\nwhere\n\n\tF: FnMut(Box<dyn std::io::Write>) -> Result<(), std::io::Error>,\n\n\tP: AsRef<Path>,\n\n\tE: AsRef<OsStr>,\n\n{\n\n\tlet temp_suffix = 
temp_suffix.as_ref();\n\n\tassert!(!temp_suffix.is_empty());\n\n\n\n\tlet original = path.as_ref();\n\n\tlet mut _original = original.as_os_str().to_os_string();\n\n\t_original.push(temp_suffix);\n\n\t// Write temporary file\n\n\tlet temp_path = Path::new(&_original);\n\n\tif temp_path.exists() {\n\n\t\tremove_file(&temp_path)?;\n", "file_path": "store/src/lib.rs", "rank": 64, "score": 340959.48025192134 }, { "content": "fn parse_grins(amount: &str) -> Result<u64, Error> {\n\n\tif amount == \"\" {\n\n\t\tOk(0)\n\n\t} else {\n\n\t\tamount\n\n\t\t\t.parse::<u64>()\n\n\t\t\t.map_err(|_| Error::InvalidAmountString)\n\n\t}\n\n}\n\n\n\nlazy_static! {\n\n\tstatic ref WIDTH: usize = (GRIN_BASE as f64).log(10.0) as usize + 1;\n\n}\n\n\n", "file_path": "core/src/core.rs", "rank": 65, "score": 340402.8489313799 }, { "content": "/// Handles setup and detection of paths for node\n\npub fn initial_setup_server(chain_type: &global::ChainTypes) -> Result<GlobalConfig, ConfigError> {\n\n\tcheck_api_secret_file(chain_type)?;\n\n\t// Use config file if current directory if it exists, .grin home otherwise\n\n\tif let Some(p) = check_config_current_dir(SERVER_CONFIG_FILE_NAME) {\n\n\t\tGlobalConfig::new(p.to_str().unwrap())\n\n\t} else {\n\n\t\t// Check if grin dir exists\n\n\t\tlet grin_path = get_grin_path(chain_type)?;\n\n\n\n\t\t// Get path to default config file\n\n\t\tlet mut config_path = grin_path.clone();\n\n\t\tconfig_path.push(SERVER_CONFIG_FILE_NAME);\n\n\n\n\t\t// Spit it out if it doesn't exist\n\n\t\tif !config_path.exists() {\n\n\t\t\tlet mut default_config = GlobalConfig::for_chain(chain_type);\n\n\t\t\t// update paths relative to current dir\n\n\t\t\tdefault_config.update_paths(&grin_path);\n\n\t\t\tdefault_config.write_to_file(config_path.to_str().unwrap())?;\n\n\t\t}\n\n\n\n\t\tGlobalConfig::new(config_path.to_str().unwrap())\n\n\t}\n\n}\n\n\n", "file_path": "config/src/config.rs", "rank": 66, "score": 339543.1787501813 }, { "content": "fn send_request(req: 
Request<Body>) -> Result<String, Error> {\n\n\tlet task = send_request_async(req);\n\n\tlet mut rt = Runtime::new().unwrap();\n\n\tOk(rt.block_on(task)?)\n\n}\n", "file_path": "api/src/client.rs", "rank": 67, "score": 338901.13607556553 }, { "content": "/// Officially adds the block header to our header chain.\n\nfn add_block_header(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {\n\n\tbatch\n\n\t\t.save_block_header(bh)\n\n\t\t.map_err(|e| ErrorKind::StoreErr(e, \"pipe save header\".to_owned()))?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 68, "score": 338220.51187740103 }, { "content": "/// Update the block chain tail so we can know the exact tail of full blocks in this node\n\nfn update_body_tail(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {\n\n\tlet tip = Tip::from_header(bh);\n\n\tbatch\n\n\t\t.save_body_tail(&tip)\n\n\t\t.map_err(|e| ErrorKind::StoreErr(e, \"pipe save body tail\".to_owned()))?;\n\n\tdebug!(\"body tail {} @ {}\", bh.hash(), bh.height);\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 69, "score": 338220.31321041705 }, { "content": "fn split_n(s: &str, n: usize) -> Vec<&str> {\n\n\t(0..(s.len() - n + 1) / 2 + 1)\n\n\t\t.map(|i| &s[2 * i..2 * i + n])\n\n\t\t.collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_to_hex() {\n\n\t\tassert_eq!(to_hex(vec![0, 0, 0, 0]), \"00000000\");\n\n\t\tassert_eq!(to_hex(vec![10, 11, 12, 13]), \"0a0b0c0d\");\n\n\t\tassert_eq!(to_hex(vec![0, 0, 0, 255]), \"000000ff\");\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_from_hex() {\n\n\t\tassert_eq!(from_hex(\"00000000\".to_string()).unwrap(), vec![0, 0, 0, 0]);\n", "file_path": "util/src/hex.rs", "rank": 70, "score": 338185.0576128228 }, { "content": "/// First level of block validation that only needs to act on the block header\n\n/// to make it as cheap as possible. 
The different validations are also\n\n/// arranged by order of cost to have as little DoS surface as possible.\n\nfn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\t// check version, enforces scheduled hard fork\n\n\tif !consensus::valid_header_version(header.height, header.version) {\n\n\t\terror!(\n\n\t\t\t\"Invalid block header version received ({}), maybe update Grin?\",\n\n\t\t\theader.version\n\n\t\t);\n\n\t\treturn Err(ErrorKind::InvalidBlockVersion(header.version).into());\n\n\t}\n\n\n\n\t// TODO: remove CI check from here somehow\n\n\tif header.timestamp > Utc::now() + Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))\n\n\t\t&& !global::is_automated_testing_mode()\n\n\t{\n\n\t\t// refuse blocks more than 12 blocks intervals in future (as in bitcoin)\n\n\t\t// TODO add warning in p2p code if local time is too different from peers\n\n\t\treturn Err(ErrorKind::InvalidBlockTime.into());\n\n\t}\n\n\n\n\tif !ctx.opts.contains(Options::SKIP_POW) {\n", "file_path": "chain/src/pipe.rs", "rank": 71, "score": 337893.0097269396 }, { "content": "/// Some \"real magick\" verification logic.\n\n/// The (BlockSums, Block) tuple implements Committed...\n\n/// This allows us to verify kernel sums across the full utxo and kernel sets\n\n/// based on block_sums of previous block, accounting for the inputs|outputs|kernels\n\n/// of the new block.\n\nfn verify_block_sums(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {\n\n\t// TODO - this is 2 db calls, can we optimize this?\n\n\t// Retrieve the block_sums for the previous block.\n\n\tlet prev = ext.batch.get_previous_header(&b.header)?;\n\n\tlet block_sums = ext.batch.get_block_sums(&prev.hash())?;\n\n\n\n\t// Overage is based purely on the new block.\n\n\t// Previous block_sums have taken all previous overage into account.\n\n\tlet overage = b.header.overage();\n\n\n\n\t// Offset on the other hand is the total kernel offset from the new 
block.\n\n\tlet offset = b.header.total_kernel_offset();\n\n\n\n\t// Verify the kernel sums for the block_sums with the new block applied.\n\n\tlet (utxo_sum, kernel_sum) =\n\n\t\t(block_sums, b as &dyn Committed).verify_kernel_sums(overage, offset)?;\n\n\n\n\t// Save the new block_sums for the new block to the db via the batch.\n\n\text.batch.save_block_sums(\n\n\t\t&b.header.hash(),\n\n\t\t&BlockSums {\n\n\t\t\tutxo_sum,\n\n\t\t\tkernel_sum,\n\n\t\t},\n\n\t)?;\n\n\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 72, "score": 337202.5195741711 }, { "content": "/// Packages the txhashset data files into a zip and returns a Read to the\n\n/// resulting file\n\npub fn zip_read(root_dir: String, header: &BlockHeader, rand: Option<u32>) -> Result<File, Error> {\n\n\tlet ts = if let None = rand {\n\n\t\tlet now = SystemTime::now();\n\n\t\tnow.duration_since(UNIX_EPOCH).unwrap().subsec_micros()\n\n\t} else {\n\n\t\trand.unwrap()\n\n\t};\n\n\tlet txhashset_zip = format!(\"{}_{}.zip\", TXHASHSET_ZIP, ts);\n\n\n\n\tlet txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR);\n\n\tlet zip_path = Path::new(&root_dir).join(txhashset_zip);\n\n\t// create the zip archive\n\n\t{\n\n\t\t// Temp txhashset directory\n\n\t\tlet temp_txhashset_path =\n\n\t\t\tPath::new(&root_dir).join(format!(\"{}_zip_{}\", TXHASHSET_SUBDIR, ts));\n\n\t\t// Remove temp dir if it exist\n\n\t\tif temp_txhashset_path.exists() {\n\n\t\t\tfs::remove_dir_all(&temp_txhashset_path)?;\n\n\t\t}\n", "file_path": "chain/src/txhashset/txhashset.rs", "rank": 73, "score": 336110.8893505425 }, { "content": "/// Quick in-memory check to fast-reject any block header we've already handled\n\n/// recently. 
Keeps duplicates from the network in check.\n\n/// ctx here is specific to the header_head (tip of the header chain)\n\nfn check_header_known(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tlet header_head = ctx.batch.header_head()?;\n\n\tif header.hash() == header_head.last_block_h || header.hash() == header_head.prev_block_h {\n\n\t\treturn Err(ErrorKind::Unfit(\"header already known\".to_string()).into());\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 74, "score": 333554.86875961034 }, { "content": "// Check if this block is in the store already.\n\nfn check_known_store(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tmatch ctx.batch.block_exists(&header.hash()) {\n\n\t\tOk(true) => {\n\n\t\t\tlet head = ctx.batch.head()?;\n\n\t\t\tif header.height < head.height.saturating_sub(50) {\n\n\t\t\t\t// TODO - we flag this as an \"abusive peer\" but only in the case\n\n\t\t\t\t// where we have the full block in our store.\n\n\t\t\t\t// So this is not a particularly exhaustive check.\n\n\t\t\t\tErr(ErrorKind::OldBlock.into())\n\n\t\t\t} else {\n\n\t\t\t\tErr(ErrorKind::Unfit(\"already known in store\".to_string()).into())\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(false) => {\n\n\t\t\t// Not yet processed this block, we can proceed.\n\n\t\t\tOk(())\n\n\t\t}\n\n\t\tErr(e) => {\n\n\t\t\treturn Err(ErrorKind::StoreErr(e, \"pipe get this block\".to_owned()).into());\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 75, "score": 333550.40191282425 }, { "content": "/// Quick in-memory check to fast-reject any block handled recently.\n\n/// Keeps duplicates from the network in check.\n\n/// Checks against the last_block_h and prev_block_h of the chain head.\n\nfn check_known_head(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tlet head = ctx.batch.head()?;\n\n\tlet bh = header.hash();\n\n\tif bh == head.last_block_h || bh == head.prev_block_h 
{\n\n\t\treturn Err(ErrorKind::Unfit(\"already known in head\".to_string()).into());\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 76, "score": 333549.7139474503 }, { "content": "/// Check if this block is in the set of known orphans.\n\nfn check_known_orphans(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {\n\n\tif ctx.orphans.contains(&header.hash()) {\n\n\t\tErr(ErrorKind::Unfit(\"already known in orphans\".to_string()).into())\n\n\t} else {\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n", "file_path": "chain/src/pipe.rs", "rank": 77, "score": 333544.45816085313 }, { "content": "/// Read a message body from the provided stream, always blocking\n\n/// until we have a result (or timeout).\n\npub fn read_body<T: Readable>(h: &MsgHeader, stream: &mut dyn Read) -> Result<T, Error> {\n\n\tlet mut body = vec![0u8; h.msg_len as usize];\n\n\tread_exact(stream, &mut body, time::Duration::from_secs(20), true)?;\n\n\tser::deserialize(&mut &body[..]).map_err(From::from)\n\n}\n\n\n", "file_path": "p2p/src/msg.rs", "rank": 78, "score": 331677.0689854613 }, { "content": "pub fn dns_seeds() -> Box<dyn Fn() -> Vec<SocketAddr> + Send> {\n\n\tBox::new(|| {\n\n\t\tlet mut addresses: Vec<SocketAddr> = vec![];\n\n\t\tlet net_seeds = if global::is_floonet() {\n\n\t\t\tFLOONET_DNS_SEEDS\n\n\t\t} else {\n\n\t\t\tMAINNET_DNS_SEEDS\n\n\t\t};\n\n\t\tfor dns_seed in net_seeds {\n\n\t\t\tlet temp_addresses = addresses.clone();\n\n\t\t\tdebug!(\"Retrieving seed nodes from dns {}\", dns_seed);\n\n\t\t\tmatch (dns_seed.to_owned(), 0).to_socket_addrs() {\n\n\t\t\t\tOk(addrs) => addresses.append(\n\n\t\t\t\t\t&mut (addrs\n\n\t\t\t\t\t\t.map(|mut addr| {\n\n\t\t\t\t\t\t\taddr.set_port(13414);\n\n\t\t\t\t\t\t\taddr\n\n\t\t\t\t\t\t})\n\n\t\t\t\t\t\t.filter(|addr| !temp_addresses.contains(addr))\n\n\t\t\t\t\t\t.collect()),\n\n\t\t\t\t),\n\n\t\t\t\tErr(e) => debug!(\"Failed to resolve seed {:?} got error {:?}\", dns_seed, 
e),\n\n\t\t\t}\n\n\t\t}\n\n\t\tdebug!(\"Retrieved seed addresses: {:?}\", addresses);\n\n\t\taddresses\n\n\t})\n\n}\n\n\n", "file_path": "servers/src/grin/seed.rs", "rank": 79, "score": 330359.68076370354 }, { "content": "/// Builds a transaction to send to someone from the HD seed associated with the\n\n/// wallet and the amount to send. Handles reading through the wallet data file,\n\n/// selecting outputs to spend and building the change.\n\npub fn select_send_tx<T: ?Sized, C, K>(\n\n\twallet: &mut T,\n\n\tamount: u64,\n\n\tcurrent_height: u64,\n\n\tminimum_confirmations: u64,\n\n\tlock_height: u64,\n\n\tmax_outputs: usize,\n\n\tchange_outputs: usize,\n\n\tselection_strategy_is_use_all: bool,\n\n\tparent_key_id: &Identifier,\n\n) -> Result<\n\n\t(\n\n\t\tVec<Box<build::Append<K>>>,\n\n\t\tVec<OutputData>,\n\n\t\tVec<(u64, Identifier)>, // change amounts and derivations\n\n\t\tu64, // amount\n\n\t\tu64, // fee\n\n\t),\n\n\tError,\n\n>\n", "file_path": "wallet/src/libwallet/internal/selection.rs", "rank": 80, "score": 328896.00525726756 }, { "content": "/// Create file with api secret\n\nfn init_api_secret(api_secret_path: &PathBuf) -> Result<(), ConfigError> {\n\n\tlet mut api_secret_file = File::create(api_secret_path)?;\n\n\tlet api_secret: String = Alphanumeric\n\n\t\t.sample_iter(&mut thread_rng())\n\n\t\t.take(20)\n\n\t\t.collect();\n\n\tapi_secret_file.write_all(api_secret.as_bytes())?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "config/src/config.rs", "rank": 81, "score": 327313.3307096627 }, { "content": "/// Convert Result to ResponseFuture\n\npub fn result_to_response<T>(res: Result<T, Error>) -> ResponseFuture\n\nwhere\n\n\tT: Serialize,\n\n{\n\n\tmatch res {\n\n\t\tOk(s) => json_response_pretty(&s),\n\n\t\tErr(e) => match e.kind() {\n\n\t\t\tErrorKind::Argument(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),\n\n\t\t\tErrorKind::RequestError(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),\n\n\t\t\tErrorKind::NotFound => 
response(StatusCode::NOT_FOUND, \"\"),\n\n\t\t\tErrorKind::Internal(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),\n\n\t\t\tErrorKind::ResponseError(msg) => {\n\n\t\t\t\tresponse(StatusCode::INTERNAL_SERVER_ERROR, msg.clone())\n\n\t\t\t}\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "api/src/web.rs", "rank": 82, "score": 326390.82462752267 }, { "content": "/// Deserializes a Readable from any std::io::Read implementation.\n\npub fn deserialize<T: Readable>(source: &mut dyn Read) -> Result<T, Error> {\n\n\tlet mut reader = BinReader { source };\n\n\tT::read(&mut reader)\n\n}\n\n\n", "file_path": "core/src/ser.rs", "rank": 83, "score": 325611.5586872444 }, { "content": "fn generate<W: ser::Writeable>(target: &str, obj: W) -> Result<(), ser::Error> {\n\n\tlet dir_path = Path::new(\"corpus\").join(target);\n\n\tif !dir_path.is_dir() {\n\n\t\tfs::create_dir_all(&dir_path).map_err(|e| {\n\n\t\t\tprintln!(\"fail: {}\", e);\n\n\t\t\tser::Error::IOErr(\"can't create corpus directory\".to_owned(), e.kind())\n\n\t\t})?;\n\n\t}\n\n\n\n\tlet pattern_path = dir_path.join(\"pattern\");\n\n\tif !pattern_path.exists() {\n\n\t\tlet mut file = File::create(&pattern_path)\n\n\t\t\t.map_err(|e| ser::Error::IOErr(\"can't create a pattern file\".to_owned(), e.kind()))?;\n\n\t\tser::serialize(&mut file, &obj)\n\n\t} else {\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n", "file_path": "core/fuzz/src/main.rs", "rank": 84, "score": 323861.3270500365 }, { "content": "pub fn build_send_tx_slate<T: ?Sized, C, K>(\n\n\twallet: &mut T,\n\n\tnum_participants: usize,\n\n\tamount: u64,\n\n\tcurrent_height: u64,\n\n\tminimum_confirmations: u64,\n\n\tlock_height: u64,\n\n\tmax_outputs: usize,\n\n\tchange_outputs: usize,\n\n\tselection_strategy_is_use_all: bool,\n\n\tparent_key_id: Identifier,\n\n) -> Result<\n\n\t(\n\n\t\tSlate,\n\n\t\tContext,\n\n\t\timpl FnOnce(&mut T, &Transaction) -> Result<(), Error>,\n\n\t),\n\n\tError,\n\n>\n\nwhere\n", "file_path": 
"wallet/src/libwallet/internal/selection.rs", "rank": 85, "score": 323425.6773256986 }, { "content": "/// Initialize the logger with the given configuration\n\npub fn init_logger(config: Option<LoggingConfig>) {\n\n\tif let Some(c) = config {\n\n\t\tlet tui_running = c.tui_running.unwrap_or(false);\n\n\t\tif tui_running {\n\n\t\t\tlet mut tui_running_ref = TUI_RUNNING.lock();\n\n\t\t\t*tui_running_ref = true;\n\n\t\t}\n\n\n\n\t\t// Save current logging configuration\n\n\t\tlet mut config_ref = LOGGING_CONFIG.lock();\n\n\t\t*config_ref = c.clone();\n\n\n\n\t\tlet level_stdout = convert_log_level(&c.stdout_log_level);\n\n\t\tlet level_file = convert_log_level(&c.file_log_level);\n\n\t\tlet level_minimum;\n\n\n\n\t\t// Determine minimum logging level for Root logger\n\n\t\tif level_stdout > level_file {\n\n\t\t\tlevel_minimum = level_stdout;\n\n\t\t} else {\n", "file_path": "util/src/logger.rs", "rank": 86, "score": 322174.9471184849 }, { "content": "/// Read Bitmap from a file\n\npub fn read_bitmap<P: AsRef<Path>>(file_path: P) -> io::Result<Bitmap> {\n\n\tlet mut bitmap_file = File::open(file_path)?;\n\n\tlet f_md = bitmap_file.metadata()?;\n\n\tlet mut buffer = Vec::with_capacity(f_md.len() as usize);\n\n\tbitmap_file.read_to_end(&mut buffer)?;\n\n\tOk(Bitmap::deserialize(&buffer))\n\n}\n", "file_path": "store/src/lib.rs", "rank": 87, "score": 321873.78676682 }, { "content": "/// Converts a nemonic and a passphrase into a seed\n\npub fn to_seed<'a, T: 'a>(mnemonic: &str, passphrase: T) -> Result<[u8; 64], Error>\n\nwhere\n\n\tOption<&'a str>: From<T>,\n\n{\n\n\t// make sure the mnemonic is valid\n\n\tr#try!(to_entropy(mnemonic));\n\n\n\n\tlet salt = (\"mnemonic\".to_owned() + Option::from(passphrase).unwrap_or(\"\")).into_bytes();\n\n\tlet data = mnemonic.as_bytes();\n\n\tlet mut seed = [0; 64];\n\n\n\n\tpbkdf2::<Hmac<Sha512>>(data, &salt[..], 2048, &mut seed);\n\n\n\n\tOk(seed)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::{from_entropy, to_entropy, 
to_seed};\n\n\tuse crate::util::{from_hex, to_hex};\n", "file_path": "keychain/src/mnemonic.rs", "rank": 88, "score": 321299.47042771074 }, { "content": "/// adds a reward output to a wallet, includes that reward in a block, mines\n\n/// the block and adds it to the chain, with option transactions included.\n\n/// Helpful for building up precise wallet balances for testing.\n\npub fn award_block_to_wallet<C, K>(\n\n\tchain: &Chain,\n\n\ttxs: Vec<&Transaction>,\n\n\twallet: Arc<Mutex<dyn WalletInst<C, K>>>,\n\n) -> Result<(), libwallet::Error>\n\nwhere\n\n\tC: NodeClient,\n\n\tK: keychain::Keychain,\n\n{\n\n\t// build block fees\n\n\tlet prev = chain.head_header().unwrap();\n\n\tlet fee_amt = txs.iter().map(|tx| tx.fee()).sum();\n\n\tlet block_fees = BlockFees {\n\n\t\tfees: fee_amt,\n\n\t\tkey_id: None,\n\n\t\theight: prev.height + 1,\n\n\t};\n\n\t// build coinbase (via api) and add block\n\n\tcontroller::foreign_single_use(wallet.clone(), |api| {\n\n\t\tlet coinbase_tx = api.build_coinbase(&block_fees)?;\n\n\t\tadd_block_with_reward(chain, txs, coinbase_tx.clone());\n\n\t\tOk(())\n\n\t})?;\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/test_framework/mod.rs", "rank": 89, "score": 319949.23083811573 }, { "content": "/// Award a blocks to a wallet directly\n\npub fn award_blocks_to_wallet<C, K>(\n\n\tchain: &Chain,\n\n\twallet: Arc<Mutex<dyn WalletInst<C, K>>>,\n\n\tnumber: usize,\n\n) -> Result<(), libwallet::Error>\n\nwhere\n\n\tC: NodeClient,\n\n\tK: keychain::Keychain,\n\n{\n\n\tfor _ in 0..number {\n\n\t\taward_block_to_wallet(chain, vec![], wallet.clone())?;\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "wallet/src/test_framework/mod.rs", "rank": 90, "score": 319938.28099645954 }, { "content": "pub fn create_secnonce(secp: &Secp256k1) -> Result<SecretKey, Error> {\n\n\tlet nonce = aggsig::export_secnonce_single(secp)?;\n\n\tOk(nonce)\n\n}\n\n\n\n/// Calculates a partial signature given the signer's secure key,\n\n/// the sum of all public nonces and 
(optionally) the sum of all public keys.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `secp` - A Secp256k1 Context initialized for Signing\n\n/// * `sec_key` - The signer's secret key\n\n/// * `sec_nonce` - The signer's secret nonce (the public version of which\n\n/// was added to the `nonce_sum` total)\n\n/// * `nonce_sum` - The sum of the public nonces of all signers participating\n\n/// in the full signature. This value is encoded in e.\n\n/// * `pubkey_sum` - (Optional) The sum of the public keys of all signers participating\n\n/// in the full signature. If included, this value is encoded in e.\n\n/// * `msg` - The message to sign.\n\n///\n", "file_path": "core/src/libtx/aggsig.rs", "rank": 91, "score": 318184.7933299418 }, { "content": "/// List directory\n\npub fn list_files(path: String) -> Vec<String> {\n\n\tlet mut files_vec: Vec<String> = vec![];\n\n\tfor entry in WalkDir::new(Path::new(&path))\n\n\t\t.into_iter()\n\n\t\t.filter_map(|e| e.ok())\n\n\t{\n\n\t\tmatch entry.file_name().to_str() {\n\n\t\t\tSome(path_str) => files_vec.push(path_str.to_string()),\n\n\t\t\tNone => println!(\"Could not read optional type\"),\n\n\t\t}\n\n\t}\n\n\treturn files_vec;\n\n}\n\n\n", "file_path": "util/src/file.rs", "rank": 92, "score": 317631.4175702692 }, { "content": "/// Serializes a Writeable into any std::io::Write implementation.\n\npub fn serialize<W: Writeable>(sink: &mut dyn Write, thing: &W) -> Result<(), Error> {\n\n\tlet mut writer = BinWriter { sink };\n\n\tthing.write(&mut writer)\n\n}\n\n\n", "file_path": "core/src/ser.rs", "rank": 93, "score": 316802.7864695308 }, { "content": "pub fn create_siphash_keys(header: &[u8]) -> Result<[u64; 4], Error> {\n\n\tlet h = blake2b(32, &[], &header);\n\n\tlet hb = h.as_bytes();\n\n\tlet mut rdr = Cursor::new(hb);\n\n\tOk([\n\n\t\trdr.read_u64::<LittleEndian>()?,\n\n\t\trdr.read_u64::<LittleEndian>()?,\n\n\t\trdr.read_u64::<LittleEndian>()?,\n\n\t\trdr.read_u64::<LittleEndian>()?,\n\n\t])\n\n}\n\n\n\n/// Macros to clean up 
integer unwrapping\n\n#[macro_export]\n\nmacro_rules! to_u64 {\n\n\t($n:expr) => {\n\n\t\t$n.to_u64().ok_or(ErrorKind::IntegerCast)?\n\n\t};\n\n}\n\n\n", "file_path": "core/src/pow/common.rs", "rank": 94, "score": 313492.99448833574 }, { "content": "/// Read a single item from the provided stream, always blocking until we\n\n/// have a result (or timeout).\n\n/// Returns the item and the total bytes read.\n\npub fn read_item<T: Readable>(stream: &mut dyn Read) -> Result<(T, u64), Error> {\n\n\tlet timeout = time::Duration::from_secs(20);\n\n\tlet mut reader = StreamingReader::new(stream, timeout);\n\n\tlet res = T::read(&mut reader)?;\n\n\tOk((res, reader.total_bytes_read()))\n\n}\n\n\n", "file_path": "p2p/src/msg.rs", "rank": 95, "score": 312658.7342972059 }, { "content": "fn get_grin_path(chain_type: &global::ChainTypes) -> Result<PathBuf, ConfigError> {\n\n\t// Check if grin dir exists\n\n\tlet mut grin_path = match dirs::home_dir() {\n\n\t\tSome(p) => p,\n\n\t\tNone => PathBuf::new(),\n\n\t};\n\n\tgrin_path.push(GRIN_HOME);\n\n\tgrin_path.push(chain_type.shortname());\n\n\t// Create if the default path doesn't exist\n\n\tif !grin_path.exists() {\n\n\t\tfs::create_dir_all(grin_path.clone())?;\n\n\t}\n\n\tOk(grin_path)\n\n}\n\n\n", "file_path": "config/src/config.rs", "rank": 96, "score": 311942.9075734019 }, { "content": "/// Check a txhashset directory and remove any unexpected\n\nfn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Result<(), Error> {\n\n\t// First compare the subdirectories\n\n\tlet subdirectories_expected: HashSet<_> = [OUTPUT_SUBDIR, KERNEL_SUBDIR, RANGE_PROOF_SUBDIR]\n\n\t\t.iter()\n\n\t\t.cloned()\n\n\t\t.map(|s| String::from(s))\n\n\t\t.collect();\n\n\n\n\tlet subdirectories_found: HashSet<_> = fs::read_dir(txhashset_path)?\n\n\t\t.filter_map(|entry| {\n\n\t\t\tentry.ok().and_then(|e| {\n\n\t\t\t\te.path()\n\n\t\t\t\t\t.file_name()\n\n\t\t\t\t\t.and_then(|n| n.to_str().map(|s| 
String::from(s)))\n\n\t\t\t})\n\n\t\t})\n\n\t\t.collect();\n\n\n\n\tlet dir_difference: Vec<String> = subdirectories_found\n\n\t\t.difference(&subdirectories_expected)\n", "file_path": "chain/src/txhashset/txhashset.rs", "rank": 97, "score": 311406.41938083165 }, { "content": "/// Get unread messages from all channels and mark as read.\n\nfn get_unread(topic: &str) -> Result<HashMap<String, String>, Error> {\n\n\tlet payload = to_string(&json!({\n\n\t\t\"method\": \"list\",\n\n\t\t\"params\": {\n\n\t\t\t\"options\": {\n\n\t\t\t\t\"topic_type\": \"dev\",\n\n\t\t\t},\n\n\t\t}\n\n\t}))\n\n\t.unwrap();\n\n\tlet response = api_send(&payload);\n\n\n\n\tif let Ok(res) = response {\n\n\t\tlet mut channels = HashSet::new();\n\n\t\t// Unfortunately the response does not contain the message body\n\n\t\t// and a separate call is needed for each channel\n\n\t\tfor msg in res[\"result\"][\"conversations\"]\n\n\t\t\t.as_array()\n\n\t\t\t.unwrap_or(&vec![json!({})])\n\n\t\t\t.iter()\n", "file_path": "wallet/src/adapters/keybase.rs", "rank": 98, "score": 310463.2863251979 }, { "content": "/// Readonly view on the UTXO set.\n\n/// Based on the current txhashset output_pmmr.\n\npub fn utxo_view<'a, F, T>(trees: &'a TxHashSet, inner: F) -> Result<T, Error>\n\nwhere\n\n\tF: FnOnce(&UTXOView<'_>) -> Result<T, Error>,\n\n{\n\n\tlet res: Result<T, Error>;\n\n\t{\n\n\t\tlet output_pmmr =\n\n\t\t\tReadonlyPMMR::at(&trees.output_pmmr_h.backend, trees.output_pmmr_h.last_pos);\n\n\t\tlet header_pmmr =\n\n\t\t\tReadonlyPMMR::at(&trees.header_pmmr_h.backend, trees.header_pmmr_h.last_pos);\n\n\n\n\t\t// Create a new batch here to pass into the utxo_view.\n\n\t\t// Discard it (rollback) after we finish with the utxo_view.\n\n\t\tlet batch = trees.commit_index.batch()?;\n\n\t\tlet utxo = UTXOView::new(output_pmmr, header_pmmr, &batch);\n\n\t\tres = inner(&utxo);\n\n\t}\n\n\tres\n\n}\n\n\n", "file_path": "chain/src/txhashset/txhashset.rs", "rank": 99, "score": 309113.26547544135 } ]
Rust
diesel/src/serialize.rs
unvalley/diesel
fd723880db6887a859ffa691890fe964de7dd3f8
use std::error::Error; use std::fmt; use std::io::{self, Write}; use std::result; use crate::backend::{Backend, HasBindCollector}; use crate::query_builder::bind_collector::RawBytesBindCollector; use crate::query_builder::BindCollector; #[cfg(feature = "postgres_backend")] pub use crate::pg::serialize::*; pub type Result = result::Result<IsNull, Box<dyn Error + Send + Sync>>; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum IsNull { Yes, No, } pub struct Output<'a, 'b, DB> where DB: Backend, DB::MetadataLookup: 'a, { out: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, metadata_lookup: Option<&'b mut DB::MetadataLookup>, } impl<'a, 'b, DB: Backend> Output<'a, 'b, DB> { pub fn new( out: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, metadata_lookup: &'b mut DB::MetadataLookup, ) -> Self { Output { out, metadata_lookup: Some(metadata_lookup), } } pub fn into_inner( self, ) -> <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer { self.out } pub fn metadata_lookup(&mut self) -> &mut DB::MetadataLookup { *self.metadata_lookup.as_mut().expect("Lookup is there") } pub fn set_value<V>(&mut self, value: V) where V: Into<<crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer>, { self.out = value.into(); } } #[cfg(test)] impl<'a, DB: Backend> Output<'a, 'static, DB> { pub fn test( buffer: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, ) -> Self { Self { out: buffer, metadata_lookup: None, } } } impl<'a, 'b, DB: Backend<BindCollector = RawBytesBindCollector<DB>>> Write for Output<'a, 'b, DB> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.out.write(buf) } fn flush(&mut self) -> io::Result<()> { self.out.flush() } fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { self.out.write_all(buf) } fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> { self.out.write_fmt(fmt) } } impl<'a, 'b, DB: Backend<BindCollector = 
RawBytesBindCollector<DB>>> Output<'a, 'b, DB> { pub fn reborrow<'c>(&'c mut self) -> Output<'c, 'c, DB> where 'a: 'c, { Output { out: RawBytesBindCollector::<DB>::reborrow_buffer(self.out), metadata_lookup: match &mut self.metadata_lookup { None => None, Some(m) => Some(&mut **m), }, } } } impl<'a, 'b, DB> fmt::Debug for Output<'a, 'b, DB> where <<DB as HasBindCollector<'a>>::BindCollector as BindCollector<'a, DB>>::Buffer: fmt::Debug, DB: Backend, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.out.fmt(f) } } pub trait ToSql<A, DB: Backend>: fmt::Debug { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> Result; } impl<'a, A, T, DB> ToSql<A, DB> for &'a T where DB: Backend, T: ToSql<A, DB> + ?Sized, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> Result { (*self).to_sql(out) } }
use std::error::Error; use std::fmt; use std::io::{self, Write}; use std::result; use crate::backend::{Backend, HasBindCollector}; use crate::query_builder::bind_collector::RawBytesBindCollector; use crate::query_builder::BindCollector; #[cfg(feature = "postgres_backend")] pub use crate::pg::serialize::*; pub type Result = result::Result<IsNull, Box<dyn Error + Send + Sync>>; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum IsNull { Yes, No, } pub struct Output<'a, 'b, DB> where DB: Backend, DB::MetadataLookup: 'a, { out: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, metadata_lookup: Option<&'b mut DB::MetadataLookup>, } impl<'a, 'b, DB: Backend> Output<'a, 'b, DB> {
pub fn into_inner( self, ) -> <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer { self.out } pub fn metadata_lookup(&mut self) -> &mut DB::MetadataLookup { *self.metadata_lookup.as_mut().expect("Lookup is there") } pub fn set_value<V>(&mut self, value: V) where V: Into<<crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer>, { self.out = value.into(); } } #[cfg(test)] impl<'a, DB: Backend> Output<'a, 'static, DB> { pub fn test( buffer: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, ) -> Self { Self { out: buffer, metadata_lookup: None, } } } impl<'a, 'b, DB: Backend<BindCollector = RawBytesBindCollector<DB>>> Write for Output<'a, 'b, DB> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.out.write(buf) } fn flush(&mut self) -> io::Result<()> { self.out.flush() } fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { self.out.write_all(buf) } fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> { self.out.write_fmt(fmt) } } impl<'a, 'b, DB: Backend<BindCollector = RawBytesBindCollector<DB>>> Output<'a, 'b, DB> { pub fn reborrow<'c>(&'c mut self) -> Output<'c, 'c, DB> where 'a: 'c, { Output { out: RawBytesBindCollector::<DB>::reborrow_buffer(self.out), metadata_lookup: match &mut self.metadata_lookup { None => None, Some(m) => Some(&mut **m), }, } } } impl<'a, 'b, DB> fmt::Debug for Output<'a, 'b, DB> where <<DB as HasBindCollector<'a>>::BindCollector as BindCollector<'a, DB>>::Buffer: fmt::Debug, DB: Backend, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.out.fmt(f) } } pub trait ToSql<A, DB: Backend>: fmt::Debug { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> Result; } impl<'a, A, T, DB> ToSql<A, DB> for &'a T where DB: Backend, T: ToSql<A, DB> + ?Sized, { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> Result { (*self).to_sql(out) } }
pub fn new( out: <crate::backend::BindCollector<'a, DB> as BindCollector<'a, DB>>::Buffer, metadata_lookup: &'b mut DB::MetadataLookup, ) -> Self { Output { out, metadata_lookup: Some(metadata_lookup), } }
function_block-full_function
[]
Rust
components/front_matter/src/lib.rs
zoosky/zola
b359cca4fe46a7abc8ce10620561d48d685b86ef
use lazy_static::lazy_static; use serde_derive::{Deserialize, Serialize}; use errors::{bail, Error, Result}; use regex::Regex; use std::path::Path; mod page; mod section; pub use page::PageFrontMatter; pub use section::SectionFrontMatter; lazy_static! { static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap(); } #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum SortBy { Date, Weight, None, } #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum InsertAnchor { Left, Right, None, } fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> { if !PAGE_RE.is_match(content) { bail!( "Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy() ); } let caps = PAGE_RE.captures(content).unwrap(); Ok((caps[1].to_string(), caps[2].to_string())) } pub fn split_section_content( file_path: &Path, content: &str, ) -> Result<(SectionFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| { Error::chain( format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), e, ) })?; Ok((meta, content)) } pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = PageFrontMatter::parse(&front_matter).map_err(|e| { Error::chain( format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()), e, ) })?; Ok((meta, content)) } #[cfg(test)] mod tests { use std::path::Path; use super::{split_page_content, split_section_content}; #[test] fn can_split_page_content_valid() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12 +++ Hello "#; let (front_matter, content) = 
split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, "Hello\n"); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn can_split_section_content_valid() { let content = r#" +++ paginate_by = 10 +++ Hello "#; let (front_matter, content) = split_section_content(Path::new(""), content).unwrap(); assert_eq!(content, "Hello\n"); assert!(front_matter.is_paginated()); } #[test] fn can_split_content_with_only_frontmatter_valid() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12 +++"#; let (front_matter, content) = split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, ""); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn can_split_content_lazily() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-02T15:00:00Z +++ +++"#; let (front_matter, content) = split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, "+++"); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn errors_if_cannot_locate_frontmatter() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12"#; let res = split_page_content(Path::new(""), content); assert!(res.is_err()); } }
use lazy_static::lazy_static; use serde_derive::{Deserialize, Serialize}; use errors::{bail, Error, Result}; use regex::Regex; use std::path::Path; mod page; mod section; pub use page::PageFrontMatter; pub use section::SectionFrontMatter; lazy_static! { static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap(); } #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum SortBy { Date, Weight, None, } #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum InsertAnchor { Left, Right, None, } fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> { if !PAGE_RE.is_match(content) { bail!( "Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy() ); } let caps = PAGE_RE.captures(content).unwrap(); Ok((caps[1].to_string(), caps[2].to_string())) } pub fn split_section_content( file_path: &Path, content: &str, ) -> Result<(SectionFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| { Error::chain( format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), e, ) })?; Ok((meta, content)) } pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = PageFrontMatter::parse(&front_matter).map_err(|e| { Error::chain( format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()), e, ) })?;
frontmatter() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12"#; let res = split_page_content(Path::new(""), content); assert!(res.is_err()); } }
Ok((meta, content)) } #[cfg(test)] mod tests { use std::path::Path; use super::{split_page_content, split_section_content}; #[test] fn can_split_page_content_valid() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12 +++ Hello "#; let (front_matter, content) = split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, "Hello\n"); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn can_split_section_content_valid() { let content = r#" +++ paginate_by = 10 +++ Hello "#; let (front_matter, content) = split_section_content(Path::new(""), content).unwrap(); assert_eq!(content, "Hello\n"); assert!(front_matter.is_paginated()); } #[test] fn can_split_content_with_only_frontmatter_valid() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-12 +++"#; let (front_matter, content) = split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, ""); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn can_split_content_lazily() { let content = r#" +++ title = "Title" description = "hey there" date = 2002-10-02T15:00:00Z +++ +++"#; let (front_matter, content) = split_page_content(Path::new(""), content).unwrap(); assert_eq!(content, "+++"); assert_eq!(front_matter.title.unwrap(), "Title"); } #[test] fn errors_if_cannot_locate_
random
[ { "content": "/// Return the content of a file, with error handling added.\n\n/// The default error message is overwritten by the message given.\n\n/// That means it is allocation 2 strings, oh well\n\npub fn read_file_with_error(path: &Path, message: &str) -> Result<String> {\n\n let res = read_file(&path);\n\n if res.is_ok() {\n\n return res;\n\n }\n\n let mut err = Error::msg(message);\n\n err.source = res.unwrap_err().source;\n\n Err(err)\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 2, "score": 348728.71274387673 }, { "content": "/// Create a file with the content given\n\npub fn create_file(path: &Path, content: &str) -> Result<()> {\n\n let mut file =\n\n File::create(&path).map_err(|e| Error::chain(format!(\"Failed to create {:?}\", path), e))?;\n\n file.write_all(content.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 3, "score": 329750.08851757046 }, { "content": "/// Takes a full path to a file and returns only the components after the first `content` directory\n\n/// Will not return the filename as last component\n\npub fn find_content_components<P: AsRef<Path>>(path: P) -> Vec<String> {\n\n let path = path.as_ref();\n\n let mut is_in_content = false;\n\n let mut components = vec![];\n\n\n\n for section in path.parent().unwrap().components() {\n\n let component = section.as_os_str().to_string_lossy();\n\n\n\n if is_in_content {\n\n components.push(component.to_string());\n\n continue;\n\n }\n\n\n\n if component == \"content\" {\n\n is_in_content = true;\n\n }\n\n }\n\n\n\n components\n\n}\n", "file_path": "components/library/src/content/file_info.rs", "rank": 4, "score": 306356.8376177728 }, { "content": "fn compute_file_sha256(path: &PathBuf) -> result::Result<String, io::Error> {\n\n let mut file = fs::File::open(path)?;\n\n let mut hasher = Sha256::new();\n\n io::copy(&mut file, &mut hasher)?;\n\n Ok(format!(\"{:x}\", hasher.result()))\n\n}\n\n\n\nimpl TeraFn for GetUrl {\n\n fn call(&self, 
args: &HashMap<String, Value>) -> Result<Value> {\n\n let cachebust =\n\n args.get(\"cachebust\").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));\n\n\n\n let trailing_slash = args\n\n .get(\"trailing_slash\")\n\n .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));\n\n\n\n let path = required_arg!(\n\n String,\n\n args.get(\"path\"),\n\n \"`get_url` requires a `path` argument with a string value\"\n", "file_path": "components/templates/src/global_fns/mod.rs", "rank": 5, "score": 298006.001956392 }, { "content": "pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {\n\n let mut res = String::with_capacity(content.len());\n\n\n\n let mut pairs = match ContentParser::parse(Rule::page, content) {\n\n Ok(p) => p,\n\n Err(e) => {\n\n let fancy_e = e.renamed_rules(|rule| match *rule {\n\n Rule::int => \"an integer\".to_string(),\n\n Rule::float => \"a float\".to_string(),\n\n Rule::string => \"a string\".to_string(),\n\n Rule::literal => \"a literal (int, float, string, bool)\".to_string(),\n\n Rule::array => \"an array\".to_string(),\n\n Rule::kwarg => \"a keyword argument\".to_string(),\n\n Rule::ident => \"an identifier\".to_string(),\n\n Rule::inline_shortcode => \"an inline shortcode\".to_string(),\n\n Rule::ignored_inline_shortcode => \"an ignored inline shortcode\".to_string(),\n\n Rule::sc_body_start => \"the start of a shortcode\".to_string(),\n\n Rule::ignored_sc_body_start => \"the start of an ignored shortcode\".to_string(),\n\n Rule::text => \"some text\".to_string(),\n\n Rule::EOI => \"end of input\".to_string(),\n", "file_path": "components/rendering/src/shortcode.rs", "rank": 6, "score": 293386.1372455651 }, { "content": "fn make_path_with_lang(path: String, lang: &str, config: &Config) -> Result<String> {\n\n if lang == &config.default_language {\n\n return Ok(path);\n\n }\n\n\n\n if !config.languages.iter().any(|x| x.code == lang) {\n\n return Err(\n\n format!(\"`{}` is not an authorized 
language (check config.languages).\", lang).into()\n\n );\n\n }\n\n\n\n let mut splitted_path: Vec<String> = path.split(\".\").map(String::from).collect();\n\n let ilast = splitted_path.len() - 1;\n\n splitted_path[ilast] = format!(\"{}.{}\", lang, splitted_path[ilast]);\n\n Ok(splitted_path.join(\".\"))\n\n}\n\n\n", "file_path": "components/templates/src/global_fns/mod.rs", "rank": 7, "score": 292597.64295600494 }, { "content": "/// Return the content of a file, with error handling added\n\npub fn read_file(path: &Path) -> Result<String> {\n\n let mut content = String::new();\n\n File::open(path)\n\n .map_err(|e| Error::chain(format!(\"Failed to open '{:?}'\", path.display()), e))?\n\n .read_to_string(&mut content)?;\n\n\n\n // Remove utf-8 BOM if any.\n\n if content.starts_with(\"\\u{feff}\") {\n\n content.drain(..3);\n\n }\n\n\n\n Ok(content)\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 8, "score": 287981.021866722 }, { "content": "/// Ask a question to the user where they can write a URL\n\npub fn ask_url(question: &str, default: &str) -> Result<String> {\n\n print!(\"{} ({}): \", question, default);\n\n let _ = io::stdout().flush();\n\n let input = read_line()?;\n\n\n\n match &*input {\n\n \"\" => Ok(default.to_string()),\n\n _ => match Url::parse(&input) {\n\n Ok(_) => Ok(input),\n\n Err(_) => {\n\n println!(\"Invalid URL: '{}'\", input);\n\n ask_url(question, default)\n\n }\n\n },\n\n }\n\n}\n", "file_path": "src/prompt.rs", "rank": 9, "score": 276039.46189571207 }, { "content": "fn check_page_for_anchor(url: &str, body: String) -> errors::Result<()> {\n\n let index = url.find('#').unwrap();\n\n let anchor = url.get(index + 1..).unwrap();\n\n let checks: [String; 8] = [\n\n format!(\" id='{}'\", anchor),\n\n format!(\" ID='{}'\", anchor),\n\n format!(r#\" id=\"{}\"\"#, anchor),\n\n format!(r#\" ID=\"{}\"\"#, anchor),\n\n format!(\" name='{}'\", anchor),\n\n format!(\" NAME='{}'\", anchor),\n\n format!(r#\" name=\"{}\"\"#, anchor),\n\n 
format!(r#\" NAME=\"{}\"\"#, anchor),\n\n ];\n\n\n\n if checks.iter().any(|check| body[..].contains(&check[..])) {\n\n Ok(())\n\n } else {\n\n Err(errors::Error::from(format!(\"Anchor `#{}` not found on page\", anchor)))\n\n }\n\n}\n", "file_path": "components/link_checker/src/lib.rs", "rank": 11, "score": 265245.2559560406 }, { "content": "/// Used as an attribute when we want to convert from TOML to a string date\n\npub fn from_toml_datetime<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string()))\n\n}\n\n\n", "file_path": "components/utils/src/de.rs", "rank": 12, "score": 257060.82813421113 }, { "content": "/// What happens when a section or a page is created/edited\n\npub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> {\n\n let is_section = {\n\n let languages_codes = site.config.languages_codes();\n\n is_section(path.file_name().unwrap().to_str().unwrap(), &languages_codes)\n\n };\n\n\n\n let is_md = path.extension().unwrap() == \"md\";\n\n let index = path.parent().unwrap().join(\"index.md\");\n\n\n\n let mut potential_indices = vec![path.parent().unwrap().join(\"index.md\")];\n\n for language in &site.config.languages {\n\n potential_indices.push(path.parent().unwrap().join(format!(\"index.{}.md\", language.code)));\n\n }\n\n let colocated_index = potential_indices.contains(&path.to_path_buf());\n\n\n\n // A few situations can happen:\n\n // 1. Change on .md files\n\n // a. Is there already an `index.md`? Return an error if it's something other than delete\n\n // b. Deleted? remove the element\n\n // c. 
Edited?\n", "file_path": "components/rebuild/src/lib.rs", "rank": 13, "score": 249582.23018558644 }, { "content": "/// Returns the generated JSON index with all the documents of the site added using\n\n/// the language given\n\n/// Errors if the language given is not available in Elasticlunr\n\n/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?\n\npub fn build_index(lang: &str, library: &Library) -> Result<String> {\n\n let language = match Language::from_code(lang) {\n\n Some(l) => l,\n\n None => {\n\n bail!(\"Tried to build search index for language {} which is not supported\", lang);\n\n }\n\n };\n\n\n\n let mut index = Index::with_language(language, &[\"title\", \"body\"]);\n\n\n\n for section in library.sections_values() {\n\n if section.lang == lang {\n\n add_section_to_index(&mut index, section, library);\n\n }\n\n }\n\n\n\n Ok(index.to_json())\n\n}\n\n\n", "file_path": "components/search/src/lib.rs", "rank": 14, "score": 247687.1815987779 }, { "content": "pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> {\n\n // Don't do shortcodes if there is nothing like a shortcode in the content\n\n if content.contains(\"{{\") || content.contains(\"{%\") {\n\n let rendered = render_shortcodes(content, context)?;\n\n return markdown_to_html(&rendered, context);\n\n }\n\n\n\n markdown_to_html(&content, context)\n\n}\n", "file_path": "components/rendering/src/lib.rs", "rank": 15, "score": 245398.16173027665 }, { "content": "/// Display an error message and the actual error(s)\n\npub fn unravel_errors(message: &str, error: &Error) {\n\n if !message.is_empty() {\n\n self::error(message);\n\n }\n\n self::error(&format!(\"Error: {}\", error));\n\n let mut cause = error.source();\n\n while let Some(e) = cause {\n\n self::error(&format!(\"Reason: {}\", e));\n\n cause = e.source();\n\n }\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 16, "score": 245316.02338908747 }, { "content": "pub fn 
error(message: &str) {\n\n colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)));\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 17, "score": 244420.76722190948 }, { "content": "/// Renders the `internal/alias.html` template that will redirect\n\n/// via refresh to the url given\n\npub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {\n\n let mut context = Context::new();\n\n context.insert(\"url\", &url);\n\n\n\n tera.render(\"internal/alias.html\", &context)\n\n .map_err(|e| Error::chain(format!(\"Failed to render alias for '{}'\", url), e))\n\n}\n", "file_path": "components/templates/src/lib.rs", "rank": 18, "score": 243757.5684994754 }, { "content": "pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> {\n\n // the rendered html\n\n let mut html = String::with_capacity(content.len());\n\n // Set while parsing\n\n let mut error = None;\n\n\n\n let mut background = IncludeBackground::Yes;\n\n let mut highlighter: Option<(HighlightLines, bool)> = None;\n\n\n\n let mut inserted_anchors: Vec<String> = vec![];\n\n let mut headings: Vec<Heading> = vec![];\n\n let mut internal_links_with_anchors = Vec::new();\n\n let mut external_links = Vec::new();\n\n\n\n let mut opts = Options::empty();\n\n let mut has_summary = false;\n\n opts.insert(Options::ENABLE_TABLES);\n\n opts.insert(Options::ENABLE_FOOTNOTES);\n\n opts.insert(Options::ENABLE_STRIKETHROUGH);\n\n\n", "file_path": "components/rendering/src/markdown.rs", "rank": 19, "score": 242597.75604150718 }, { "content": "/// Very similar to `create_dir` from the std except it checks if the folder\n\n/// exists before creating it\n\npub fn create_directory(path: &Path) -> Result<()> {\n\n if !path.exists() {\n\n create_dir_all(path).map_err(|e| {\n\n Error::chain(format!(\"Was not able to create folder {}\", path.display()), e)\n\n })?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 20, "score": 
241183.413426047 }, { "content": "pub fn slugify_paths(s: &str, strategy: SlugifyStrategy) -> String {\n\n match strategy {\n\n SlugifyStrategy::On => slug::slugify(s),\n\n SlugifyStrategy::Safe => strip_invalid_paths_chars(s),\n\n SlugifyStrategy::Off => s.to_string(),\n\n }\n\n}\n\n\n", "file_path": "components/utils/src/slugs.rs", "rank": 21, "score": 239022.53830144173 }, { "content": "/// Create a directory at the given path if it doesn't exist already\n\npub fn ensure_directory_exists(path: &Path) -> Result<()> {\n\n if !path.exists() {\n\n create_directory(path)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 22, "score": 237708.78559992122 }, { "content": "/// What happens when we rename a file/folder in the content directory.\n\n/// Note that this is only called for folders when it isn't empty\n\npub fn after_content_rename(site: &mut Site, old: &Path, new: &Path) -> Result<()> {\n\n let new_path = if new.is_dir() {\n\n if new.join(\"_index.md\").exists() {\n\n // This is a section keep the dir folder to differentiate from renaming _index.md\n\n // which doesn't do the same thing\n\n new.to_path_buf()\n\n } else if new.join(\"index.md\").exists() {\n\n new.join(\"index.md\")\n\n } else {\n\n bail!(\"Got unexpected folder {:?} while handling renaming that was not expected\", new);\n\n }\n\n } else {\n\n new.to_path_buf()\n\n };\n\n\n\n // A section folder has been renamed: just reload the whole site and rebuild it as we\n\n // do not really know what needs to be rendered\n\n if new_path.is_dir() {\n\n site.load()?;\n\n return site.build();\n", "file_path": "components/rebuild/src/lib.rs", "rank": 23, "score": 234993.48274074166 }, { "content": "// Given a path, return true if it is a directory and it doesn't have any\n\n// non-hidden files, otherwise return false (path is assumed to exist)\n\npub fn is_directory_quasi_empty(path: &Path) -> Result<bool> {\n\n if path.is_dir() {\n\n let mut entries = match path.read_dir() 
{\n\n Ok(entries) => entries,\n\n Err(e) => {\n\n bail!(\n\n \"Could not read `{}` because of error: {}\",\n\n path.to_string_lossy().to_string(),\n\n e\n\n );\n\n }\n\n };\n\n // If any entry raises an error or isn't hidden (i.e. starts with `.`), we raise an error\n\n if entries.any(|x| match x {\n\n Ok(file) => !file\n\n .file_name()\n\n .to_str()\n\n .expect(\"Could not convert filename to &str\")\n\n .starts_with('.'),\n\n Err(_) => true,\n\n }) {\n\n return Ok(false);\n\n }\n\n return Ok(true);\n\n }\n\n\n\n Ok(false)\n\n}\n\n\n", "file_path": "src/cmd/init.rs", "rank": 24, "score": 232177.1837017683 }, { "content": "pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {\n\n let canonical_path = path\n\n .canonicalize()\n\n .map_err(|e| format!(\"Failed to canonicalize {}: {}\", path.display(), e))?;\n\n let canonical_parent = parent\n\n .canonicalize()\n\n .map_err(|e| format!(\"Failed to canonicalize {}: {}\", parent.display(), e))?;\n\n\n\n Ok(canonical_path.starts_with(canonical_parent))\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 25, "score": 231136.13228370904 }, { "content": "pub fn message(res: &Result) -> String {\n\n match res {\n\n Ok(ref code) => format!(\"{}\", code),\n\n Err(ref error) => error.clone(),\n\n }\n\n}\n\n\n\nlazy_static! {\n\n // Keep history of link checks so a rebuild doesn't have to check again\n\n static ref LINKS: Arc<RwLock<HashMap<String, Result>>> = Arc::new(RwLock::new(HashMap::new()));\n\n}\n\n\n", "file_path": "components/link_checker/src/lib.rs", "rank": 26, "score": 220915.36921902298 }, { "content": "pub fn create_new_project(name: &str) -> Result<()> {\n\n let path = Path::new(name);\n\n // Better error message than the rust default\n\n if path.exists() && !is_directory_quasi_empty(&path)? 
{\n\n if name == \".\" {\n\n bail!(\"The current directory is not an empty folder (hidden files are ignored).\");\n\n } else {\n\n bail!(\n\n \"`{}` is not an empty folder (hidden files are ignored).\",\n\n path.to_string_lossy().to_string()\n\n )\n\n }\n\n }\n\n\n\n console::info(\"Welcome to Zola!\");\n\n console::info(\"Please answer a few questions to get started quickly.\");\n\n console::info(\"Any choices made can be changed by modifying the `config.toml` file later.\");\n\n\n\n let base_url = ask_url(\"> What is the URL of your site?\", \"https://example.com\")?;\n\n let compile_sass = ask_bool(\"> Do you want to enable Sass compilation?\", true)?;\n", "file_path": "src/cmd/init.rs", "rank": 27, "score": 220654.5836507425 }, { "content": "fn populate(path: &Path, compile_sass: bool, config: &str) -> Result<()> {\n\n if !path.exists() {\n\n create_dir(path)?;\n\n }\n\n create_file(&path.join(\"config.toml\"), &config)?;\n\n create_dir(path.join(\"content\"))?;\n\n create_dir(path.join(\"templates\"))?;\n\n create_dir(path.join(\"static\"))?;\n\n create_dir(path.join(\"themes\"))?;\n\n if compile_sass {\n\n create_dir(path.join(\"sass\"))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::env::temp_dir;\n", "file_path": "src/cmd/init.rs", "rank": 28, "score": 220436.61338652403 }, { "content": "/// Copy a file but takes into account where to start the copy as\n\n/// there might be folders we need to create on the way.\n\n/// No copy occurs if all of the following conditions are satisfied:\n\n/// 1. A file with the same name already exists in the dest path.\n\n/// 2. Its modification timestamp is identical to that of the src file.\n\n/// 3. 
Its filesize is identical to that of the src file.\n\npub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: bool) -> Result<()> {\n\n let relative_path = src.strip_prefix(base_path).unwrap();\n\n let target_path = dest.join(relative_path);\n\n\n\n if let Some(parent_directory) = target_path.parent() {\n\n create_dir_all(parent_directory)?;\n\n }\n\n\n\n if hard_link {\n\n std::fs::hard_link(src, target_path)?\n\n } else {\n\n let src_metadata = metadata(src)?;\n\n let src_mtime = FileTime::from_last_modification_time(&src_metadata);\n\n if Path::new(&target_path).is_file() {\n\n let target_metadata = metadata(&target_path)?;\n\n let target_mtime = FileTime::from_last_modification_time(&target_metadata);\n\n if !(src_mtime == target_mtime && src_metadata.len() == target_metadata.len()) {\n\n copy(src, &target_path)?;\n\n set_file_mtime(&target_path, src_mtime)?;\n\n }\n\n } else {\n\n copy(src, &target_path)?;\n\n set_file_mtime(&target_path, src_mtime)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 29, "score": 219417.41266799517 }, { "content": "pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {\n\n for heading in headings {\n\n if heading.id == anchor {\n\n return true;\n\n }\n\n if has_anchor(&heading.children, anchor) {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_find_anchor_at_root() {\n\n let input = vec![\n", "file_path": "components/library/src/content/mod.rs", "rank": 30, "score": 218898.59931090038 }, { "content": "/// What happens when a template is changed\n\npub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {\n\n site.tera.full_reload()?;\n\n let filename = path.file_name().unwrap().to_str().unwrap();\n\n\n\n match filename {\n\n \"sitemap.xml\" => site.render_sitemap(),\n\n filename if filename == site.config.feed_filename => {\n\n // FIXME: this is insufficient; for 
multilingual sites, it’s rendering the wrong\n\n // content into the root feed, and it’s not regenerating any of the other feeds (other\n\n // languages or taxonomies with feed enabled).\n\n site.render_feed(\n\n site.library.read().unwrap().pages_values(),\n\n None,\n\n &site.config.default_language,\n\n None,\n\n )\n\n }\n\n \"split_sitemap_index.xml\" => site.render_sitemap(),\n\n \"robots.txt\" => site.render_robots(),\n\n \"single.html\" | \"list.html\" => site.render_taxonomies(),\n", "file_path": "components/rebuild/src/lib.rs", "rank": 31, "score": 216590.03990855723 }, { "content": "fn is_section(path: &str, languages_codes: &[&str]) -> bool {\n\n if path == \"_index.md\" {\n\n return true;\n\n }\n\n\n\n for language_code in languages_codes {\n\n let lang_section_string = format!(\"_index.{}.md\", language_code);\n\n if path == lang_section_string {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "components/rebuild/src/lib.rs", "rank": 32, "score": 213357.98946085636 }, { "content": "/// Looks into the current folder for the path and see if there's anything that is not a .md\n\n/// file. 
Those will be copied next to the rendered .html file\n\npub fn find_related_assets(path: &Path) -> Vec<PathBuf> {\n\n let mut assets = vec![];\n\n\n\n for entry in read_dir(path).unwrap().filter_map(std::result::Result::ok) {\n\n let entry_path = entry.path();\n\n if entry_path.is_file() {\n\n match entry_path.extension() {\n\n Some(e) => match e.to_str() {\n\n Some(\"md\") => continue,\n\n _ => assets.push(entry_path.to_path_buf()),\n\n },\n\n None => continue,\n\n }\n\n }\n\n }\n\n\n\n assets\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 33, "score": 212217.0677892684 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {\n\n let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());\n\n if ord == Ordering::Equal {\n\n a.permalink.cmp(&b.permalink)\n\n } else {\n\n ord\n\n }\n\n}\n\n\n", "file_path": "components/library/src/sorting.rs", "rank": 34, "score": 212064.992528556 }, { "content": "pub fn copy_directory(src: &PathBuf, dest: &PathBuf, hard_link: bool) -> Result<()> {\n\n for entry in WalkDir::new(src).into_iter().filter_map(std::result::Result::ok) {\n\n let relative_path = entry.path().strip_prefix(src).unwrap();\n\n let target_path = dest.join(relative_path);\n\n\n\n if entry.path().is_dir() {\n\n if !target_path.exists() {\n\n create_directory(&target_path)?;\n\n }\n\n } else {\n\n copy_file(entry.path(), dest, src, hard_link)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 35, "score": 208434.26447652045 }, { "content": "/// Get word count and estimated reading time\n\npub fn get_reading_analytics(content: &str) -> (usize, usize) {\n\n let word_count: usize = content.unicode_words().count();\n\n\n\n // https://help.medium.com/hc/en-us/articles/214991667-Read-time\n\n // 275 seems a bit too high though\n\n (word_count, ((word_count + 199) / 200))\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct 
ResolvedInternalLink {\n\n pub permalink: String,\n\n // The 2 fields below are only set when there is an anchor\n\n // as we will need that to check if it exists after the markdown rendering is done\n\n pub md_path: Option<String>,\n\n pub anchor: Option<String>,\n\n}\n\n\n", "file_path": "components/utils/src/site.rs", "rank": 36, "score": 207221.1243847928 }, { "content": "/// Evaluates all the params in the front matter that changed so we can do the smallest\n\n/// delta in the serve command\n\n/// Order matters as the actions will be done in insertion order\n\nfn find_section_front_matter_changes(\n\n current: &SectionFrontMatter,\n\n new: &SectionFrontMatter,\n\n) -> Vec<SectionChangesNeeded> {\n\n let mut changes_needed = vec![];\n\n\n\n if current.sort_by != new.sort_by {\n\n changes_needed.push(SectionChangesNeeded::Sort);\n\n }\n\n\n\n if current.transparent != new.transparent {\n\n changes_needed.push(SectionChangesNeeded::Transparent);\n\n }\n\n\n\n // We want to hide the section\n\n // TODO: what to do on redirect_path change?\n\n if current.render && !new.render {\n\n changes_needed.push(SectionChangesNeeded::Delete);\n\n // Nothing else we can do\n\n return changes_needed;\n", "file_path": "components/rebuild/src/lib.rs", "rank": 37, "score": 206048.66454044046 }, { "content": "/// Evaluates all the params in the front matter that changed so we can do the smallest\n\n/// delta in the serve command\n\n/// Order matters as the actions will be done in insertion order\n\nfn find_page_front_matter_changes(\n\n current: &PageFrontMatter,\n\n other: &PageFrontMatter,\n\n) -> Vec<PageChangesNeeded> {\n\n let mut changes_needed = vec![];\n\n\n\n if current.taxonomies != other.taxonomies {\n\n changes_needed.push(PageChangesNeeded::Taxonomies);\n\n }\n\n\n\n if current.date != other.date || current.order != other.order || current.weight != other.weight\n\n {\n\n changes_needed.push(PageChangesNeeded::Sort);\n\n }\n\n\n\n 
changes_needed.push(PageChangesNeeded::Render);\n\n changes_needed\n\n}\n\n\n", "file_path": "components/rebuild/src/lib.rs", "rank": 38, "score": 205849.33806532118 }, { "content": "fn strip_invalid_paths_chars(s: &str) -> String {\n\n // NTFS forbidden characters : https://gist.github.com/doctaphred/d01d05291546186941e1b7ddc02034d3\n\n // Also we need to trim whitespaces and `.` from the end of filename\n\n let trimmed = s.trim_end_matches(|c| c == ' ' || c == '.');\n\n strip_chars(&trimmed, r#\"<>:\"/\\|?*\"#)\n\n}\n\n\n", "file_path": "components/utils/src/slugs.rs", "rank": 39, "score": 205450.0605041028 }, { "content": "pub fn slugify_anchors(s: &str, strategy: SlugifyStrategy) -> String {\n\n match strategy {\n\n SlugifyStrategy::On => slug::slugify(s),\n\n SlugifyStrategy::Safe | SlugifyStrategy::Off => {\n\n s.replace(|c: char| c.is_ascii_whitespace(), \"_\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_slugify_paths() {\n\n let tests = vec![\n\n // input, (on, safe, off)\n\n (\"input\", (\"input\", \"input\", \"input\")),\n\n (\"test \", (\"test\", \"test\", \"test \")),\n\n (\"tes t\", (\"tes-t\", \"tes t\", \"tes t\")),\n", "file_path": "components/utils/src/slugs.rs", "rank": 40, "score": 203692.95594070872 }, { "content": "/// Handles a `_index.md` (a section) being edited in some ways\n\nfn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {\n\n let section = Section::from_file(path, &site.config, &site.base_path)?;\n\n let pathbuf = path.to_path_buf();\n\n match site.add_section(section, true)? 
{\n\n // Updating a section\n\n Some(prev) => {\n\n site.populate_sections();\n\n site.process_images()?;\n\n {\n\n let library = site.library.read().unwrap();\n\n\n\n if library.get_section(&pathbuf).unwrap().meta == prev.meta {\n\n // Front matter didn't change, only content did\n\n // so we render only the section page, not its pages\n\n return site.render_section(&library.get_section(&pathbuf).unwrap(), false);\n\n }\n\n }\n\n\n\n // Front matter changed\n\n let changes = find_section_front_matter_changes(\n", "file_path": "components/rebuild/src/lib.rs", "rank": 41, "score": 203434.31990811837 }, { "content": "pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {\n\n let taxonomies_def = {\n\n let mut m = HashMap::new();\n\n for t in &config.taxonomies {\n\n m.insert(format!(\"{}-{}\", t.name, t.lang), t);\n\n }\n\n m\n\n };\n\n\n\n let mut all_taxonomies = HashMap::new();\n\n for (key, page) in library.pages() {\n\n for (name, val) in &page.meta.taxonomies {\n\n let taxo_key = format!(\"{}-{}\", name, page.lang);\n\n if taxonomies_def.contains_key(&taxo_key) {\n\n all_taxonomies.entry(taxo_key.clone()).or_insert_with(HashMap::new);\n\n\n\n for v in val {\n\n all_taxonomies\n\n .get_mut(&taxo_key)\n\n .unwrap()\n", "file_path": "components/library/src/taxonomies/mod.rs", "rank": 42, "score": 203351.74937336566 }, { "content": "/// Handles a page being edited in some ways\n\nfn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {\n\n let page = Page::from_file(path, &site.config, &site.base_path)?;\n\n let pathbuf = path.to_path_buf();\n\n match site.add_page(page, true)? 
{\n\n // Updating a page\n\n Some(prev) => {\n\n site.populate_sections();\n\n site.populate_taxonomies()?;\n\n site.register_tera_global_fns();\n\n site.process_images()?;\n\n {\n\n let library = site.library.read().unwrap();\n\n\n\n // Front matter didn't change, only content did\n\n if library.get_page(&pathbuf).unwrap().meta == prev.meta {\n\n // Other than the page itself, the summary might be seen\n\n // on a paginated list for a blog for example\n\n if library.get_page(&pathbuf).unwrap().summary.is_some() {\n\n render_parent_sections!(site, path);\n\n }\n", "file_path": "components/rebuild/src/lib.rs", "rank": 43, "score": 203253.58786291516 }, { "content": "/// Ask a yes/no question to the user\n\npub fn ask_bool(question: &str, default: bool) -> Result<bool> {\n\n print!(\"{} {}: \", question, if default { \"[Y/n]\" } else { \"[y/N]\" });\n\n let _ = io::stdout().flush();\n\n let input = read_line()?;\n\n\n\n match &*input {\n\n \"y\" | \"Y\" | \"yes\" | \"YES\" | \"true\" => Ok(true),\n\n \"n\" | \"N\" | \"no\" | \"NO\" | \"false\" => Ok(false),\n\n \"\" => Ok(default),\n\n _ => {\n\n println!(\"Invalid choice: '{}'\", input);\n\n ask_bool(question, default)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 44, "score": 202813.3734809156 }, { "content": "pub fn check_url(url: &str, config: &LinkChecker) -> Result {\n\n {\n\n let guard = LINKS.read().unwrap();\n\n if let Some(res) = guard.get(url) {\n\n return res.clone();\n\n }\n\n }\n\n\n\n let mut headers = HeaderMap::new();\n\n headers.insert(ACCEPT, \"text/html\".parse().unwrap());\n\n headers.append(ACCEPT, \"*/*\".parse().unwrap());\n\n\n\n let client = Client::builder()\n\n .user_agent(concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")))\n\n .build()\n\n .expect(\"reqwest client build\");\n\n\n\n let check_anchor = !config.skip_anchor_prefixes.iter().any(|prefix| url.starts_with(prefix));\n\n\n\n // Need to actually do the link checking\n", "file_path": 
"components/link_checker/src/lib.rs", "rank": 45, "score": 197057.73060001805 }, { "content": "/// Takes a list of (page key, weight, permalink) and sort them by weight if possible\n\n/// Pages without weight will be put in the unsortable bucket\n\n/// The permalink is used to break ties\n\npub fn sort_pages_by_weight(\n\n pages: Vec<(&DefaultKey, Option<usize>, &str)>,\n\n) -> (Vec<DefaultKey>, Vec<DefaultKey>) {\n\n let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =\n\n pages.into_par_iter().partition(|page| page.1.is_some());\n\n\n\n can_be_sorted.par_sort_unstable_by(|a, b| {\n\n let ord = a.1.unwrap().cmp(&b.1.unwrap());\n\n if ord == Ordering::Equal {\n\n a.2.cmp(&b.2)\n\n } else {\n\n ord\n\n }\n\n });\n\n\n\n (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())\n\n}\n\n\n", "file_path": "components/library/src/sorting.rs", "rank": 46, "score": 196535.98088882258 }, { "content": "/// Takes a list of (page key, date, permalink) and sort them by dates if possible\n\n/// Pages without date will be put in the unsortable bucket\n\n/// The permalink is used to break ties\n\npub fn sort_pages_by_date(\n\n pages: Vec<(&DefaultKey, Option<NaiveDateTime>, &str)>,\n\n) -> (Vec<DefaultKey>, Vec<DefaultKey>) {\n\n let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =\n\n pages.into_par_iter().partition(|page| page.1.is_some());\n\n\n\n can_be_sorted.par_sort_unstable_by(|a, b| {\n\n let ord = b.1.unwrap().cmp(&a.1.unwrap());\n\n if ord == Ordering::Equal {\n\n a.2.cmp(&b.2)\n\n } else {\n\n ord\n\n }\n\n });\n\n\n\n (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())\n\n}\n\n\n", "file_path": "components/library/src/sorting.rs", "rank": 47, "score": 196512.7673987244 }, { "content": "/// Handles a path deletion: could be a page, a section, a folder\n\nfn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {\n\n {\n\n let mut library = 
site.library.write().unwrap();\n\n // Ignore the event if this path was not known\n\n if !library.contains_section(&path.to_path_buf())\n\n && !library.contains_page(&path.to_path_buf())\n\n {\n\n return Ok(());\n\n }\n\n\n\n if is_section {\n\n if let Some(s) = library.remove_section(&path.to_path_buf()) {\n\n site.permalinks.remove(&s.file.relative);\n\n }\n\n } else if let Some(p) = library.remove_page(&path.to_path_buf()) {\n\n site.permalinks.remove(&p.file.relative);\n\n }\n\n }\n\n\n\n // We might have delete the root _index.md so ensure we have at least the default one\n", "file_path": "components/rebuild/src/lib.rs", "rank": 48, "score": 194494.63764142562 }, { "content": "pub fn build_cli() -> App<'static, 'static> {\n\n App::new(\"zola\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .arg(\n\n Arg::with_name(\"root\")\n\n .short(\"r\")\n\n .long(\"root\")\n\n .takes_value(true)\n\n .default_value(\".\")\n\n .help(\"Directory to use as root of project\")\n\n )\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .takes_value(true)\n\n .help(\"Path to a config file other than config.toml in the root of project\")\n", "file_path": "src/cli.rs", "rank": 49, "score": 189641.84331443312 }, { "content": "/// Wait for user input and return what they typed\n\nfn read_line() -> Result<String> {\n\n let stdin = io::stdin();\n\n let stdin = stdin.lock();\n\n let mut lines = stdin.lines();\n\n lines\n\n .next()\n\n .and_then(|l| l.ok())\n\n .ok_or_else(|| \"unable to read from stdin for confirmation\".into())\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 50, "score": 186216.41256524692 }, { "content": "fn read_data_file(base_path: &PathBuf, full_path: PathBuf) -> Result<String> {\n\n if !is_path_in_directory(&base_path, &full_path)\n\n .map_err(|e| format!(\"Failed to read data file {}: {}\", full_path.display(), e))?\n\n 
{\n\n return Err(format!(\n\n \"{} is not inside the base site directory {}\",\n\n full_path.display(),\n\n base_path.display()\n\n )\n\n .into());\n\n }\n\n read_file(&full_path).map_err(|e| {\n\n format!(\"`load_data`: error {} loading file {}\", full_path.to_str().unwrap(), e).into()\n\n })\n\n}\n\n\n", "file_path": "components/templates/src/global_fns/load_data.rs", "rank": 51, "score": 186006.70171169282 }, { "content": "/// TOML datetimes will be serialized as a struct but we want the\n\n/// stringified version for json, otherwise they are going to be weird\n\npub fn fix_toml_dates(table: Map<String, Value>) -> Value {\n\n let mut new = Map::new();\n\n\n\n for (key, value) in table {\n\n match value {\n\n Value::Object(o) => {\n\n new.insert(key, convert_toml_date(o));\n\n }\n\n _ => {\n\n new.insert(key, value);\n\n }\n\n }\n\n }\n\n\n\n Value::Object(new)\n\n}\n", "file_path": "components/utils/src/de.rs", "rank": 52, "score": 182548.72208409428 }, { "content": "/// Rewrites the path from extend/macros of the theme used to ensure\n\n/// that they will point to the right place (theme/templates/...)\n\n/// Include is NOT supported as it would be a pain to add and using blocks\n\n/// or macros is always better anyway for themes\n\n/// This will also rename the shortcodes to NOT have the themes in the path\n\n/// so themes shortcodes can be used.\n\npub fn rewrite_theme_paths(tera_theme: &mut Tera, site_templates: Vec<&str>, theme: &str) {\n\n let mut shortcodes_to_move = vec![];\n\n let mut templates = HashMap::new();\n\n let old_templates = ::std::mem::replace(&mut tera_theme.templates, HashMap::new());\n\n\n\n // We want to match the paths in the templates to the new names\n\n for (key, mut tpl) in old_templates {\n\n tpl.name = format!(\"{}/templates/{}\", theme, tpl.name);\n\n // First the parent if there is one\n\n // If a template with the same name is also in site, assumes it overrides the theme one\n\n // and do not change anything\n\n if let Some(ref 
p) = tpl.parent.clone() {\n\n if !site_templates.contains(&p.as_ref()) {\n\n tpl.parent = Some(format!(\"{}/templates/{}\", theme, p));\n\n }\n\n }\n\n\n\n // Next the macros import\n\n let mut updated = vec![];\n\n for &(ref filename, ref namespace) in &tpl.imported_macro_files {\n", "file_path": "components/utils/src/templates.rs", "rank": 53, "score": 180185.78737096838 }, { "content": "fn strip_chars(s: &str, chars: &str) -> String {\n\n let mut sanitized_string = s.to_string();\n\n sanitized_string.retain(|c| !chars.contains(c));\n\n sanitized_string\n\n}\n\n\n", "file_path": "components/utils/src/slugs.rs", "rank": 54, "score": 179818.8752244096 }, { "content": "fn replace_string_markers(input: &str) -> String {\n\n match input.chars().next().unwrap() {\n\n '\"' => input.replace('\"', \"\"),\n\n '\\'' => input.replace('\\'', \"\"),\n\n '`' => input.replace('`', \"\"),\n\n _ => unreachable!(\"How did you even get there\"),\n\n }\n\n}\n\n\n", "file_path": "components/rendering/src/shortcode.rs", "rank": 55, "score": 179529.9071426083 }, { "content": "pub fn success(message: &str) {\n\n colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)));\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 56, "score": 179528.00146592368 }, { "content": "pub fn warn(message: &str) {\n\n colorize(message, ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)));\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 57, "score": 179528.00146592368 }, { "content": "pub fn info(message: &str) {\n\n colorize(message, ColorSpec::new().set_bold(true));\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 58, "score": 179528.00146592368 }, { "content": "/// We return the tmpdir otherwise it would get out of scope and be deleted\n\n/// The tests can ignore it if they dont need it by prefixing it with a `_`\n\npub fn build_site(name: &str) -> (Site, TempDir, PathBuf) {\n\n let mut path = 
env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();\n\n path.push(name);\n\n let config_file = path.join(\"config.toml\");\n\n let mut site = Site::new(&path, &config_file).unwrap();\n\n site.load().unwrap();\n\n let tmp_dir = tempdir().expect(\"create temp dir\");\n\n let public = &tmp_dir.path().join(\"public\");\n\n site.set_output_path(&public);\n\n site.build().expect(\"Couldn't build the site\");\n\n (site, tmp_dir, public.clone())\n\n}\n\n\n", "file_path": "components/site/tests/common.rs", "rank": 59, "score": 179034.96995316187 }, { "content": "fn rebuild_done_handling(broadcaster: &Option<Sender>, res: Result<()>, reload_path: &str) {\n\n match res {\n\n Ok(_) => {\n\n if let Some(broadcaster) = broadcaster.as_ref() {\n\n broadcaster\n\n .send(format!(\n\n r#\"\n\n {{\n\n \"command\": \"reload\",\n\n \"path\": \"{}\",\n\n \"originalPath\": \"\",\n\n \"liveCSS\": true,\n\n \"liveImg\": true,\n\n \"protocol\": [\"http://livereload.com/protocols/official-7\"]\n\n }}\"#,\n\n reload_path\n\n ))\n\n .unwrap();\n\n }\n\n }\n\n Err(e) => console::unravel_errors(\"Failed to build the site\", &e),\n\n }\n\n}\n\n\n", "file_path": "src/cmd/serve.rs", "rank": 60, "score": 176769.87677445632 }, { "content": "pub fn get_file_time(path: &Path) -> Option<SystemTime> {\n\n path.metadata().ok().and_then(|meta| {\n\n Some(match (meta.created().ok(), meta.modified().ok()) {\n\n (Some(tc), Some(tm)) => tc.max(tm),\n\n (Some(tc), None) => tc,\n\n (None, Some(tm)) => tm,\n\n (None, None) => return None,\n\n })\n\n })\n\n}\n\n\n", "file_path": "components/utils/src/fs.rs", "rank": 61, "score": 171803.09753420937 }, { "content": "/// Get and parse the config.\n\n/// If it doesn't succeed, exit\n\npub fn get_config(filename: &Path) -> Config {\n\n match Config::from_file(filename) {\n\n Ok(c) => c,\n\n Err(e) => {\n\n println!(\"Failed to load {}\", filename.display());\n\n println!(\"Error: {}\", e);\n\n ::std::process::exit(1);\n\n }\n\n }\n\n}\n", 
"file_path": "components/config/src/lib.rs", "rank": 62, "score": 162906.64961262 }, { "content": "pub fn is_valid(res: &Result) -> bool {\n\n match res {\n\n Ok(ref code) => code.is_success() || *code == StatusCode::NOT_MODIFIED,\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "components/link_checker/src/lib.rs", "rank": 63, "score": 162637.7464944078 }, { "content": "/// Find the lighter/heavier and earlier/later pages for all pages having a date/weight\n\npub fn find_siblings(\n\n sorted: &[DefaultKey],\n\n) -> Vec<(DefaultKey, Option<DefaultKey>, Option<DefaultKey>)> {\n\n let mut res = Vec::with_capacity(sorted.len());\n\n let length = sorted.len();\n\n\n\n for (i, key) in sorted.iter().enumerate() {\n\n let mut with_siblings = (*key, None, None);\n\n\n\n if i > 0 {\n\n // lighter / later\n\n with_siblings.1 = Some(sorted[i - 1]);\n\n }\n\n\n\n if i < length - 1 {\n\n // heavier/earlier\n\n with_siblings.2 = Some(sorted[i + 1]);\n\n }\n\n res.push(with_siblings);\n\n }\n", "file_path": "components/library/src/sorting.rs", "rank": 64, "score": 160888.63450442292 }, { "content": "// We might have cases where the slug is already present in our list of anchor\n\n// for example an article could have several titles named Example\n\n// We add a counter after the slug if the slug is already present, which\n\n// means we will have example, example-1, example-2 etc\n\nfn find_anchor(anchors: &[String], name: String, level: u8) -> String {\n\n if level == 0 && !anchors.contains(&name) {\n\n return name;\n\n }\n\n\n\n let new_anchor = format!(\"{}-{}\", name, level + 1);\n\n if !anchors.contains(&new_anchor) {\n\n return new_anchor;\n\n }\n\n\n\n find_anchor(anchors, name, level + 1)\n\n}\n\n\n", "file_path": "components/rendering/src/markdown.rs", "rank": 65, "score": 160247.1867517922 }, { "content": "/// Finds out all the links to put in a sitemap from the pages/sections/taxonomies\n\n/// There are no duplicate permalinks in the output vec\n\npub fn 
find_entries<'a>(\n\n library: &'a Library,\n\n taxonomies: &'a [Taxonomy],\n\n config: &'a Config,\n\n) -> Vec<SitemapEntry<'a>> {\n\n let pages = library\n\n .pages_values()\n\n .iter()\n\n .map(|p| {\n\n let mut entry = SitemapEntry::new(\n\n Cow::Borrowed(&p.permalink),\n\n p.meta.updated.clone().or_else(|| p.meta.date.clone()),\n\n );\n\n entry.add_extra(&p.meta.extra);\n\n entry\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let mut sections = library\n\n .sections_values()\n", "file_path": "components/site/src/sitemap.rs", "rank": 66, "score": 156916.80533320346 }, { "content": "/// Parse a TOML string and convert it to a Tera Value\n\nfn load_toml(toml_data: String) -> Result<Value> {\n\n let toml_content: toml::Value = toml::from_str(&toml_data).map_err(|e| format!(\"{:?}\", e))?;\n\n let toml_value = to_value(toml_content).expect(\"Got invalid JSON that was valid TOML somehow\");\n\n\n\n match toml_value {\n\n Value::Object(m) => Ok(fix_toml_dates(m)),\n\n _ => unreachable!(\"Loaded something other than a TOML object\"),\n\n }\n\n}\n\n\n", "file_path": "components/templates/src/global_fns/load_data.rs", "rank": 67, "score": 153616.6118483754 }, { "content": "/// Parse a JSON string and convert it to a Tera Value\n\nfn load_json(json_data: String) -> Result<Value> {\n\n let json_content: Value =\n\n serde_json::from_str(json_data.as_str()).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(json_content)\n\n}\n\n\n", "file_path": "components/templates/src/global_fns/load_data.rs", "rank": 68, "score": 153616.6118483754 }, { "content": "/// Parse a CSV string and convert it to a Tera Value\n\n///\n\n/// An example csv file `example.csv` could be:\n\n/// ```csv\n\n/// Number, Title\n\n/// 1,Gutenberg\n\n/// 2,Printing\n\n/// ```\n\n/// The json value output would be:\n\n/// ```json\n\n/// {\n\n/// \"headers\": [\"Number\", \"Title\"],\n\n/// \"records\": [\n\n/// [\"1\", \"Gutenberg\"],\n\n/// [\"2\", \"Printing\"]\n\n/// ],\n\n/// }\n\n/// ```\n\nfn load_csv(csv_data: 
String) -> Result<Value> {\n\n let mut reader = Reader::from_reader(csv_data.as_bytes());\n\n let mut csv_map = Map::new();\n\n\n\n {\n\n let hdrs = reader.headers().map_err(|e| {\n\n format!(\"'load_data': {} - unable to read CSV header line (line 1) for CSV file\", e)\n\n })?;\n\n\n\n let headers_array = hdrs.iter().map(|v| Value::String(v.to_string())).collect();\n\n\n\n csv_map.insert(String::from(\"headers\"), Value::Array(headers_array));\n\n }\n\n\n\n {\n\n let records = reader.records();\n\n\n\n let mut records_array: Vec<Value> = Vec::new();\n\n\n\n for result in records {\n", "file_path": "components/templates/src/global_fns/load_data.rs", "rank": 69, "score": 153615.6387368793 }, { "content": "fn add_section_to_index(index: &mut Index, section: &Section, library: &Library) {\n\n if !section.meta.in_search_index {\n\n return;\n\n }\n\n\n\n // Don't index redirecting sections\n\n if section.meta.redirect_to.is_none() {\n\n index.add_doc(\n\n &section.permalink,\n\n &[\n\n &section.meta.title.clone().unwrap_or_default(),\n\n &AMMONIA.clean(&section.content).to_string(),\n\n ],\n\n );\n\n }\n\n\n\n for key in &section.pages {\n\n let page = library.get_page_by_key(*key);\n\n if !page.meta.in_search_index {\n\n continue;\n", "file_path": "components/search/src/lib.rs", "rank": 70, "score": 153294.8525417392 }, { "content": "/// Same as `build_site` but has a hook to setup some config options\n\npub fn build_site_with_setup<F>(name: &str, mut setup_cb: F) -> (Site, TempDir, PathBuf)\n\nwhere\n\n F: FnMut(Site) -> (Site, bool),\n\n{\n\n let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();\n\n path.push(name);\n\n let config_file = path.join(\"config.toml\");\n\n let site = Site::new(&path, &config_file).unwrap();\n\n let (mut site, needs_loading) = setup_cb(site);\n\n if needs_loading {\n\n site.load().unwrap();\n\n }\n\n let tmp_dir = tempdir().expect(\"create temp dir\");\n\n let public = 
&tmp_dir.path().join(\"public\");\n\n site.set_output_path(&public);\n\n site.build().expect(\"Couldn't build the site\");\n\n (site, tmp_dir, public.clone())\n\n}\n", "file_path": "components/site/tests/common.rs", "rank": 71, "score": 153108.06767466903 }, { "content": "/// Display a warning in the console if there are ignored pages in the site\n\npub fn warn_about_ignored_pages(site: &Site) {\n\n let library = site.library.read().unwrap();\n\n let ignored_pages: Vec<_> = library\n\n .sections_values()\n\n .iter()\n\n .flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone()))\n\n .collect();\n\n\n\n if !ignored_pages.is_empty() {\n\n warn(&format!(\n\n \"{} page(s) ignored (missing date or weight in a sorted section):\",\n\n ignored_pages.len()\n\n ));\n\n for path in ignored_pages {\n\n warn(&format!(\"- {}\", path.display()));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 72, "score": 150015.8517717498 }, { "content": "fn is_ignored_file(ignored_content_globset: &Option<GlobSet>, path: &Path) -> bool {\n\n match ignored_content_globset {\n\n Some(gs) => gs.is_match(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "src/cmd/serve.rs", "rank": 73, "score": 147241.7424552367 }, { "content": "/// Returns the highlighter and whether it was found in the extra or not\n\npub fn get_highlighter<'a>(info: &str, config: &Config) -> (HighlightLines<'a>, bool) {\n\n let theme = &THEME_SET.themes[&config.highlight_theme];\n\n let mut in_extra = false;\n\n\n\n if let Some(ref lang) = info.split(' ').next() {\n\n let syntax = SYNTAX_SET\n\n .find_syntax_by_token(lang)\n\n .or_else(|| {\n\n if let Some(ref extra) = config.extra_syntax_set {\n\n let s = extra.find_syntax_by_token(lang);\n\n if s.is_some() {\n\n in_extra = true;\n\n }\n\n s\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap_or_else(|| SYNTAX_SET.find_syntax_plain_text());\n\n (HighlightLines::new(syntax, theme), in_extra)\n\n } else {\n\n 
(HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme), false)\n\n }\n\n}\n", "file_path": "components/config/src/highlighting.rs", "rank": 74, "score": 137551.94236320443 }, { "content": "/// Converts the flat temp headings into a nested set of headings\n\n/// representing the hierarchy\n\npub fn make_table_of_contents(headings: Vec<Heading>) -> Vec<Heading> {\n\n let mut toc = vec![];\n\n for heading in headings {\n\n // First heading or we try to insert the current heading in a previous one\n\n if toc.is_empty() || !insert_into_parent(toc.iter_mut().last(), &heading) {\n\n toc.push(heading);\n\n }\n\n }\n\n\n\n toc\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_make_basic_toc() {\n\n let input = vec![Heading::new(1), Heading::new(1), Heading::new(1)];\n\n let toc = make_table_of_contents(input);\n", "file_path": "components/rendering/src/table_of_contents.rs", "rank": 75, "score": 136903.72495970633 }, { "content": "/// Returns key/value for a converted date from TOML.\n\n/// If the table itself is the TOML struct, only return its value without the key\n\nfn convert_toml_date(table: Map<String, Value>) -> Value {\n\n let mut new = Map::new();\n\n\n\n for (k, v) in table {\n\n if k == \"$__toml_private_datetime\" {\n\n return v;\n\n }\n\n\n\n match v {\n\n Value::Object(o) => {\n\n new.insert(k, convert_toml_date(o));\n\n }\n\n _ => {\n\n new.insert(k, v);\n\n }\n\n }\n\n }\n\n\n\n Value::Object(new)\n\n}\n\n\n", "file_path": "components/utils/src/de.rs", "rank": 76, "score": 135255.79694542367 }, { "content": "/// Returns whether the path we received corresponds to a temp file created\n\n/// by an editor or the OS\n\nfn is_temp_file(path: &Path) -> bool {\n\n let ext = path.extension();\n\n match ext {\n\n Some(ex) => match ex.to_str().unwrap() {\n\n \"swp\" | \"swx\" | \"tmp\" | \".DS_STORE\" => true,\n\n // jetbrains IDE\n\n x if x.ends_with(\"jb_old___\") => true,\n\n x if x.ends_with(\"jb_tmp___\") => true,\n\n 
x if x.ends_with(\"jb_bak___\") => true,\n\n // vim\n\n x if x.ends_with('~') => true,\n\n _ => {\n\n if let Some(filename) = path.file_stem() {\n\n // emacs\n\n let name = filename.to_str().unwrap();\n\n name.starts_with('#') || name.starts_with(\".#\")\n\n } else {\n\n false\n\n }\n\n }\n\n },\n\n None => true,\n\n }\n\n}\n\n\n", "file_path": "src/cmd/serve.rs", "rank": 77, "score": 132318.29330842546 }, { "content": "/// Detect what changed from the given path so we have an idea what needs\n\n/// to be reloaded\n\nfn detect_change_kind(pwd: &Path, path: &Path) -> (ChangeKind, PathBuf) {\n\n let mut partial_path = PathBuf::from(\"/\");\n\n partial_path.push(path.strip_prefix(pwd).unwrap_or(path));\n\n\n\n let change_kind = if partial_path.starts_with(\"/templates\") {\n\n ChangeKind::Templates\n\n } else if partial_path.starts_with(\"/themes\") {\n\n ChangeKind::Themes\n\n } else if partial_path.starts_with(\"/content\") {\n\n ChangeKind::Content\n\n } else if partial_path.starts_with(\"/static\") {\n\n ChangeKind::StaticFiles\n\n } else if partial_path.starts_with(\"/sass\") {\n\n ChangeKind::Sass\n\n } else if partial_path == Path::new(\"/config.toml\") {\n\n ChangeKind::Config\n\n } else {\n\n unreachable!(\"Got a change in an unexpected path: {}\", partial_path.display());\n\n };\n\n\n\n (change_kind, partial_path)\n\n}\n\n\n", "file_path": "src/cmd/serve.rs", "rank": 78, "score": 129704.71029085912 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse tera::{Map, Value};\n\nuse toml;\n\n\n\nuse super::{InsertAnchor, SortBy};\n\nuse errors::{bail, Result};\n\nuse utils::de::fix_toml_dates;\n\n\n\nstatic DEFAULT_PAGINATE_PATH: &str = \"page\";\n\n\n\n/// The front matter of every section\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n#[serde(default)]\n\npub struct SectionFrontMatter {\n\n /// <title> of the page\n\n pub title: Option<String>,\n\n /// Description in <meta> that appears when linked, e.g. 
on twitter\n\n pub description: Option<String>,\n\n /// Whether to sort by \"date\", \"order\", \"weight\" or \"none\". Defaults to `none`.\n\n #[serde(skip_serializing)]\n", "file_path": "components/front_matter/src/section.rs", "rank": 79, "score": 127359.17690116934 }, { "content": " /// redirect to this\n\n #[serde(skip_serializing)]\n\n pub aliases: Vec<String>,\n\n /// Any extra parameter present in the front matter\n\n pub extra: Map<String, Value>,\n\n}\n\n\n\nimpl SectionFrontMatter {\n\n pub fn parse(toml: &str) -> Result<SectionFrontMatter> {\n\n let mut f: SectionFrontMatter = match toml::from_str(toml) {\n\n Ok(d) => d,\n\n Err(e) => bail!(e),\n\n };\n\n\n\n f.extra = match fix_toml_dates(f.extra) {\n\n Value::Object(o) => o,\n\n _ => unreachable!(\"Got something other than a table in section extra\"),\n\n };\n\n\n\n Ok(f)\n", "file_path": "components/front_matter/src/section.rs", "rank": 80, "score": 127333.19131202785 }, { "content": " #[serde(skip_serializing)]\n\n pub render: bool,\n\n /// Whether to redirect when landing on that section. Defaults to `None`.\n\n /// Useful for the same reason as `render` but when you don't want a 404 when\n\n /// landing on the root section page\n\n #[serde(skip_serializing)]\n\n pub redirect_to: Option<String>,\n\n /// Whether the section content and its pages/subsections are included in the index.\n\n /// Defaults to `true` but is only used if search if explicitly enabled in the config.\n\n #[serde(skip_serializing)]\n\n pub in_search_index: bool,\n\n /// Whether the section should pass its pages on to the parent section. 
Defaults to `false`.\n\n /// Useful when the section shouldn't split up the parent section, like\n\n /// sections for each year under a posts section.\n\n #[serde(skip_serializing)]\n\n pub transparent: bool,\n\n /// Optional template for all pages in this section (including the pages of children section)\n\n #[serde(skip_serializing)]\n\n pub page_template: Option<String>,\n\n /// All aliases for that page. Zola will create HTML templates that will\n", "file_path": "components/front_matter/src/section.rs", "rank": 81, "score": 127324.9401454106 }, { "content": " pub sort_by: SortBy,\n\n /// Used by the parent section to order its subsections.\n\n /// Higher values means it will be at the end. Defaults to `0`\n\n #[serde(skip_serializing)]\n\n pub weight: usize,\n\n /// Optional template, if we want to specify which template to render for that section\n\n #[serde(skip_serializing)]\n\n pub template: Option<String>,\n\n /// How many pages to be displayed per paginated page. No pagination will happen if this isn't set\n\n #[serde(skip_serializing)]\n\n pub paginate_by: Option<usize>,\n\n /// Path to be used by pagination: the page number will be appended after it. Defaults to `page`.\n\n #[serde(skip_serializing)]\n\n pub paginate_path: String,\n\n /// Whether to insert a link for each header like the ones you can see in this site if you hover one\n\n /// The default template can be overridden by creating a `anchor-link.html` in the `templates` directory\n\n pub insert_anchor_links: InsertAnchor,\n\n /// Whether to render that section or not. 
Defaults to `true`.\n\n /// Useful when the section is only there to organize things but is not meant\n\n /// to be used directly, like a posts section in a personal site\n", "file_path": "components/front_matter/src/section.rs", "rank": 82, "score": 127323.28914147824 }, { "content": " }\n\n\n\n /// Only applies to section, whether it is paginated or not.\n\n pub fn is_paginated(&self) -> bool {\n\n match self.paginate_by {\n\n Some(v) => v > 0,\n\n None => false,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for SectionFrontMatter {\n\n fn default() -> SectionFrontMatter {\n\n SectionFrontMatter {\n\n title: None,\n\n description: None,\n\n sort_by: SortBy::None,\n\n weight: 0,\n\n template: None,\n\n paginate_by: None,\n", "file_path": "components/front_matter/src/section.rs", "rank": 83, "score": 127322.05314366701 }, { "content": " paginate_path: DEFAULT_PAGINATE_PATH.to_string(),\n\n render: true,\n\n redirect_to: None,\n\n insert_anchor_links: InsertAnchor::None,\n\n in_search_index: true,\n\n transparent: false,\n\n page_template: None,\n\n aliases: Vec::new(),\n\n extra: Map::new(),\n\n }\n\n }\n\n}\n", "file_path": "components/front_matter/src/section.rs", "rank": 84, "score": 127310.32241512282 }, { "content": "use std::collections::HashMap;\n\n\n\nuse chrono::prelude::*;\n\nuse serde_derive::Deserialize;\n\nuse tera::{Map, Value};\n\nuse toml;\n\n\n\nuse errors::{bail, Result};\n\nuse utils::de::{fix_toml_dates, from_toml_datetime};\n\n\n\n/// The front matter of every page\n\n#[derive(Debug, Clone, PartialEq, Deserialize)]\n\n#[serde(default)]\n\npub struct PageFrontMatter {\n\n /// <title> of the page\n\n pub title: Option<String>,\n\n /// Description in <meta> that appears when linked, e.g. 
on twitter\n\n pub description: Option<String>,\n\n /// Updated date\n\n #[serde(default, deserialize_with = \"from_toml_datetime\")]\n", "file_path": "components/front_matter/src/page.rs", "rank": 85, "score": 127118.37429568023 }, { "content": " pub fn parse(toml: &str) -> Result<PageFrontMatter> {\n\n let mut f: PageFrontMatter = match toml::from_str(toml) {\n\n Ok(d) => d,\n\n Err(e) => bail!(e),\n\n };\n\n\n\n if let Some(ref slug) = f.slug {\n\n if slug == \"\" {\n\n bail!(\"`slug` can't be empty if present\")\n\n }\n\n }\n\n\n\n if let Some(ref path) = f.path {\n\n if path == \"\" {\n\n bail!(\"`path` can't be empty if present\")\n\n }\n\n }\n\n\n\n f.extra = match fix_toml_dates(f.extra) {\n\n Value::Object(o) => o,\n", "file_path": "components/front_matter/src/page.rs", "rank": 86, "score": 127112.36332639921 }, { "content": " pub updated: Option<String>,\n\n /// Date if we want to order pages (ie blog post)\n\n #[serde(default, deserialize_with = \"from_toml_datetime\")]\n\n pub date: Option<String>,\n\n /// Chrono converted datetime\n\n #[serde(default, skip_deserializing)]\n\n pub datetime: Option<NaiveDateTime>,\n\n /// The converted date into a (year, month, day) tuple\n\n #[serde(default, skip_deserializing)]\n\n pub datetime_tuple: Option<(i32, u32, u32)>,\n\n /// Whether this page is a draft\n\n pub draft: bool,\n\n /// The page slug. Will be used instead of the filename if present\n\n /// Can't be an empty string if present\n\n pub slug: Option<String>,\n\n /// The path the page appears at, overrides the slug if set in the front-matter\n\n /// otherwise is set after parsing front matter and sections\n\n /// Can't be an empty string if present\n\n pub path: Option<String>,\n\n pub taxonomies: HashMap<String, Vec<String>>,\n", "file_path": "components/front_matter/src/page.rs", "rank": 87, "score": 127108.09566830838 }, { "content": " /// Integer to use to order content. 
Lowest is at the bottom, highest first\n\n pub order: Option<usize>,\n\n /// Integer to use to order content. Highest is at the bottom, lowest first\n\n pub weight: Option<usize>,\n\n /// All aliases for that page. Zola will create HTML templates that will\n\n /// redirect to this\n\n #[serde(skip_serializing)]\n\n pub aliases: Vec<String>,\n\n /// Specify a template different from `page.html` to use for that page\n\n #[serde(skip_serializing)]\n\n pub template: Option<String>,\n\n /// Whether the page is included in the search index\n\n /// Defaults to `true` but is only used if search if explicitly enabled in the config.\n\n #[serde(skip_serializing)]\n\n pub in_search_index: bool,\n\n /// Any extra parameter present in the front matter\n\n pub extra: Map<String, Value>,\n\n}\n\n\n\nimpl PageFrontMatter {\n", "file_path": "components/front_matter/src/page.rs", "rank": 88, "score": 127106.25468322478 }, { "content": "\n\n self.datetime_tuple = if let Some(ref dt) = self.datetime {\n\n Some((dt.year(), dt.month(), dt.day()))\n\n } else {\n\n None\n\n };\n\n }\n\n\n\n pub fn order(&self) -> usize {\n\n self.order.unwrap()\n\n }\n\n\n\n pub fn weight(&self) -> usize {\n\n self.weight.unwrap()\n\n }\n\n}\n\n\n\nimpl Default for PageFrontMatter {\n\n fn default() -> PageFrontMatter {\n\n PageFrontMatter {\n", "file_path": "components/front_matter/src/page.rs", "rank": 89, "score": 127102.81003374828 }, { "content": " }\n\n\n\n #[test]\n\n fn errors_on_present_but_empty_path() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n path = \"\"\"#;\n\n let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n\n }\n\n\n\n #[test]\n\n fn can_parse_date_yyyy_mm_dd() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n date = 2016-10-10\n\n \"#;\n\n let res = PageFrontMatter::parse(content).unwrap();\n", "file_path": "components/front_matter/src/page.rs", "rank": 90, "score": 127101.57261407514 }, { 
"content": "#[cfg(test)]\n\nmod tests {\n\n use super::PageFrontMatter;\n\n use tera::to_value;\n\n\n\n #[test]\n\n fn can_have_empty_front_matter() {\n\n let content = r#\" \"#;\n\n let res = PageFrontMatter::parse(content);\n\n println!(\"{:?}\", res);\n\n assert!(res.is_ok());\n\n }\n\n\n\n #[test]\n\n fn can_parse_valid_front_matter() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\"#;\n\n let res = PageFrontMatter::parse(content);\n\n assert!(res.is_ok());\n", "file_path": "components/front_matter/src/page.rs", "rank": 91, "score": 127099.05029645214 }, { "content": " let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n\n }\n\n\n\n #[test]\n\n fn cannot_parse_invalid_date_format() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n date = 2002-14-01\"#;\n\n let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n\n }\n\n\n\n #[test]\n\n fn cannot_parse_date_as_string() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n date = \"2002-14-01\"\"#;\n", "file_path": "components/front_matter/src/page.rs", "rank": 92, "score": 127098.37611246025 }, { "content": " let res = res.unwrap();\n\n assert_eq!(res.title.unwrap(), \"Hello\".to_string());\n\n assert_eq!(res.description.unwrap(), \"hey there\".to_string())\n\n }\n\n\n\n #[test]\n\n fn errors_with_invalid_front_matter() {\n\n let content = r#\"title = 1\\n\"#;\n\n let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n\n }\n\n\n\n #[test]\n\n fn errors_on_present_but_empty_slug() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n slug = \"\"\"#;\n\n let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n", "file_path": "components/front_matter/src/page.rs", "rank": 93, "score": 127097.8180121378 }, { "content": " _ => unreachable!(\"Got something other than a table in page extra\"),\n\n };\n\n\n\n 
f.date_to_datetime();\n\n\n\n Ok(f)\n\n }\n\n\n\n /// Converts the TOML datetime to a Chrono naive datetime\n\n /// Also grabs the year/month/day tuple that will be used in serialization\n\n pub fn date_to_datetime(&mut self) {\n\n self.datetime = if let Some(ref d) = self.date {\n\n if d.contains('T') {\n\n DateTime::parse_from_rfc3339(&d).ok().map(|s| s.naive_local())\n\n } else {\n\n NaiveDate::parse_from_str(&d, \"%Y-%m-%d\").ok().map(|s| s.and_hms(0, 0, 0))\n\n }\n\n } else {\n\n None\n\n };\n", "file_path": "components/front_matter/src/page.rs", "rank": 94, "score": 127097.7570312492 }, { "content": " let res = PageFrontMatter::parse(content);\n\n assert!(res.is_err());\n\n }\n\n\n\n #[test]\n\n fn can_parse_dates_in_extra() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n\n\n [extra]\n\n some-date = 2002-14-01\"#;\n\n let res = PageFrontMatter::parse(content);\n\n println!(\"{:?}\", res);\n\n assert!(res.is_ok());\n\n assert_eq!(res.unwrap().extra[\"some-date\"], to_value(\"2002-14-01\").unwrap());\n\n }\n\n\n\n #[test]\n\n fn can_parse_nested_dates_in_extra() {\n", "file_path": "components/front_matter/src/page.rs", "rank": 95, "score": 127093.96079685097 }, { "content": " assert!(res.date.is_some());\n\n }\n\n\n\n #[test]\n\n fn can_parse_date_rfc3339() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n date = 2002-10-02T15:00:00Z\n\n \"#;\n\n let res = PageFrontMatter::parse(content).unwrap();\n\n assert!(res.date.is_some());\n\n }\n\n\n\n #[test]\n\n fn cannot_parse_random_date_format() {\n\n let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n date = 2002/10/12\"#;\n", "file_path": "components/front_matter/src/page.rs", "rank": 96, "score": 127092.10603635178 }, { "content": " let content = r#\"\n\n title = \"Hello\"\n\n description = \"hey there\"\n\n\n\n [extra.something]\n\n some-date = 2002-14-01\"#;\n\n let res = PageFrontMatter::parse(content);\n\n 
println!(\"{:?}\", res);\n\n assert!(res.is_ok());\n\n assert_eq!(res.unwrap().extra[\"something\"][\"some-date\"], to_value(\"2002-14-01\").unwrap());\n\n }\n\n\n\n #[test]\n\n fn can_parse_taxonomies() {\n\n let content = r#\"\n\ntitle = \"Hello World\"\n\n\n\n[taxonomies]\n\ntags = [\"Rust\", \"JavaScript\"]\n\ncategories = [\"Dev\"]\n", "file_path": "components/front_matter/src/page.rs", "rank": 97, "score": 127090.00104227527 }, { "content": " title: None,\n\n description: None,\n\n updated: None,\n\n date: None,\n\n datetime: None,\n\n datetime_tuple: None,\n\n draft: false,\n\n slug: None,\n\n path: None,\n\n taxonomies: HashMap::new(),\n\n order: None,\n\n weight: None,\n\n aliases: Vec::new(),\n\n in_search_index: true,\n\n template: None,\n\n extra: Map::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "components/front_matter/src/page.rs", "rank": 98, "score": 127087.48051141406 }, { "content": "\"#;\n\n let res = PageFrontMatter::parse(content);\n\n println!(\"{:?}\", res);\n\n assert!(res.is_ok());\n\n let res2 = res.unwrap();\n\n assert_eq!(res2.taxonomies[\"categories\"], vec![\"Dev\"]);\n\n assert_eq!(res2.taxonomies[\"tags\"], vec![\"Rust\", \"JavaScript\"]);\n\n }\n\n}\n", "file_path": "components/front_matter/src/page.rs", "rank": 99, "score": 127085.6253551302 } ]
Rust
near-sdk/src/collections/lookup_set.rs
nearprotocol/near-sdk-rs
8a2b2e19b27a764abf43df05bd0e530c3ad91d6c
use std::marker::PhantomData; use borsh::{BorshDeserialize, BorshSerialize}; use crate::collections::append_slice; use crate::{env, IntoStorageKey}; const ERR_ELEMENT_SERIALIZATION: &str = "Cannot serialize element with Borsh"; #[derive(BorshSerialize, BorshDeserialize)] pub struct LookupSet<T> { element_prefix: Vec<u8>, #[borsh_skip] el: PhantomData<T>, } impl<T> LookupSet<T> { pub fn new<S>(element_prefix: S) -> Self where S: IntoStorageKey, { Self { element_prefix: element_prefix.into_storage_key(), el: PhantomData } } fn raw_element_to_storage_key(&self, element_raw: &[u8]) -> Vec<u8> { append_slice(&self.element_prefix, element_raw) } fn contains_raw(&self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); env::storage_has_key(&storage_key) } pub fn insert_raw(&mut self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); !env::storage_write(&storage_key, b"") } pub fn remove_raw(&mut self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); env::storage_remove(&storage_key) } } impl<T> LookupSet<T> where T: BorshSerialize, { fn serialize_element(element: &T) -> Vec<u8> { match element.try_to_vec() { Ok(x) => x, Err(_) => env::panic_str(ERR_ELEMENT_SERIALIZATION), } } pub fn contains(&self, element: &T) -> bool { self.contains_raw(&Self::serialize_element(element)) } pub fn remove(&mut self, element: &T) -> bool { self.remove_raw(&Self::serialize_element(element)) } pub fn insert(&mut self, element: &T) -> bool { self.insert_raw(&Self::serialize_element(element)) } pub fn extend<IT: IntoIterator<Item = T>>(&mut self, iter: IT) { for el in iter { self.insert(&el); } } } impl<T> std::fmt::Debug for LookupSet<T> where T: std::fmt::Debug + BorshSerialize, { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("LookupSet").field("element_prefix", &self.element_prefix).finish() } } #[cfg(not(target_arch = 
"wasm32"))] #[cfg(test)] mod tests { use crate::collections::LookupSet; use rand::seq::SliceRandom; use rand::{Rng, SeedableRng}; use std::collections::HashSet; #[test] pub fn test_insert_one() { let mut map = LookupSet::new(b"m"); assert!(map.insert(&1)); assert!(!map.insert(&1)); } #[test] pub fn test_insert() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(0); for _ in 0..500 { let key = rng.gen::<u64>(); set.insert(&key); } } #[test] pub fn test_insert_remove() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(1); let mut keys = vec![]; for _ in 0..100 { let key = rng.gen::<u64>(); keys.push(key); set.insert(&key); } keys.shuffle(&mut rng); for key in keys { assert!(set.remove(&key)); } } #[test] pub fn test_remove_last_reinsert() { let mut set = LookupSet::new(b"s"); let key1 = 1u64; set.insert(&key1); let key2 = 2u64; set.insert(&key2); let actual = set.remove(&key2); assert!(actual); let actual_reinsert = set.insert(&key2); assert!(actual_reinsert); } #[test] pub fn test_insert_override_remove() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(2); let mut keys = vec![]; for _ in 0..100 { let key = rng.gen::<u64>(); keys.push(key); set.insert(&key); } keys.shuffle(&mut rng); for key in &keys { assert!(!set.insert(key)); } keys.shuffle(&mut rng); for key in keys { assert!(set.remove(&key)); } } #[test] pub fn test_contains_non_existent() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(3); let mut set_tmp = HashSet::new(); for _ in 0..500 { let key = rng.gen::<u64>() % 20_000; set_tmp.insert(key); set.insert(&key); } for _ in 0..500 { let key = rng.gen::<u64>() % 20_000; assert_eq!(set.contains(&key), set_tmp.contains(&key)); } } #[test] pub fn test_extend() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(4); let mut keys = 
HashSet::new(); for _ in 0..100 { let key = rng.gen::<u64>(); keys.insert(key); set.insert(&key); } for _ in 0..10 { let mut tmp = vec![]; for _ in 0..=(rng.gen::<u64>() % 20 + 1) { let key = rng.gen::<u64>(); tmp.push(key); } keys.extend(tmp.iter().cloned()); set.extend(tmp.iter().cloned()); } for key in keys { assert!(set.contains(&key)); } } #[test] fn test_debug() { let set: LookupSet<u64> = LookupSet::new(b"m"); assert_eq!( format!("{:?}", set), format!("LookupSet {{ element_prefix: {:?} }}", set.element_prefix) ); } }
use std::marker::PhantomData; use borsh::{BorshDeserialize, BorshSerialize}; use crate::collections::append_slice; use crate::{env, IntoStorageKey}; const ERR_ELEMENT_SERIALIZATION: &str = "Cannot serialize element with Borsh"; #[derive(BorshSerialize, BorshDeserialize)] pub struct LookupSet<T> { element_prefix: Vec<u8>, #[borsh_skip] el: PhantomData<T>, } impl<T> LookupSet<T> { pub fn new<S>(element_prefix: S) -> Self where S: IntoStorageKey, { Self { element_prefix: element_prefix.into_storage_key(), el: PhantomData } } fn raw_element_to_storage_key(&self, element_raw: &[u8]) -> Vec<u8> { append_slice(&self.element_prefix, element_raw) } fn contains_raw(&self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); env::storage_has_key(&storage_key) } pub fn insert_raw(&mut self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); !env::storage_write(&storage_key, b"") } pub fn remove_raw(&mut self, element_raw: &[u8]) -> bool { let storage_key = self.raw_element_to_storage_key(element_raw); env::storage_remove(&storage_key) } } impl<T> LookupSet<T> where T: BorshSerialize, { fn serialize_element(element: &T) -> Vec<u8> { match element.try_to_vec() { Ok(x) => x, Err(_) => env::panic_str(ERR_ELEMENT_SERIALIZATION), } } pub fn contains(&self, element: &T) -> bool { self.contains_raw(&Self::serialize_element(element)) } pub fn remove(&mut self, element: &T) -> bool { self.remove_raw(&Self::serialize_element(element)) } pub fn insert(&mut self, element: &T) -> bool { self.insert_raw(&Self::serialize_element(element)) } pub fn extend<IT: IntoIterator<Item = T>>(&mut self, iter: IT) { for el in iter { self.insert(&el); } } }
> std::fmt::Result { f.debug_struct("LookupSet").field("element_prefix", &self.element_prefix).finish() } } #[cfg(not(target_arch = "wasm32"))] #[cfg(test)] mod tests { use crate::collections::LookupSet; use rand::seq::SliceRandom; use rand::{Rng, SeedableRng}; use std::collections::HashSet; #[test] pub fn test_insert_one() { let mut map = LookupSet::new(b"m"); assert!(map.insert(&1)); assert!(!map.insert(&1)); } #[test] pub fn test_insert() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(0); for _ in 0..500 { let key = rng.gen::<u64>(); set.insert(&key); } } #[test] pub fn test_insert_remove() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(1); let mut keys = vec![]; for _ in 0..100 { let key = rng.gen::<u64>(); keys.push(key); set.insert(&key); } keys.shuffle(&mut rng); for key in keys { assert!(set.remove(&key)); } } #[test] pub fn test_remove_last_reinsert() { let mut set = LookupSet::new(b"s"); let key1 = 1u64; set.insert(&key1); let key2 = 2u64; set.insert(&key2); let actual = set.remove(&key2); assert!(actual); let actual_reinsert = set.insert(&key2); assert!(actual_reinsert); } #[test] pub fn test_insert_override_remove() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(2); let mut keys = vec![]; for _ in 0..100 { let key = rng.gen::<u64>(); keys.push(key); set.insert(&key); } keys.shuffle(&mut rng); for key in &keys { assert!(!set.insert(key)); } keys.shuffle(&mut rng); for key in keys { assert!(set.remove(&key)); } } #[test] pub fn test_contains_non_existent() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(3); let mut set_tmp = HashSet::new(); for _ in 0..500 { let key = rng.gen::<u64>() % 20_000; set_tmp.insert(key); set.insert(&key); } for _ in 0..500 { let key = rng.gen::<u64>() % 20_000; assert_eq!(set.contains(&key), set_tmp.contains(&key)); } } #[test] pub fn 
test_extend() { let mut set = LookupSet::new(b"s"); let mut rng = rand_xorshift::XorShiftRng::seed_from_u64(4); let mut keys = HashSet::new(); for _ in 0..100 { let key = rng.gen::<u64>(); keys.insert(key); set.insert(&key); } for _ in 0..10 { let mut tmp = vec![]; for _ in 0..=(rng.gen::<u64>() % 20 + 1) { let key = rng.gen::<u64>(); tmp.push(key); } keys.extend(tmp.iter().cloned()); set.extend(tmp.iter().cloned()); } for key in keys { assert!(set.contains(&key)); } } #[test] fn test_debug() { let set: LookupSet<u64> = LookupSet::new(b"m"); assert_eq!( format!("{:?}", set), format!("LookupSet {{ element_prefix: {:?} }}", set.element_prefix) ); } }
impl<T> std::fmt::Debug for LookupSet<T> where T: std::fmt::Debug + BorshSerialize, { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -
random
[ { "content": "// ###############\n\n// # Storage API #\n\n// ###############\n\n/// Writes key-value into storage.\n\n/// If another key-value existed in the storage with the same key it returns `true`, otherwise `false`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use near_sdk::env::{storage_write, storage_read};\n\n///\n\n/// assert!(!storage_write(b\"key\", b\"value\"));\n\n/// assert!(storage_write(b\"key\", b\"another_value\"));\n\n/// assert_eq!(storage_read(b\"key\").unwrap(), b\"another_value\");\n\n/// ```\n\npub fn storage_write(key: &[u8], value: &[u8]) -> bool {\n\n match unsafe {\n\n sys::storage_write(\n\n key.len() as _,\n\n key.as_ptr() as _,\n\n value.len() as _,\n\n value.as_ptr() as _,\n\n EVICTED_REGISTER,\n\n )\n\n } {\n\n 0 => false,\n\n 1 => true,\n\n _ => abort(),\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 0, "score": 284826.0062000232 }, { "content": "/// Checks if there is a key-value in the storage.\n\npub fn storage_has_key(key: &[u8]) -> bool {\n\n match unsafe { sys::storage_has_key(key.len() as _, key.as_ptr() as _) } {\n\n 0 => false,\n\n 1 => true,\n\n _ => abort(),\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 1, "score": 280852.55076767446 }, { "content": "/// Removes the value stored under the given key.\n\n/// If key-value existed returns `true`, otherwise `false`.\n\npub fn storage_remove(key: &[u8]) -> bool {\n\n match unsafe { sys::storage_remove(key.len() as _, key.as_ptr() as _, EVICTED_REGISTER) } {\n\n 0 => false,\n\n 1 => true,\n\n _ => abort(),\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 2, "score": 280852.5507676745 }, { "content": "/// Returns `true` if the given account ID is valid and `false` otherwise.\n\npub fn is_valid_account_id(account_id: &[u8]) -> bool {\n\n if (account_id.len() as u64) < MIN_ACCOUNT_ID_LEN\n\n || (account_id.len() as u64) > MAX_ACCOUNT_ID_LEN\n\n {\n\n return false;\n\n }\n\n\n\n // NOTE: We don't 
want to use Regex here, because it requires extra time to compile it.\n\n // The valid account ID regex is /^(([a-z\\d]+[-_])*[a-z\\d]+\\.)*([a-z\\d]+[-_])*[a-z\\d]+$/\n\n // Instead the implementation is based on the previous character checks.\n\n\n\n // We can safely assume that last char was a separator.\n\n let mut last_char_is_separator = true;\n\n\n\n for c in account_id {\n\n let current_char_is_separator = match *c {\n\n b'a'..=b'z' | b'0'..=b'9' => false,\n\n b'-' | b'_' | b'.' => true,\n\n _ => return false,\n\n };\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 3, "score": 275487.5875540025 }, { "content": "pub fn state_write<T: borsh::BorshSerialize>(state: &T) {\n\n let data = state.try_to_vec().expect(\"Cannot serialize the contract state.\");\n\n storage_write(STATE_KEY, &data);\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 4, "score": 250426.80213195193 }, { "content": "/// Logs the string message message. This message is stored on chain.\n\npub fn log_str(message: &str) {\n\n #[cfg(all(debug_assertions, not(target_arch = \"wasm32\")))]\n\n eprintln!(\"{}\", message);\n\n\n\n unsafe { sys::log_utf8(message.len() as _, message.as_ptr() as _) }\n\n}\n\n\n\n/// Log the UTF-8 encodable message.\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 5, "score": 226941.34911927074 }, { "content": "/// Terminates the execution of the program with the UTF-8 encoded message.\n\npub fn panic_str(message: &str) -> ! 
{\n\n unsafe { sys::panic_utf8(message.len() as _, message.as_ptr() as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 6, "score": 226941.34911927074 }, { "content": "/// Hashes the random sequence of bytes using sha256.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use near_sdk::env::sha256;\n\n/// use hex;\n\n///\n\n/// assert_eq!(\n\n/// sha256(b\"The phrase that will be hashed\"),\n\n/// hex::decode(\"7fc38bc74a0d0e592d2b8381839adc2649007d5bca11f92eeddef78681b4e3a3\").expect(\"Decoding failed\")\n\n/// );\n\n/// ```\n\npub fn sha256(value: &[u8]) -> Vec<u8> {\n\n sha256_array(value).to_vec()\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 7, "score": 224640.27247044793 }, { "content": "/// Hashes the bytes using the SHA-256 hash function. This returns a 32 byte hash.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use near_sdk::env::sha256_array;\n\n/// use hex;\n\n///\n\n/// assert_eq!(\n\n/// &sha256_array(b\"The phrase that will be hashed\"),\n\n/// hex::decode(\"7fc38bc74a0d0e592d2b8381839adc2649007d5bca11f92eeddef78681b4e3a3\")\n\n/// .expect(\"Decoding failed\")\n\n/// .as_slice()\n\n/// );\n\n/// ```\n\npub fn sha256_array(value: &[u8]) -> [u8; 32] {\n\n //* SAFETY: sha256 syscall will always generate 32 bytes inside of the atomic op register\n\n //* so the read will have a sufficient buffer of 32, and can transmute from uninit\n\n //* because all bytes are filled. 
This assumes a valid sha256 implementation.\n\n unsafe {\n\n sys::sha256(value.len() as _, value.as_ptr() as _, ATOMIC_OP_REGISTER);\n\n read_register_fixed_32(ATOMIC_OP_REGISTER)\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 8, "score": 224640.18704936135 }, { "content": "/// Hashes the random sequence of bytes using keccak256.\n\npub fn keccak256(value: &[u8]) -> Vec<u8> {\n\n keccak256_array(value).to_vec()\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 9, "score": 224639.48538489977 }, { "content": "/// Hashes the random sequence of bytes using keccak512.\n\npub fn keccak512(value: &[u8]) -> Vec<u8> {\n\n keccak512_array(value).to_vec()\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 10, "score": 224639.48538489977 }, { "content": "/// Hashes the bytes using the Keccak-512 hash function. This returns a 64 byte hash.\n\npub fn keccak512_array(value: &[u8]) -> [u8; 64] {\n\n //* SAFETY: keccak512 syscall will always generate 64 bytes inside of the atomic op register\n\n //* so the read will have a sufficient buffer of 64, and can transmute from uninit\n\n //* because all bytes are filled. This assumes a valid keccak512 implementation.\n\n unsafe {\n\n sys::keccak512(value.len() as _, value.as_ptr() as _, ATOMIC_OP_REGISTER);\n\n read_register_fixed_64(ATOMIC_OP_REGISTER)\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 11, "score": 224639.38910874142 }, { "content": "/// Hashes the bytes using the RIPEMD-160 hash function. This returns a 20 byte hash.\n\npub fn ripemd160_array(value: &[u8]) -> [u8; 20] {\n\n //* SAFETY: ripemd160 syscall will always generate 20 bytes inside of the atomic op register\n\n //* so the read will have a sufficient buffer of 20, and can transmute from uninit\n\n //* because all bytes are filled. 
This assumes a valid ripemd160 implementation.\n\n unsafe {\n\n sys::ripemd160(value.len() as _, value.as_ptr() as _, ATOMIC_OP_REGISTER);\n\n read_register_fixed_20(ATOMIC_OP_REGISTER)\n\n }\n\n}\n\n\n\n/// Recovers an ECDSA signer address from a 32-byte message `hash` and a corresponding `signature`\n\n/// along with `v` recovery byte.\n\n///\n\n/// Takes in an additional flag to check for malleability of the signature\n\n/// which is generally only ideal for transactions.\n\n///\n\n/// Returns 64 bytes representing the public key if the recovery was successful.\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 12, "score": 224639.38910874142 }, { "content": "/// Hashes the bytes using the Keccak-256 hash function. This returns a 32 byte hash.\n\npub fn keccak256_array(value: &[u8]) -> [u8; 32] {\n\n //* SAFETY: keccak256 syscall will always generate 32 bytes inside of the atomic op register\n\n //* so the read will have a sufficient buffer of 32, and can transmute from uninit\n\n //* because all bytes are filled. 
This assumes a valid keccak256 implementation.\n\n unsafe {\n\n sys::keccak256(value.len() as _, value.as_ptr() as _, ATOMIC_OP_REGISTER);\n\n read_register_fixed_32(ATOMIC_OP_REGISTER)\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 13, "score": 224639.38910874142 }, { "content": "/// Returns true if promise was successful.\n\n/// Fails if called outside a callback that received 1 promise result.\n\npub fn is_promise_success() -> bool {\n\n require!(env::promise_results_count() == 1, \"Contract expected a result on the callback\");\n\n env::promise_result_internal(0).is_ok()\n\n}\n\n\n", "file_path": "near-sdk/src/utils/mod.rs", "rank": 14, "score": 224152.38960564873 }, { "content": "/// Returns `true` if the contract state exists and `false` otherwise.\n\npub fn state_exists() -> bool {\n\n storage_has_key(STATE_KEY)\n\n}\n\n\n\n// #####################################\n\n// # Parameters exposed by the runtime #\n\n// #####################################\n\n\n\n/// Price per 1 byte of storage from mainnet genesis config.\n\n/// TODO: will be using the host function when it will be available.\n\npub const STORAGE_PRICE_PER_BYTE: Balance = 10_000_000_000_000_000_000;\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 15, "score": 224152.38960564873 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Use env::panic_str to panic with a message.\")]\n\npub fn panic(message: &[u8]) -> ! 
{\n\n unsafe { sys::panic_utf8(message.len() as _, message.as_ptr() as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 16, "score": 218015.81794840147 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Use env::log_str for logging messages.\")]\n\npub fn log(message: &[u8]) {\n\n #[cfg(all(debug_assertions, not(target_arch = \"wasm32\")))]\n\n eprintln!(\"{}\", String::from_utf8_lossy(message));\n\n\n\n unsafe { sys::log_utf8(message.len() as _, message.as_ptr() as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 17, "score": 218015.81794840144 }, { "content": "/// Reads the value stored under the given key.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use near_sdk::env::{storage_write, storage_read};\n\n///\n\n/// assert!(storage_read(b\"key\").is_none());\n\n/// storage_write(b\"key\", b\"value\");\n\n/// assert_eq!(storage_read(b\"key\").unwrap(), b\"value\");\n\n/// ```\n\npub fn storage_read(key: &[u8]) -> Option<Vec<u8>> {\n\n match unsafe { sys::storage_read(key.len() as _, key.as_ptr() as _, ATOMIC_OP_REGISTER) } {\n\n 0 => None,\n\n 1 => Some(expect_register(read_register(ATOMIC_OP_REGISTER))),\n\n _ => abort(),\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 18, "score": 217064.7750591839 }, { "content": "/// Returns the random seed from the current block. This 32 byte hash is based on the VRF value from\n\n/// the block. This value is not modified in any way each time this function is called within the\n\n/// same method/block.\n\npub fn random_seed_array() -> [u8; 32] {\n\n //* SAFETY: random_seed syscall will always generate 32 bytes inside of the atomic op register\n\n //* so the read will have a sufficient buffer of 32, and can transmute from uninit\n\n //* because all bytes are filled. 
This assumes a valid random_seed implementation.\n\n unsafe {\n\n sys::random_seed(ATOMIC_OP_REGISTER);\n\n read_register_fixed_32(ATOMIC_OP_REGISTER)\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 19, "score": 215840.20171606608 }, { "content": "/// Returns the random seed from the current block. This 32 byte hash is based on the VRF value from\n\n/// the block. This value is not modified in any way each time this function is called within the\n\n/// same method/block.\n\npub fn random_seed() -> Vec<u8> {\n\n random_seed_array().to_vec()\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 20, "score": 215840.20171606608 }, { "content": "// #####################\n\n// # Miscellaneous API #\n\n// #####################\n\n/// Sets the blob of data as the return value of the contract.\n\npub fn value_return(value: &[u8]) {\n\n unsafe { sys::value_return(value.len() as _, value.as_ptr() as _) }\n\n}\n\n/// Terminates the execution of the program with the UTF-8 encoded message.\n\n/// [`panic_str`] should be used as the bytes are required to be UTF-8\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 21, "score": 215840.20171606608 }, { "content": "/// The input to the contract call serialized as bytes. If input is not provided returns `None`.\n\npub fn input() -> Option<Vec<u8>> {\n\n try_method_into_register!(input)\n\n}\n\n\n\n/// Current block index.\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 22, "score": 210829.93235481062 }, { "content": "/// Returns the result of the promise if successful. 
Otherwise returns None.\n\n/// Fails if called outside a callback that received 1 promise result.\n\npub fn promise_result_as_success() -> Option<Vec<u8>> {\n\n require!(env::promise_results_count() == 1, \"Contract expected a result on the callback\");\n\n match env::promise_result(0) {\n\n PromiseResult::Successful(result) => Some(result),\n\n _ => None,\n\n }\n\n}\n\n\n\n/// Deprecated helper function which used to generate code to initialize the [`GlobalAllocator`].\n\n/// This is now initialized by default. Disable `wee_alloc` feature to configure manually.\n\n///\n\n/// [`GlobalAllocator`]: std::alloc::GlobalAlloc\n\n#[deprecated(\n\n since = \"4.0.0\",\n\n note = \"Allocator is already initialized with the default `wee_alloc` feature set. \\\n\n Please make sure you don't disable default features on the SDK or set the global \\\n\n allocator manually.\"\n\n)]\n\n#[macro_export]\n\nmacro_rules! setup_alloc {\n", "file_path": "near-sdk/src/utils/mod.rs", "rank": 23, "score": 206686.7841600818 }, { "content": "/// Reads the most recent value that was evicted with `storage_write` or `storage_remove` command.\n\npub fn storage_get_evicted() -> Option<Vec<u8>> {\n\n read_register(EVICTED_REGISTER)\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 24, "score": 206686.7841600818 }, { "content": "// ############################################\n\n// # Saving and loading of the contract state #\n\n// ############################################\n\n/// Load the state of the given object.\n\npub fn state_read<T: borsh::BorshDeserialize>() -> Option<T> {\n\n storage_read(STATE_KEY)\n\n .map(|data| T::try_from_slice(&data).expect(\"Cannot deserialize the contract state.\"))\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 25, "score": 206215.03395973254 }, { "content": "pub fn generate_serializer(\n\n attr_sig_info: &AttrSigInfo,\n\n serializer: &SerializerType,\n\n) -> TokenStream2 {\n\n let has_input_args = 
attr_sig_info.input_args().next().is_some();\n\n if !has_input_args {\n\n return quote! { vec![] };\n\n }\n\n let struct_decl = attr_sig_info.input_struct_ser();\n\n let constructor_call = attr_sig_info.constructor_expr_ref();\n\n let constructor = quote! { let __args = #constructor_call; };\n\n let value_ser = match serializer {\n\n SerializerType::JSON => quote! {\n\n near_sdk::serde_json::to_vec(&__args).expect(\"Failed to serialize the cross contract args using JSON.\")\n\n },\n\n SerializerType::Borsh => quote! {\n\n near_sdk::borsh::BorshSerialize::try_to_vec(&__args).expect(\"Failed to serialize the cross contract args using Borsh.\")\n\n },\n\n };\n\n\n\n quote! {\n\n {\n\n #struct_decl\n\n #constructor\n\n #value_ser\n\n }\n\n }\n\n}\n", "file_path": "near-sdk-macros/src/core_impl/code_generator/serializer.rs", "rank": 26, "score": 203326.63504105958 }, { "content": "/// Assert that predecessor_account_id == current_account_id, meaning contract called itself.\n\npub fn assert_self() {\n\n require!(env::predecessor_account_id() == env::current_account_id(), \"Method is private\");\n\n}\n\n\n", "file_path": "near-sdk/src/utils/mod.rs", "rank": 27, "score": 200853.29440075773 }, { "content": "pub fn promise_batch_action_deploy_contract(promise_index: u64, code: &[u8]) {\n\n unsafe {\n\n sys::promise_batch_action_deploy_contract(\n\n promise_index,\n\n code.len() as _,\n\n code.as_ptr() as _,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 28, "score": 194709.55327748647 }, { "content": "/// Reads the content of the `register_id`. If register is not used returns `None`.\n\npub fn read_register(register_id: u64) -> Option<Vec<u8>> {\n\n // Get register length and convert to a usize. 
The max register size in config is much less\n\n // than the u32 max so the abort should never be hit, but is there for safety because there\n\n // would be undefined behaviour during `read_register` if the buffer length is truncated.\n\n let len: usize = register_len(register_id)?.try_into().unwrap_or_else(|_| abort());\n\n\n\n // Initialize buffer with capacity.\n\n let mut buffer = Vec::with_capacity(len);\n\n\n\n // Read register into buffer.\n\n //* SAFETY: This is safe because the buffer is initialized with the exact capacity of the\n\n //* register that is being read from.\n\n unsafe {\n\n sys::read_register(register_id, buffer.as_mut_ptr() as u64);\n\n\n\n // Set updated length after writing to buffer.\n\n buffer.set_len(len);\n\n }\n\n Some(buffer)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 29, "score": 194512.76728459029 }, { "content": "/// The gas that was already burnt during the contract execution (cannot exceed `prepaid_gas`)\n\npub fn used_gas() -> Gas {\n\n Gas(unsafe { sys::used_gas() })\n\n}\n\n\n\n// ############\n\n// # Math API #\n\n// ############\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 30, "score": 194315.17992637205 }, { "content": "/// Assumes that the precedecessor will be refunded\n\npub fn refund_deposit(storage_used: u64) {\n\n refund_deposit_to_account(storage_used, env::predecessor_account_id())\n\n}\n\n\n", "file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 31, "score": 180413.4633042501 }, { "content": "pub fn refund_approved_account_ids_iter<'a, I>(\n\n account_id: AccountId,\n\n approved_account_ids: I,\n\n) -> Promise\n\nwhere\n\n I: Iterator<Item = &'a AccountId>,\n\n{\n\n let storage_released: u64 = approved_account_ids.map(bytes_for_approved_account_id).sum();\n\n Promise::new(account_id).transfer(Balance::from(storage_released) * env::storage_byte_cost())\n\n}\n\n\n", "file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 
32, "score": 180007.31674523366 }, { "content": "#[proc_macro_derive(BorshStorageKey)]\n\npub fn borsh_storage_key(item: TokenStream) -> TokenStream {\n\n let name = if let Ok(input) = syn::parse::<ItemEnum>(item.clone()) {\n\n input.ident\n\n } else if let Ok(input) = syn::parse::<ItemStruct>(item) {\n\n input.ident\n\n } else {\n\n return TokenStream::from(\n\n syn::Error::new(\n\n Span::call_site(),\n\n \"BorshStorageKey can only be used as a derive on enums or structs.\",\n\n )\n\n .to_compile_error(),\n\n );\n\n };\n\n TokenStream::from(quote! {\n\n impl near_sdk::__private::BorshIntoStorageKey for #name {}\n\n })\n\n}\n\n\n\n/// `FunctionError` generates implementation for `near_sdk::FunctionError` trait.\n\n/// It allows contract runtime to panic with the type using its `ToString` implementation\n\n/// as the message.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 33, "score": 177191.44449011466 }, { "content": "/// Converts a Borsh serializable object into a `Vec<u8>` that is used for a storage key.\n\n///\n\n/// [`BorshStorageKey`](crate::BorshStorageKey) should be used instead of implementing\n\n/// this manually.\n\n///\n\n/// ```\n\n/// use near_sdk::borsh::BorshSerialize;\n\n/// use near_sdk::BorshStorageKey;\n\n/// use near_sdk::collections::LookupMap;\n\n///\n\n/// #[derive(BorshSerialize, BorshStorageKey)]\n\n/// enum StorageKey {\n\n/// FungibleToken,\n\n/// Metadata { sub_key: String },\n\n/// }\n\n///\n\n/// let lookup_map_1: LookupMap<u64, String> = LookupMap::new(StorageKey::Metadata { sub_key: String::from(\"yo\") });\n\n/// let lookup_map_2: LookupMap<String, String> = LookupMap::new(StorageKey::FungibleToken);\n\n/// ```\n\npub trait BorshIntoStorageKey: BorshSerialize {}\n\n\n\nimpl<T> IntoStorageKey for T\n\nwhere\n\n T: BorshIntoStorageKey,\n\n{\n\n fn into_storage_key(self) -> Vec<u8> {\n\n self.try_to_vec().unwrap()\n\n }\n\n}\n", "file_path": "near-sdk/src/private/mod.rs", "rank": 34, "score": 175903.40053767542 }, { "content": 
"#[deprecated(since = \"4.0.0\", note = \"Case is handled internally by macro, no need to import\")]\n\n#[proc_macro_attribute]\n\npub fn serializer(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n item\n\n}\n\n\n\n/// `result_serializer` is a marker attribute it does not generate code by itself.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 35, "score": 170028.4496303408 }, { "content": "pub fn deserialize_data(ty: &SerializerType) -> TokenStream2 {\n\n match ty {\n\n SerializerType::JSON => quote! {\n\n near_sdk::serde_json::from_slice(&data).expect(\"Failed to deserialize callback using JSON\")\n\n },\n\n SerializerType::Borsh => quote! {\n\n near_sdk::borsh::BorshDeserialize::try_from_slice(&data).expect(\"Failed to deserialize callback using Borsh\")\n\n },\n\n }\n\n}\n", "file_path": "near-sdk-macros/src/core_impl/code_generator/attr_sig_info.rs", "rank": 36, "score": 168352.12627833014 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Case is handled internally by macro, no need to import\")]\n\n#[proc_macro_attribute]\n\npub fn result_serializer(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n item\n\n}\n\n\n\n/// `init` is a marker attribute it does not generate code by itself.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 37, "score": 168212.93023279897 }, { "content": "pub fn refund_deposit_to_account(storage_used: u64, account_id: AccountId) {\n\n let required_cost = env::storage_byte_cost() * Balance::from(storage_used);\n\n let attached_deposit = env::attached_deposit();\n\n\n\n require!(\n\n required_cost <= attached_deposit,\n\n format!(\"Must attach {} yoctoNEAR to cover storage\", required_cost)\n\n );\n\n\n\n let refund = attached_deposit - required_cost;\n\n if refund > 1 {\n\n Promise::new(account_id).transfer(refund);\n\n }\n\n}\n\n\n", "file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 38, "score": 165557.89839153475 }, { "content": "/// Aborts the current contract 
execution without a custom message.\n\n/// To include a message, use [`panic_str`].\n\npub fn abort() -> ! {\n\n // Use wasm32 unreachable call to avoid including the `panic` external function in Wasm.\n\n #[cfg(target_arch = \"wasm32\")]\n\n //* This was stabilized recently (~ >1.51), so ignore warnings but don't enforce higher msrv\n\n #[allow(unused_unsafe)]\n\n unsafe {\n\n core::arch::wasm32::unreachable()\n\n }\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n unsafe {\n\n sys::panic()\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 39, "score": 153984.79851905053 }, { "content": "#[cfg(feature = \"unstable\")]\n\npub fn ecrecover(\n\n hash: &[u8],\n\n signature: &[u8],\n\n v: u8,\n\n malleability_flag: bool,\n\n) -> Option<[u8; 64]> {\n\n unsafe {\n\n let return_code = sys::ecrecover(\n\n hash.len() as _,\n\n hash.as_ptr() as _,\n\n signature.len() as _,\n\n signature.as_ptr() as _,\n\n v as u64,\n\n malleability_flag as u64,\n\n ATOMIC_OP_REGISTER,\n\n );\n\n if return_code == 0 {\n\n None\n\n } else {\n\n Some(read_register_fixed_64(ATOMIC_OP_REGISTER))\n\n }\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 40, "score": 153974.5840522656 }, { "content": "/// Attaches the callback that is executed after promise pointed by `promise_idx` is complete.\n\npub fn promise_then(\n\n promise_idx: PromiseIndex,\n\n account_id: AccountId,\n\n function_name: &str,\n\n arguments: &[u8],\n\n amount: Balance,\n\n gas: Gas,\n\n) -> PromiseIndex {\n\n let account_id = account_id.as_bytes();\n\n unsafe {\n\n sys::promise_then(\n\n promise_idx,\n\n account_id.len() as _,\n\n account_id.as_ptr() as _,\n\n function_name.len() as _,\n\n function_name.as_ptr() as _,\n\n arguments.len() as _,\n\n arguments.as_ptr() as _,\n\n &amount as *const Balance as _,\n\n gas.0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 41, "score": 153974.5840522656 }, { "content": "// ################\n\n// # Promises 
API #\n\n// ################\n\n/// Creates a promise that will execute a method on account with given arguments and attaches\n\n/// the given amount and gas.\n\npub fn promise_create(\n\n account_id: AccountId,\n\n function_name: &str,\n\n arguments: &[u8],\n\n amount: Balance,\n\n gas: Gas,\n\n) -> PromiseIndex {\n\n let account_id = account_id.as_bytes();\n\n unsafe {\n\n sys::promise_create(\n\n account_id.len() as _,\n\n account_id.as_ptr() as _,\n\n function_name.len() as _,\n\n function_name.as_ptr() as _,\n\n arguments.len() as _,\n\n arguments.as_ptr() as _,\n\n &amount as *const Balance as _,\n\n gas.0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 42, "score": 152442.78474503214 }, { "content": "pub fn setup() {\n\n testing_env!(VMContextBuilder::new().build());\n\n}\n\n\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 43, "score": 150958.46639634093 }, { "content": "/// Assert that 1 yoctoNEAR was attached.\n\npub fn assert_one_yocto() {\n\n require!(env::attached_deposit() == 1, \"Requires attached deposit of exactly 1 yoctoNEAR\")\n\n}\n\n\n", "file_path": "near-sdk/src/utils/mod.rs", "rank": 44, "score": 150958.46639634093 }, { "content": "/// Setups panic hook to expose error info to the blockchain.\n\npub fn setup_panic_hook() {\n\n std_panic::set_hook(Box::new(panic_hook_impl));\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 45, "score": 150958.46639634093 }, { "content": "/// free == effectively unlimited gas\n\n/// Sets up the blockchain interface with a [`VMConfig`] which sets the gas costs to zero.\n\npub fn setup_free() {\n\n crate::testing_env!(VMContextBuilder::new().build(), VMConfig::free())\n\n}\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 46, "score": 149519.45505849514 }, { "content": "pub fn promise_batch_action_stake(\n\n promise_index: PromiseIndex,\n\n amount: Balance,\n\n public_key: &PublicKey,\n\n) {\n\n unsafe {\n\n 
sys::promise_batch_action_stake(\n\n promise_index,\n\n &amount as *const Balance as _,\n\n public_key.as_bytes().len() as _,\n\n public_key.as_bytes().as_ptr() as _,\n\n )\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 47, "score": 149519.45505849514 }, { "content": "pub fn promise_batch_action_delete_account(\n\n promise_index: PromiseIndex,\n\n beneficiary_id: &AccountId,\n\n) {\n\n let beneficiary_id: &str = beneficiary_id.as_ref();\n\n unsafe {\n\n sys::promise_batch_action_delete_account(\n\n promise_index,\n\n beneficiary_id.len() as _,\n\n beneficiary_id.as_ptr() as _,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 48, "score": 148123.7075029904 }, { "content": "pub fn promise_batch_action_function_call(\n\n promise_index: PromiseIndex,\n\n function_name: &str,\n\n arguments: &[u8],\n\n amount: Balance,\n\n gas: Gas,\n\n) {\n\n unsafe {\n\n sys::promise_batch_action_function_call(\n\n promise_index,\n\n function_name.len() as _,\n\n function_name.as_ptr() as _,\n\n arguments.len() as _,\n\n arguments.as_ptr() as _,\n\n &amount as *const Balance as _,\n\n gas.0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 49, "score": 148123.7075029904 }, { "content": "pub fn promise_batch_action_function_call_weight(\n\n promise_index: PromiseIndex,\n\n function_name: &str,\n\n arguments: &[u8],\n\n amount: Balance,\n\n gas: Gas,\n\n weight: GasWeight,\n\n) {\n\n unsafe {\n\n sys::promise_batch_action_function_call_weight(\n\n promise_index,\n\n function_name.len() as _,\n\n function_name.as_ptr() as _,\n\n arguments.len() as _,\n\n arguments.as_ptr() as _,\n\n &amount as *const Balance as _,\n\n gas.0,\n\n weight.0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 50, "score": 146769.30154086376 }, { "content": "/// The amount of gas attached to the call that can be used to pay for the gas fees.\n\npub fn prepaid_gas() -> Gas {\n\n Gas(unsafe { 
sys::prepaid_gas() })\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 51, "score": 146663.91664885578 }, { "content": "/// The balance that was attached to the call that will be immediately deposited before the\n\n/// contract execution starts\n\npub fn attached_deposit() -> Balance {\n\n let data = [0u8; size_of::<Balance>()];\n\n unsafe { sys::attached_deposit(data.as_ptr() as u64) };\n\n Balance::from_le_bytes(data)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 52, "score": 146658.80571655818 }, { "content": "/// Current block timestamp, i.e, number of non-leap-nanoseconds since January 1, 1970 0:00:00 UTC.\n\npub fn block_timestamp() -> u64 {\n\n unsafe { sys::block_timestamp() }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 53, "score": 146658.80571655818 }, { "content": "/// Current epoch height.\n\npub fn epoch_height() -> u64 {\n\n unsafe { sys::epoch_height() }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 54, "score": 146658.80571655818 }, { "content": "// #################\n\n// # Economics API #\n\n// #################\n\n/// The balance attached to the given account. 
This includes the attached_deposit that was\n\n/// attached to the transaction\n\npub fn account_balance() -> Balance {\n\n let data = [0u8; size_of::<Balance>()];\n\n unsafe { sys::account_balance(data.as_ptr() as u64) };\n\n Balance::from_le_bytes(data)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 55, "score": 146658.80571655818 }, { "content": "pub fn refund_approved_account_ids(\n\n account_id: AccountId,\n\n approved_account_ids: &HashMap<AccountId, u64>,\n\n) -> Promise {\n\n refund_approved_account_ids_iter(account_id, approved_account_ids.keys())\n\n}\n\n\n", "file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 56, "score": 145454.42719061457 }, { "content": "pub fn promise_batch_action_add_key_with_function_call(\n\n promise_index: PromiseIndex,\n\n public_key: &PublicKey,\n\n nonce: u64,\n\n allowance: Balance,\n\n receiver_id: &AccountId,\n\n function_names: &str,\n\n) {\n\n let receiver_id: &str = receiver_id.as_ref();\n\n unsafe {\n\n sys::promise_batch_action_add_key_with_function_call(\n\n promise_index,\n\n public_key.as_bytes().len() as _,\n\n public_key.as_bytes().as_ptr() as _,\n\n nonce,\n\n &allowance as *const Balance as _,\n\n receiver_id.len() as _,\n\n receiver_id.as_ptr() as _,\n\n function_names.len() as _,\n\n function_names.as_ptr() as _,\n\n )\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 57, "score": 145454.42719061457 }, { "content": "pub fn promise_batch_action_add_key_with_full_access(\n\n promise_index: PromiseIndex,\n\n public_key: &PublicKey,\n\n nonce: u64,\n\n) {\n\n unsafe {\n\n sys::promise_batch_action_add_key_with_full_access(\n\n promise_index,\n\n public_key.as_bytes().len() as _,\n\n public_key.as_bytes().as_ptr() as _,\n\n nonce,\n\n )\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 58, "score": 145454.42719061457 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Use block_height instead\")]\n\npub fn 
block_index() -> BlockHeight {\n\n block_height()\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 59, "score": 145224.969094612 }, { "content": "/// Returns the total stake of validators in the current epoch.\n\npub fn validator_total_stake() -> Balance {\n\n let data = [0u8; size_of::<Balance>()];\n\n unsafe { sys::validator_total_stake(data.as_ptr() as u64) };\n\n Balance::from_le_bytes(data)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 60, "score": 145219.79437871242 }, { "content": "/// Current total storage usage of this smart contract that this account would be paying for.\n\npub fn storage_usage() -> StorageUsage {\n\n unsafe { sys::storage_usage() }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 61, "score": 145219.79437871242 }, { "content": "/// Current block timestamp, i.e, number of non-leap-milliseconds since January 1, 1970 0:00:00 UTC.\n\npub fn block_timestamp_ms() -> u64 {\n\n block_timestamp() / 1_000_000\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 62, "score": 145219.79437871242 }, { "content": "/// If the current function is invoked by a callback we can access the execution results of the\n\n/// promises that caused the callback. 
This function returns the number of complete and\n\n/// incomplete callbacks.\n\npub fn promise_results_count() -> u64 {\n\n unsafe { sys::promise_results_count() }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 63, "score": 145219.79437871242 }, { "content": "pub fn storage_byte_cost() -> Balance {\n\n STORAGE_PRICE_PER_BYTE\n\n}\n\n\n\n// ##################\n\n// # Helper methods #\n\n// ##################\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 64, "score": 145219.79437871242 }, { "content": "/// Returns the height of the block the transaction is being executed in.\n\npub fn block_height() -> BlockHeight {\n\n unsafe { sys::block_height() }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 65, "score": 145219.79437871242 }, { "content": "/// The balance locked for potential validator staking.\n\npub fn account_locked_balance() -> Balance {\n\n let data = [0u8; size_of::<Balance>()];\n\n unsafe { sys::account_locked_balance(data.as_ptr() as u64) };\n\n Balance::from_le_bytes(data)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 66, "score": 145219.79437871242 }, { "content": "pub fn alice() -> AccountId {\n\n AccountId::new_unchecked(\"alice.near\".to_string())\n\n}\n\n\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 67, "score": 143824.0468232077 }, { "content": "pub fn bob() -> AccountId {\n\n AccountId::new_unchecked(\"bob.near\".to_string())\n\n}\n\n\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 68, "score": 143824.0468232077 }, { "content": "/// The id of the account that either signed the original transaction or issued the initial\n\n/// cross-contract call.\n\npub fn signer_account_id() -> AccountId {\n\n assert_valid_account_id(method_into_register!(signer_account_id))\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 69, "score": 143824.0468232077 }, { "content": "pub fn carol() -> AccountId {\n\n 
AccountId::new_unchecked(\"carol.near\".to_string())\n\n}\n\n\n\n/// Updates the blockchain interface with the config passed in.\n\n#[deprecated(\n\n since = \"4.0.0\",\n\n note = \"Use `testing_env!` macro to initialize with specific VMConfig\"\n\n)]\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 70, "score": 143824.0468232077 }, { "content": "/// The id of the account that was the previous contract in the chain of cross-contract calls.\n\n/// If this is the first contract, it is equal to `signer_account_id`.\n\npub fn predecessor_account_id() -> AccountId {\n\n assert_valid_account_id(method_into_register!(predecessor_account_id))\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 71, "score": 143824.0468232077 }, { "content": "// ###############\n\n// # Context API #\n\n// ###############\n\n/// The id of the account that owns the current contract.\n\npub fn current_account_id() -> AccountId {\n\n assert_valid_account_id(method_into_register!(current_account_id))\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 72, "score": 143824.0468232077 }, { "content": "/// The public key of the account that did the signing.\n\npub fn signer_account_pk() -> PublicKey {\n\n PublicKey::try_from(method_into_register!(signer_account_pk)).unwrap_or_else(|_| abort())\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 73, "score": 143824.0468232077 }, { "content": "/// Compiles contract to wasm with release configuration and returns the code size.\n\nfn check_example_size(example: &str) -> usize {\n\n let status = std::process::Command::new(\"cargo\")\n\n .env(\"RUSTFLAGS\", \"-C link-arg=-s\")\n\n .args(&[\"build\", \"--release\", \"--target\", \"wasm32-unknown-unknown\", \"--manifest-path\"])\n\n .arg(format!(\"../examples/{}/Cargo.toml\", example))\n\n .status()\n\n .unwrap();\n\n if !status.success() {\n\n panic!(\"building wasm example returned non-zero code {}\", status);\n\n }\n\n\n\n let wasm = 
std::fs::read(format!(\n\n \"../examples/{}/target/wasm32-unknown-unknown/release/{}.wasm\",\n\n example,\n\n example.replace('-', \"_\")\n\n ))\n\n .unwrap();\n\n\n\n wasm.len()\n\n}\n\n\n", "file_path": "near-sdk/tests/code_size.rs", "rank": 74, "score": 143428.28723878384 }, { "content": "/// Trait used to generate keys to store data based on a serializable structure.\n\npub trait ToKey: self::private::Sealed {\n\n /// Output type for the generated lookup key.\n\n type KeyType: AsRef<[u8]>;\n\n\n\n fn to_key<Q: ?Sized>(prefix: &[u8], key: &Q, buffer: &mut Vec<u8>) -> Self::KeyType\n\n where\n\n Q: BorshSerialize;\n\n}\n\n\n\n/// Sha256 hash helper which hashes through a syscall. This type satisfies the [`ToKey`] trait.\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n\npub enum Sha256 {}\n\n\n\nimpl ToKey for Sha256 {\n\n type KeyType = [u8; 32];\n\n\n\n fn to_key<Q: ?Sized>(prefix: &[u8], key: &Q, buffer: &mut Vec<u8>) -> Self::KeyType\n\n where\n\n Q: BorshSerialize,\n\n {\n", "file_path": "near-sdk/src/store/key.rs", "rank": 75, "score": 142747.90207758 }, { "content": "/// Returns a copy of logs from VMLogic. 
Only available in unit tests.\n\npub fn get_logs() -> Vec<String> {\n\n crate::mock::with_mocked_blockchain(|b| b.logs())\n\n}\n\n\n", "file_path": "near-sdk/src/test_utils/mod.rs", "rank": 76, "score": 140130.81573608104 }, { "content": "#[derive(BorshStorageKey, BorshSerialize)]\n\nstruct StorageKeyStruct {\n\n key: String,\n\n}\n\n\n", "file_path": "near-sdk/compilation_tests/borsh_storage_key.rs", "rank": 77, "score": 139203.65755149332 }, { "content": "/// Consider the execution result of promise under `promise_idx` as execution result of this\n\n/// function.\n\npub fn promise_return(promise_idx: PromiseIndex) {\n\n unsafe { sys::promise_return(promise_idx) }\n\n}\n\n\n\n// ###############\n\n// # Validator API #\n\n// ###############\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 78, "score": 138776.4097739544 }, { "content": "/// Accessing receipts created by the contract. Only available in unit tests.\n\npub fn get_created_receipts() -> Vec<Receipt> {\n\n crate::mock::with_mocked_blockchain(|b| b.created_receipts())\n\n}\n\n\n\n/// Objects stored on the trie directly should have identifiers. 
If identifier is not provided\n\n/// explicitly than `Default` trait would use this index to generate an id.\n\n#[cfg(test)]\n\npub(crate) static mut NEXT_TRIE_OBJECT_INDEX: u64 = 0;\n\n/// Get next id of the object stored on trie.\n\n#[cfg(test)]\n\npub(crate) fn next_trie_id() -> Vec<u8> {\n\n unsafe {\n\n let id = NEXT_TRIE_OBJECT_INDEX;\n\n NEXT_TRIE_OBJECT_INDEX += 1;\n\n id.to_le_bytes().to_vec()\n\n }\n\n}\n", "file_path": "near-sdk/src/test_utils/mod.rs", "rank": 79, "score": 138776.4097739544 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn set_blockchain_interface(blockchain_interface: MockedBlockchain) {\n\n crate::mock::with_mocked_blockchain(|b| {\n\n *b = blockchain_interface;\n\n })\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 80, "score": 137461.5354237052 }, { "content": "pub fn setup_with_config(vm_config: VMConfig) {\n\n testing_env!(VMContextBuilder::new().build(), vm_config)\n\n}\n\n\n\n/// Setup the blockchain interface with a default configuration.\n\n#[deprecated(\n\n since = \"4.0.0\",\n\n note = \"Mocked blockchain is now setup by default, use `testing_env!`\"\n\n)]\n", "file_path": "near-sdk/src/test_utils/test_env.rs", "rank": 81, "score": 136184.48684144928 }, { "content": "/// Helper function to convert and check the account ID from bytes from the runtime.\n\nfn assert_valid_account_id(bytes: Vec<u8>) -> AccountId {\n\n String::from_utf8(bytes)\n\n .ok()\n\n .and_then(|s| AccountId::try_from(s).ok())\n\n .unwrap_or_else(|| abort())\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 82, "score": 135700.42475326874 }, { "content": "#[proc_macro]\n\npub fn metadata(item: TokenStream) -> TokenStream {\n\n if let Ok(input) = syn::parse::<File>(item) {\n\n let mut visitor = MetadataVisitor::new();\n\n visitor.visit_file(&input);\n\n let generated = match visitor.generate_metadata_method() {\n\n Ok(x) => x,\n\n Err(err) => return TokenStream::from(err.to_compile_error()),\n\n };\n\n 
TokenStream::from(quote! {\n\n #input\n\n #generated\n\n })\n\n } else {\n\n TokenStream::from(\n\n syn::Error::new(\n\n Span::call_site(),\n\n \"Failed to parse code decorated with `metadata!{}` macro. Only valid Rust is supported.\",\n\n )\n\n .to_compile_error(),\n\n )\n\n }\n\n}\n\n\n\n/// `PanicOnDefault` generates implementation for `Default` trait that panics with the following\n\n/// message `The contract is not initialized` when `default()` is called.\n\n/// This is a helpful macro in case the contract is required to be initialized with either `init` or\n\n/// `init(ignore_state)`.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 83, "score": 135569.76248327745 }, { "content": "/// Returns a pre-defined account_id from a list of 6.\n\npub fn accounts(id: usize) -> AccountId {\n\n AccountId::new_unchecked(\n\n [\"alice\", \"bob\", \"charlie\", \"danny\", \"eugene\", \"fargo\"][id].to_string(),\n\n )\n\n}\n\n\n\n/// Simple VMContext builder that allows to quickly create custom context in tests.\n\n#[derive(Clone)]\n\npub struct VMContextBuilder {\n\n pub context: VMContext,\n\n}\n\n\n\nimpl Default for VMContextBuilder {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\n/// Context for the contract execution.\n", "file_path": "near-sdk/src/test_utils/context.rs", "rank": 84, "score": 135569.76248327745 }, { "content": "pub fn promise_batch_action_create_account(promise_index: PromiseIndex) {\n\n unsafe { sys::promise_batch_action_create_account(promise_index) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 85, "score": 134943.6549376472 }, { "content": "#[proc_macro_derive(PanicOnDefault)]\n\npub fn derive_no_default(item: TokenStream) -> TokenStream {\n\n if let Ok(input) = syn::parse::<ItemStruct>(item) {\n\n let name = &input.ident;\n\n TokenStream::from(quote! 
{\n\n impl Default for #name {\n\n fn default() -> Self {\n\n near_sdk::env::panic_str(\"The contract is not initialized\");\n\n }\n\n }\n\n })\n\n } else {\n\n TokenStream::from(\n\n syn::Error::new(\n\n Span::call_site(),\n\n \"PanicOnDefault can only be used on type declarations sections.\",\n\n )\n\n .to_compile_error(),\n\n )\n\n }\n\n}\n\n\n\n/// `BorshStorageKey` generates implementation for `BorshIntoStorageKey` trait.\n\n/// It allows the type to be passed as a unique prefix for persistent collections.\n\n/// The type should also implement or derive `BorshSerialize` trait.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 86, "score": 134254.88813302826 }, { "content": "#[proc_macro_derive(FunctionError)]\n\npub fn function_error(item: TokenStream) -> TokenStream {\n\n let name = if let Ok(input) = syn::parse::<ItemEnum>(item.clone()) {\n\n input.ident\n\n } else if let Ok(input) = syn::parse::<ItemStruct>(item) {\n\n input.ident\n\n } else {\n\n return TokenStream::from(\n\n syn::Error::new(\n\n Span::call_site(),\n\n \"FunctionError can only be used as a derive on enums or structs.\",\n\n )\n\n .to_compile_error(),\n\n );\n\n };\n\n TokenStream::from(quote! {\n\n impl near_sdk::FunctionError for #name {\n\n fn panic(&self) -> ! 
{\n\n near_sdk::env::panic_str(&::std::string::ToString::to_string(&self))\n\n }\n\n }\n\n })\n\n}\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 87, "score": 134254.88813302826 }, { "content": "/// Creates a new promise which completes when time all promises passed as arguments complete.\n\npub fn promise_and(promise_indices: &[PromiseIndex]) -> PromiseIndex {\n\n let mut data = vec![0u8; promise_indices.len() * size_of::<PromiseIndex>()];\n\n for i in 0..promise_indices.len() {\n\n data[i * size_of::<PromiseIndex>()..(i + 1) * size_of::<PromiseIndex>()]\n\n .copy_from_slice(&promise_indices[i].to_le_bytes());\n\n }\n\n unsafe { sys::promise_and(data.as_ptr() as _, promise_indices.len() as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 88, "score": 134254.88813302826 }, { "content": "/// If the current function is invoked by a callback we can access the execution results of the\n\n/// promises that caused the callback.\n\npub fn promise_result(result_idx: u64) -> PromiseResult {\n\n match promise_result_internal(result_idx) {\n\n Err(PromiseError::NotReady) => PromiseResult::NotReady,\n\n Ok(()) => {\n\n let data = expect_register(read_register(ATOMIC_OP_REGISTER));\n\n PromiseResult::Successful(data)\n\n }\n\n Err(PromiseError::Failed) => PromiseResult::Failed,\n\n }\n\n}\n\n\n\npub(crate) fn promise_result_internal(result_idx: u64) -> Result<(), PromiseError> {\n\n match unsafe { sys::promise_result(result_idx, ATOMIC_OP_REGISTER) } {\n\n 0 => Err(PromiseError::NotReady),\n\n 1 => Ok(()),\n\n 2 => Err(PromiseError::Failed),\n\n _ => abort(),\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 89, "score": 134254.88813302826 }, { "content": "/// For a given account return its current stake. 
If the account is not a validator, returns 0.\n\npub fn validator_stake(account_id: &AccountId) -> Balance {\n\n let account_id: &str = account_id.as_ref();\n\n let data = [0u8; size_of::<Balance>()];\n\n unsafe {\n\n sys::validator_stake(account_id.len() as _, account_id.as_ptr() as _, data.as_ptr() as u64)\n\n };\n\n Balance::from_le_bytes(data)\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 90, "score": 134254.88813302826 }, { "content": "fn validate_account_id(id: &str) -> Result<(), ParseAccountIdError> {\n\n if is_valid_account_id(id.as_bytes()) {\n\n Ok(())\n\n } else {\n\n Err(ParseAccountIdError {})\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for AccountId {\n\n type Error = ParseAccountIdError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n validate_account_id(value.as_str())?;\n\n Ok(Self(value))\n\n }\n\n}\n\n\n\nimpl std::str::FromStr for AccountId {\n\n type Err = ParseAccountIdError;\n\n\n", "file_path": "near-sdk/src/types/account_id.rs", "rank": 91, "score": 133609.39445279023 }, { "content": "pub fn promise_batch_create(account_id: &AccountId) -> PromiseIndex {\n\n let account_id = account_id.as_ref();\n\n unsafe { sys::promise_batch_create(account_id.len() as _, account_id.as_ptr() as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 92, "score": 131737.00764697025 }, { "content": "#[near_bindgen]\n\n#[derive(BorshDeserialize, BorshSerialize)]\n\nstruct Contract {\n\n map1: LookupMap<u64, u64>,\n\n map2: LookupMap<String, String>,\n\n}\n\n\n\nimpl Default for Contract {\n\n fn default() -> Self {\n\n Self {\n\n map1: LookupMap::new(StorageKeyStruct { key: \"bla\".to_string() }),\n\n map2: LookupMap::new(StorageKeyEnum::Accounts),\n\n }\n\n }\n\n}\n\n\n\n#[near_bindgen]\n\nimpl Contract {}\n\n\n", "file_path": "near-sdk/compilation_tests/borsh_storage_key.rs", "rank": 93, "score": 131463.55370221593 }, { "content": "/// Returns the size of the register. 
If register is not used returns `None`.\n\npub fn register_len(register_id: u64) -> Option<u64> {\n\n let len = unsafe { sys::register_len(register_id) };\n\n if len == std::u64::MAX {\n\n None\n\n } else {\n\n Some(len)\n\n }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 94, "score": 131449.78123892358 }, { "content": "/// Perform function on a mutable reference to the [`MockedBlockchain`]. This can only be used\n\n/// inside tests.\n\npub fn with_mocked_blockchain<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(&mut MockedBlockchain) -> R,\n\n{\n\n BLOCKCHAIN_INTERFACE.with(|b| f(&mut b.borrow_mut()))\n\n}\n\n\n\nimpl From<near_vm_logic::types::AccountId> for AccountId {\n\n fn from(id: near_vm_logic::types::AccountId) -> Self {\n\n Self::new_unchecked(String::from(id))\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<AccountId> for near_vm_logic::types::AccountId {\n\n type Error = ParseAccountError;\n\n\n\n fn try_from(value: AccountId) -> Result<Self, Self::Error> {\n\n value.as_str().parse()\n\n }\n\n}\n", "file_path": "near-sdk/src/environment/mock/mod.rs", "rank": 95, "score": 128966.55718458045 }, { "content": "pub fn hash_account_id(account_id: &AccountId) -> CryptoHash {\n\n let mut hash = CryptoHash::default();\n\n hash.copy_from_slice(&env::sha256(account_id.as_bytes()));\n\n hash\n\n}\n\n\n\n/// Assert that at least 1 yoctoNEAR was attached.\n\npub(crate) fn assert_at_least_one_yocto() {\n\n require!(env::attached_deposit() >= 1, \"Requires attached deposit of at least 1 yoctoNEAR\")\n\n}\n", "file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 96, "score": 128217.0342482074 }, { "content": "// TODO: need a way for end users to determine how much an approval will cost.\n\npub fn bytes_for_approved_account_id(account_id: &AccountId) -> u64 {\n\n // The extra 4 bytes are coming from Borsh serialization to store the length of the string.\n\n account_id.as_str().len() as u64 + 4 + size_of::<u64>() as u64\n\n}\n\n\n", 
"file_path": "near-contract-standards/src/non_fungible_token/utils.rs", "rank": 97, "score": 128217.0342482074 }, { "content": "pub fn promise_batch_action_transfer(promise_index: PromiseIndex, amount: Balance) {\n\n unsafe { sys::promise_batch_action_transfer(promise_index, &amount as *const Balance as _) }\n\n}\n\n\n", "file_path": "near-sdk/src/environment/env.rs", "rank": 98, "score": 127720.59171253044 }, { "content": "#[proc_macro_attribute]\n\n#[deprecated(since = \"4.0.0\", note = \"Case is handled internally by macro, no need to import\")]\n\npub fn callback(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n item\n\n}\n\n\n\n/// `callback_args_vec` is a marker attribute it does not generate code by itself.\n", "file_path": "near-sdk-macros/src/lib.rs", "rank": 99, "score": 126443.59707198957 } ]
Rust
src/encrypted_stream.rs
GWBasic/bounce
2d8a86b8c33bf408ad5d5b20c440233c0746b13a
/* use async_std::io; use async_std::io::{Read, Result, Write}; use async_std::task::{Context, Poll}; use std::marker::Unpin; use std::pin::Pin; use rand_core::{CryptoRng, RngCore}; pub struct EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { wrapped_stream: TStream, write_xor: Xor<TRng>, read_xor: Xor<TRng>, } pub struct Xor<TRng> { rng: TRng, xor: [u8; 1024], ctr: usize, } impl<TStream, TRng> Unpin for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore {} impl<TStream, TRng> EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { pub fn new(wrapped_stream: TStream, write_rng: TRng, read_rng: TRng) -> EncryptedStream<TStream, TRng> { EncryptedStream { wrapped_stream, write_xor: Xor::new(write_rng), read_xor: Xor::new(read_rng), } } } impl<TRng> Xor<TRng> where TRng: CryptoRng + RngCore { fn new(rng: TRng) -> Xor<TRng> { Xor { rng, xor: [0u8; 1024], ctr: usize::MAX, } } fn next_byte(&mut self) -> u8 { if self.ctr >= self.xor.len() { self.rng.fill_bytes(&mut self.xor[..]); self.ctr = 0; } let b = self.xor[self.ctr]; self.ctr = self.ctr + 1; b } } impl<TStream: Read + Unpin, TRng> Read for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { let result = Pin::new(&mut self.wrapped_stream).poll_read(cx, buf); match result { Poll::Ready(result) => match result { Result::Ok(size) => { for ctr in 0..size { buf[ctr] = buf[ctr] ^ self.read_xor.next_byte(); } Poll::Ready(Result::Ok(size)) }, Result::Err(err) => Poll::Ready(Result::Err(err)) }, Poll::Pending => Poll::Pending } } } impl<TStream: Write + Unpin, TRng> Write for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<io::Result<usize>> { let mut 
encrypted = vec![0; buf.len()]; for ctr in 0..buf.len() { encrypted[ctr] = buf[ctr] ^ self.write_xor.next_byte(); } Pin::new(&mut self.wrapped_stream).poll_write(cx, &encrypted) } fn poll_flush( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_> ) -> std::task::Poll<std::result::Result<(), std::io::Error>> { Pin::new(&mut self.wrapped_stream).poll_flush(cx) } fn poll_close( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_> ) -> std::task::Poll<std::result::Result<(), std::io::Error>> { Pin::new(&mut self.wrapped_stream).poll_close(cx) } } // Test with https://docs.rs/async-std/1.7.0/async_std/io/struct.Cursor.html #[cfg(test)] mod tests { use async_std::io::Cursor; use futures::io::{AsyncReadExt, AsyncWriteExt}; use rand::{Rng, SeedableRng, thread_rng}; use rand_chacha::ChaCha8Rng; use super::*; fn create_stream_and_data_and_rng(stream_buf: Vec<u8>) -> (Cursor<Vec<u8>>, <ChaCha8Rng as SeedableRng>::Seed, ChaCha8Rng, ChaCha8Rng) { let memory_stream = Cursor::new(stream_buf); let mut test_seed: <ChaCha8Rng as SeedableRng>::Seed = Default::default(); thread_rng().fill(&mut test_seed); let test_rng = ChaCha8Rng::from_seed(test_seed.clone()); let ignored_rng = ChaCha8Rng::seed_from_u64(0); (memory_stream, test_seed, test_rng, ignored_rng) } #[async_std::test] async fn encrypted_stream_works_write() { let len = 1024 * 1024; let stream_buf = vec![0u8; len]; let (memory_stream, test_seed, test_rng, ignored_rng) = create_stream_and_data_and_rng(stream_buf); let mut test_contents = vec![0u8; len]; thread_rng().fill(&mut test_contents[..]); let mut encrypted_stream = EncryptedStream::new(memory_stream, test_rng, ignored_rng); encrypted_stream.write_all(&test_contents).await.unwrap(); let stream_buf_encrypted = encrypted_stream.wrapped_stream.into_inner(); // Verify that the contents changed assert_ne!(test_contents, stream_buf_encrypted, "Contents weren't encrypted"); // Verify each byte let test_rng = 
ChaCha8Rng::from_seed(test_seed.clone()); let mut xor = Xor::new(test_rng); for ctr in 0..test_contents.len() { let b = xor.next_byte(); assert_eq!(test_contents[ctr], stream_buf_encrypted[ctr] ^ b, "Encrypted content isn't as expected"); } } #[async_std::test] async fn encrypted_stream_works_read() { let len = 1024 * 1024; let mut encrypted_contents = vec![0u8; len]; thread_rng().fill(&mut encrypted_contents[..]); let (memory_stream, test_seed, test_rng, ignored_rng) = create_stream_and_data_and_rng(encrypted_contents.clone()); let mut encrypted_stream = EncryptedStream::new(memory_stream, ignored_rng, test_rng); let mut decrypted_contents = vec![0u8; encrypted_contents.len()]; let mut bytes_read = 0; loop { bytes_read = bytes_read + encrypted_stream.read(&mut decrypted_contents[bytes_read..]).await.unwrap(); if bytes_read >= decrypted_contents.len() { break; } } // Verify that the contents changed assert_ne!(encrypted_contents, decrypted_contents, "Contents weren't decrypted"); // Verify each byte let test_rng = ChaCha8Rng::from_seed(test_seed.clone()); let mut xor = Xor::new(test_rng); for ctr in 0..encrypted_contents.len() { let b = xor.next_byte(); assert_eq!(decrypted_contents[ctr], encrypted_contents[ctr] ^ b, "Decrypted content isn't as expected"); } } } */
/* use async_std::io; use async_std::io::{Read, Result, Write}; use async_std::task::{Context, Poll}; use std::marker::Unpin; use std::pin::Pin; use rand_core::{CryptoRng, RngCore}; pub struct EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { wrapped_stream: TStream, write_xor: Xor<TRng>, read_xor: Xor<TRng>, } pub struct Xor<TRng> { rng: TRng, xor: [u8; 1024], ctr: usize, } impl<TSt
^ self.write_xor.next_byte(); } Pin::new(&mut self.wrapped_stream).poll_write(cx, &encrypted) } fn poll_flush( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_> ) -> std::task::Poll<std::result::Result<(), std::io::Error>> { Pin::new(&mut self.wrapped_stream).poll_flush(cx) } fn poll_close( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_> ) -> std::task::Poll<std::result::Result<(), std::io::Error>> { Pin::new(&mut self.wrapped_stream).poll_close(cx) } } // Test with https://docs.rs/async-std/1.7.0/async_std/io/struct.Cursor.html #[cfg(test)] mod tests { use async_std::io::Cursor; use futures::io::{AsyncReadExt, AsyncWriteExt}; use rand::{Rng, SeedableRng, thread_rng}; use rand_chacha::ChaCha8Rng; use super::*; fn create_stream_and_data_and_rng(stream_buf: Vec<u8>) -> (Cursor<Vec<u8>>, <ChaCha8Rng as SeedableRng>::Seed, ChaCha8Rng, ChaCha8Rng) { let memory_stream = Cursor::new(stream_buf); let mut test_seed: <ChaCha8Rng as SeedableRng>::Seed = Default::default(); thread_rng().fill(&mut test_seed); let test_rng = ChaCha8Rng::from_seed(test_seed.clone()); let ignored_rng = ChaCha8Rng::seed_from_u64(0); (memory_stream, test_seed, test_rng, ignored_rng) } #[async_std::test] async fn encrypted_stream_works_write() { let len = 1024 * 1024; let stream_buf = vec![0u8; len]; let (memory_stream, test_seed, test_rng, ignored_rng) = create_stream_and_data_and_rng(stream_buf); let mut test_contents = vec![0u8; len]; thread_rng().fill(&mut test_contents[..]); let mut encrypted_stream = EncryptedStream::new(memory_stream, test_rng, ignored_rng); encrypted_stream.write_all(&test_contents).await.unwrap(); let stream_buf_encrypted = encrypted_stream.wrapped_stream.into_inner(); // Verify that the contents changed assert_ne!(test_contents, stream_buf_encrypted, "Contents weren't encrypted"); // Verify each byte let test_rng = ChaCha8Rng::from_seed(test_seed.clone()); let mut xor = Xor::new(test_rng); for ctr in 0..test_contents.len() { let b = 
xor.next_byte(); assert_eq!(test_contents[ctr], stream_buf_encrypted[ctr] ^ b, "Encrypted content isn't as expected"); } } #[async_std::test] async fn encrypted_stream_works_read() { let len = 1024 * 1024; let mut encrypted_contents = vec![0u8; len]; thread_rng().fill(&mut encrypted_contents[..]); let (memory_stream, test_seed, test_rng, ignored_rng) = create_stream_and_data_and_rng(encrypted_contents.clone()); let mut encrypted_stream = EncryptedStream::new(memory_stream, ignored_rng, test_rng); let mut decrypted_contents = vec![0u8; encrypted_contents.len()]; let mut bytes_read = 0; loop { bytes_read = bytes_read + encrypted_stream.read(&mut decrypted_contents[bytes_read..]).await.unwrap(); if bytes_read >= decrypted_contents.len() { break; } } // Verify that the contents changed assert_ne!(encrypted_contents, decrypted_contents, "Contents weren't decrypted"); // Verify each byte let test_rng = ChaCha8Rng::from_seed(test_seed.clone()); let mut xor = Xor::new(test_rng); for ctr in 0..encrypted_contents.len() { let b = xor.next_byte(); assert_eq!(decrypted_contents[ctr], encrypted_contents[ctr] ^ b, "Decrypted content isn't as expected"); } } } */
ream, TRng> Unpin for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore {} impl<TStream, TRng> EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { pub fn new(wrapped_stream: TStream, write_rng: TRng, read_rng: TRng) -> EncryptedStream<TStream, TRng> { EncryptedStream { wrapped_stream, write_xor: Xor::new(write_rng), read_xor: Xor::new(read_rng), } } } impl<TRng> Xor<TRng> where TRng: CryptoRng + RngCore { fn new(rng: TRng) -> Xor<TRng> { Xor { rng, xor: [0u8; 1024], ctr: usize::MAX, } } fn next_byte(&mut self) -> u8 { if self.ctr >= self.xor.len() { self.rng.fill_bytes(&mut self.xor[..]); self.ctr = 0; } let b = self.xor[self.ctr]; self.ctr = self.ctr + 1; b } } impl<TStream: Read + Unpin, TRng> Read for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { let result = Pin::new(&mut self.wrapped_stream).poll_read(cx, buf); match result { Poll::Ready(result) => match result { Result::Ok(size) => { for ctr in 0..size { buf[ctr] = buf[ctr] ^ self.read_xor.next_byte(); } Poll::Ready(Result::Ok(size)) }, Result::Err(err) => Poll::Ready(Result::Err(err)) }, Poll::Pending => Poll::Pending } } } impl<TStream: Write + Unpin, TRng> Write for EncryptedStream<TStream, TRng> where TStream: Read + Write, TRng: CryptoRng + RngCore { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<io::Result<usize>> { let mut encrypted = vec![0; buf.len()]; for ctr in 0..buf.len() { encrypted[ctr] = buf[ctr]
random
[ { "content": "pub fn run_bridge<TRng>(xors: Xors<TRng>, clear_stream: TcpStream, clear_stream_name: String, encrypted_stream: TcpStream, encrypted_stream_name: String) where\n\nTRng: CryptoRng + RngCore + Clone + Any {\n\n\n\n match clear_stream.set_nodelay(true) {\n\n Err(err) => {\n\n log::error!(\"Error disabling Nagle on {}: {}\", clear_stream_name, err);\n\n return;\n\n },\n\n Ok(()) => {}\n\n }\n\n\n\n match encrypted_stream.set_nodelay(true) {\n\n Err(err) => {\n\n log::error!(\"Error disabling Nagle on {}: {}\", encrypted_stream_name, err);\n\n return;\n\n },\n\n Ok(()) => {}\n\n }\n\n\n\n task::spawn(bridge(xors.clone(), clear_stream, clear_stream_name, encrypted_stream, encrypted_stream_name));\n", "file_path": "src/bridge.rs", "rank": 0, "score": 65824.63593587192 }, { "content": "fn process(key: &Key, nonce: &Vec<u8>, to_process: &[u8]) -> Vec<u8> {\n\n let mut my_ciper = aes::ctr(key.size, &key.key, nonce);\n\n let mut processed = vec![0u8; to_process.len()];\n\n my_ciper.process(to_process, &mut processed);\n\n\n\n processed\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use async_std::net::{IpAddr, Ipv4Addr, Shutdown, TcpListener, SocketAddr};\n\n use async_std::prelude::*;\n\n\n\n use crypto::aes::KeySize;\n\n\n\n use super::*;\n\n\n\n async fn get_key_and_socket_streams() -> (Key, TcpStream, TcpStream) {\n\n let key = Key {\n\n key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32],\n", "file_path": "src/auth.rs", "rank": 1, "score": 46644.59789985339 }, { "content": "pub fn run_client(bounce_server: String, destination_host: String, key: Key) -> (JoinHandle<Result<(), Error>>, CancelationToken) {\n\n let (cancelation_token, cancelable) = CancelationToken::new();\n\n let client_future = task::spawn(run_client_int(bounce_server, destination_host, key, cancelable));\n\n\n\n (client_future, cancelation_token)\n\n}\n\n\n\nasync fn run_client_int(bounce_server: String, 
destination_host: String, key: Key, cancelable: Cancelable) -> Result<(), Error> {\n\n log::info!(\"Bounce client: Connecting to bounce server at {}, bouncing to {}\", bounce_server, destination_host);\n\n\n\n let connected = b\"connected\".to_vec();\n\n\n\n 'client_loop: loop {\n\n let mut bounce_stream = TcpStream::connect(bounce_server.clone()).await?;\n\n\n\n let xors = authenticate(key.clone(), bounce_stream.clone()).await?;\n\n\n\n let mut buf = vec!(0u8; connected.len());\n\n let mut read = 0;\n\n\n", "file_path": "src/client.rs", "rank": 2, "score": 43219.44124242963 }, { "content": "pub fn run_server(port: u16, adapter_port: u16, key: Key) -> (JoinHandle<Result<(), Error>>, CompletionToken<()>, CancelationToken) {\n\n let (listening_token, listening_completable) = CompletionToken::new();\n\n let (cancelation_token, cancelable) = CancelationToken::new();\n\n\n\n let server_future = task::spawn(run_server_int(port, adapter_port, key, listening_completable, cancelable));\n\n\n\n (server_future, listening_token, cancelation_token)\n\n}\n\n\n\nasync fn run_server_int(port: u16, adapter_port: u16, key: Key, listening_completable: Completable<()>, cancelable: Cancelable) -> Result<(), Error> {\n\n\n\n let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), port);\n\n let listener = TcpListener::bind(socket_addr).await?;\n\n\n\n // There is always an ongoing task that accepts an incoming connection on the clear (not adapter) port\n\n // This task is replaced when the socket is accepted\n\n let mut incoming_future = task::spawn(accept(listener));\n\n\n\n let adapter_socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), adapter_port);\n\n let adapter_listener = TcpListener::bind(adapter_socket_addr).await?;\n", "file_path": "src/server.rs", "rank": 3, "score": 41811.63204178636 }, { "content": "pub fn generate_keys() {\n\n let mut key = vec![0u8; 256 / 8];\n\n OsRng.fill_bytes(&mut key);\n\n\n\n println!(\"Key: {}\", 
key.to_base64(STANDARD));\n\n}\n\n\n", "file_path": "src/keys.rs", "rank": 4, "score": 36788.30136526097 }, { "content": "pub fn parse_key(key_str: &str) -> Key {\n\n let key = match key_str.from_base64() {\n\n Err(err) => panic!(\"Can not parse key {}: {}\", key_str, err),\n\n Ok(v) => v\n\n };\n\n\n\n if key.len() != 256 / 8 {\n\n panic!(\"Only 256-bit keys supported\")\n\n }\n\n\n\n let size = KeySize::KeySize256;\n\n\n\n Key {key, size}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use rustc_serialize::base64::STANDARD;\n", "file_path": "src/keys.rs", "rank": 5, "score": 30651.916952856693 }, { "content": "fn parse_port(port_str: &str) -> Result<u16, Error> {\n\n match port_str.parse::<u16>() {\n\n Ok(port) => Ok(port),\n\n Err(err) => Err(Error::new(ErrorKind::Other, format!(\"Invalid port \\\"{}\\\": {}\", port_str, err)))\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 28752.368981745225 }, { "content": "fn get_port_from_env(var_name: &str) -> Result<u16, Error> {\n\n let port_str = get_env_var(var_name)?;\n\n Ok(parse_port(&port_str)?)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 28076.018762650914 }, { "content": "fn get_env_var(var_name: &str) -> Result<String, Error> {\n\n match var(var_name) {\n\n Ok(val) => Ok(val),\n\n Err(_) => Err(Error::new(ErrorKind::Other, format!(\"{} must be set\", var_name)))\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 28076.018762650914 }, { "content": "fn get_key_from_env(var_name: &str) -> Result<Key, Error> {\n\n let key_str = get_env_var(var_name)?;\n\n Ok(parse_key(&key_str))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 28076.018762650914 }, { "content": "use rand_core::{CryptoRng, RngCore};\n\n\n\nconst XOR_BUFFER_SIZE: usize = 1024;\n\n\n\n#[derive(Copy)]\n\n#[derive(Clone)]\n\npub struct Xor<TRng> where\n\nTRng: CryptoRng + RngCore + Clone {\n\n rng: TRng,\n\n xor: [u8; 1024],\n\n ctr: 
usize,\n\n}\n\n\n\n#[derive(Copy)]\n\n#[derive(Clone)]\n\npub struct Xors<TRng> where\n\nTRng: CryptoRng + RngCore + Clone {\n\n pub write_xor: Xor<TRng>,\n\n pub read_xor: Xor<TRng>\n\n}\n", "file_path": "src/xor.rs", "rank": 10, "score": 25803.851350110617 }, { "content": "\n\nunsafe impl<TRng> Send for Xor<TRng> where\n\nTRng: CryptoRng + RngCore + Clone {\n\n}\n\n\n\nimpl<TRng> Xor<TRng> where\n\nTRng: CryptoRng + RngCore + Clone {\n\n\n\n pub fn new(rng: TRng) -> Xor<TRng> {\n\n Xor {\n\n rng,\n\n xor: [0u8; XOR_BUFFER_SIZE],\n\n ctr: usize::MAX,\n\n }\n\n }\n\n\n\n fn next_byte(&mut self) -> u8 {\n\n if self.ctr >= self.xor.len() {\n\n self.rng.fill_bytes(&mut self.xor[..]);\n\n self.ctr = 0;\n", "file_path": "src/xor.rs", "rank": 11, "score": 25798.953212285247 }, { "content": " }\n\n\n\n let b = self.xor[self.ctr];\n\n self.ctr = self.ctr + 1;\n\n b\n\n }\n\n\n\n pub fn process(&mut self, data: &mut [u8]) {\n\n for ctr in 0..data.len() {\n\n let b = data[ctr] ^ self.next_byte();\n\n data[ctr] = b;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use rand::{RngCore, SeedableRng, thread_rng};\n\n use rand_chacha::ChaCha8Rng;\n\n\n", "file_path": "src/xor.rs", "rank": 12, "score": 25798.920287558598 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn same_seed_generates_same_sequence() {\n\n let mut xor = Xor::new(ChaCha8Rng::seed_from_u64(1));\n\n let mut rng = ChaCha8Rng::seed_from_u64(1);\n\n\n\n for _ in 0..5 {\n\n let mut buf = vec![0u8; XOR_BUFFER_SIZE];\n\n rng.fill_bytes(&mut buf);\n\n\n\n for ctr in 0..XOR_BUFFER_SIZE {\n\n assert_eq!(buf[ctr], xor.next_byte());\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn process() {\n\n let mut xor = Xor::new(ChaCha8Rng::seed_from_u64(1));\n", "file_path": "src/xor.rs", "rank": 13, "score": 25795.750231221526 }, { "content": " let mut rng = ChaCha8Rng::seed_from_u64(1);\n\n\n\n for _ in 0..5 {\n\n let mut xor_buf = vec![0u8; XOR_BUFFER_SIZE];\n\n rng.fill_bytes(&mut xor_buf);\n\n\n\n let mut buf_for_test = 
vec![0u8; XOR_BUFFER_SIZE];\n\n thread_rng().fill_bytes(&mut buf_for_test);\n\n\n\n let mut buf_for_xor = buf_for_test.clone();\n\n\n\n for ctr in 0..XOR_BUFFER_SIZE {\n\n buf_for_test[ctr] = buf_for_test[ctr] ^ xor_buf[ctr];\n\n }\n\n\n\n xor.process(&mut buf_for_xor);\n\n\n\n assert_eq!(buf_for_test, buf_for_xor);\n\n }\n\n }\n\n}", "file_path": "src/xor.rs", "rank": 14, "score": 25794.98562710722 }, { "content": "use async_std::io;\n\nuse async_std::io::{Read, Write};\n\nuse async_std::net::TcpStream;\n\nuse async_std::prelude::*;\n\nuse async_std::task;\n\nuse core::time::Duration;\n\nuse std::any::Any;\n\nuse std::convert::TryInto;\n\nuse std::io::{Error, ErrorKind};\n\nuse std::marker::Unpin;\n\n\n\nuse crypto::aes;\n\nuse rand::{RngCore, thread_rng};\n\nuse rand::{Rng, SeedableRng};\n\nuse rand_chacha::ChaCha12Rng;\n\n\n\nuse crate::keys::Key;\n\nuse crate::xor::{Xor, Xors};\n\n\n\npub async fn authenticate(key: Key, stream: TcpStream) -> Result<Xors<ChaCha12Rng>, Error> {\n", "file_path": "src/auth.rs", "rank": 21, "score": 14.180882360729568 }, { "content": "\n\n let mut a = outgoing_stream.clone();\n\n let mut b = incoming_stream.clone();\n\n\n\n for _ in 0usize..100 {\n\n let len = (rng.next_u64() % 2000) as usize;\n\n let mut write_buf = vec!(0u8; len);\n\n rng.fill_bytes(&mut write_buf);\n\n\n\n let write_future = task::spawn(write_all(a.clone(), write_buf.clone()));\n\n\n\n let mut read_buf = vec!(0u8; len);\n\n\n\n let mut total_bytes_read = 0;\n\n\n\n 'read_loop: loop {\n\n let bytes_read = b.read(&mut read_buf).await.expect(\"Can't read\");\n\n\n\n if bytes_read == 0 {\n\n panic!(\"Socket closed early\")\n", "file_path": "src/main.rs", "rank": 23, "score": 12.609998477838449 }, { "content": " for _ in 0usize..256 {\n\n let mut rng = thread_rng();\n\n\n\n let size = (rng.next_u64() % 4098) as usize;\n\n let mut send_buf = vec![0u8; size];\n\n rng.fill_bytes(&mut send_buf);\n\n\n\n write_stream.write_all(&send_buf[..]).await.expect(\"Can not write to 
initiating_client_clear_stream\");\n\n\n\n let mut recieve_buf = vec![0u8; size];\n\n let mut total_bytes_read = 0usize;\n\n\n\n while total_bytes_read < size {\n\n let bytes_read = read_stream.read(&mut recieve_buf[total_bytes_read..]).await.expect(\"Can not read from final_client_clear_stream\");\n\n assert_ne!(bytes_read, 0, \"Unexpected end of stream\");\n\n total_bytes_read = total_bytes_read + bytes_read;\n\n }\n\n\n\n assert_eq!(send_buf, recieve_buf, \"Wrong contents sent\");\n\n\n", "file_path": "src/bridge.rs", "rank": 24, "score": 12.137932295306273 }, { "content": " let their_seed_encrypted = read_and_write(stream.clone(), &my_seed_encrypted, Duration::from_secs_f32(0.5)).await?;\n\n\n\n let their_seed = process(&key, &their_nonce, &their_seed_encrypted)[0..32].try_into().expect(\"Unexpected seed size\");\n\n let their_seed = <ChaCha12Rng as SeedableRng>::Seed::from(their_seed);\n\n\n\n let write_rng = ChaCha12Rng::from_seed(my_seed);\n\n let read_rng = ChaCha12Rng::from_seed(their_seed);\n\n\n\n // Create the xors\n\n let mut write_xor = Xor::new(write_rng);\n\n let mut read_xor = Xor::new(read_rng);\n\n\n\n // Read and write encrypted \"bounce\"\n\n let mut my_bounce = b\"bounce\".to_vec();\n\n write_xor.process(&mut my_bounce[..]);\n\n\n\n let mut their_bounce = read_and_write(stream.clone(), &my_bounce, Duration::from_secs_f32(0.5)).await.expect(\"Handshake error\");\n\n read_xor.process(&mut their_bounce[..]);\n\n\n\n if their_bounce[..] != b\"bounce\"[..] 
{\n", "file_path": "src/auth.rs", "rank": 25, "score": 11.728592594815682 }, { "content": "}\n\n\n\npub async fn bridge<TRng>(xors: Xors<TRng>, clear_stream: TcpStream, clear_stream_name: String, encrypted_stream: TcpStream, encrypted_stream_name: String) where\n\nTRng: CryptoRng + RngCore + Clone + Any {\n\n\n\n let write_future = task::spawn(run_bridge_loop(\n\n xors.write_xor,\n\n clear_stream.clone(),\n\n clear_stream_name.clone(),\n\n encrypted_stream.clone(),\n\n encrypted_stream_name.clone()));\n\n\n\n let read_future = task::spawn(run_bridge_loop(\n\n xors.read_xor,\n\n encrypted_stream.clone(),\n\n encrypted_stream_name.clone(),\n\n clear_stream.clone(),\n\n clear_stream_name.clone()));\n\n\n\n match select(write_future, read_future).await {\n", "file_path": "src/bridge.rs", "rank": 26, "score": 11.480828617375238 }, { "content": "\n\nasync fn run_bridge_loop<TRng>(mut xor: Xor<TRng>, mut reader: TcpStream, reader_name: String, mut writer: TcpStream, writer_name: String) -> Result<(), Error> where\n\nTRng: CryptoRng + RngCore + Clone {\n\n \n\n let mut buf = vec![0u8; 4098];\n\n\n\n loop {\n\n let bytes_read = reader.read(&mut buf).await?;\n\n\n\n if bytes_read == 0 {\n\n log::debug!(\"Connected ending: {}\", reader_name);\n\n return Ok(());\n\n }\n\n\n\n log::trace!(\"Read {} bytes from {}\", bytes_read, reader_name);\n\n\n\n // Decrypt\n\n xor.process(&mut buf[..bytes_read]);\n\n\n\n // Forward\n", "file_path": "src/bridge.rs", "rank": 27, "score": 11.172008115232138 }, { "content": " if total_bytes_read >= buffer.len() {\n\n return Ok(());\n\n }\n\n }\n\n}\n\n\n\nasync fn write_buffer<TStream>(mut stream: TStream, buffer: Vec<u8>) -> Result<(), Error>\n\nwhere TStream : Read + Write + Unpin {\n\n stream.write_all(&buffer).await\n\n}\n\n\n\nasync fn read_and_write<TStream>(stream: TStream, buffer_to_write: &Vec<u8>, timeout: Duration) -> Result<Vec<u8>, Error>\n\nwhere TStream : Read + Write + Unpin + Clone + Send + Any {\n\n\n\n let write_future = 
task::spawn(write_buffer(stream.clone(), buffer_to_write.clone()));\n\n\n\n let mut buffer_to_read = vec![0u8; buffer_to_write.len()];\n\n\n\n read_buffer(stream, &mut buffer_to_read, timeout).await?;\n\n\n\n write_future.await?;\n\n Ok(buffer_to_read)\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 28, "score": 10.681644493662194 }, { "content": " return Err(Error::new(ErrorKind::InvalidData, \"Authentication failed\"));\n\n }\n\n\n\n Ok(Xors {\n\n write_xor,\n\n read_xor\n\n })\n\n}\n\n\n\nasync fn read_buffer<TStream>(mut stream: TStream, buffer: &mut [u8], timeout: Duration) -> Result<(), Error>\n\nwhere TStream : Read + Write + Unpin {\n\n let mut total_bytes_read = 0;\n\n loop {\n\n let bytes_read = io::timeout(timeout, stream.read(buffer)).await?;\n\n\n\n if bytes_read == 0 {\n\n return Err(Error::new(ErrorKind::InvalidData, \"Socket closed prematurely\"));\n\n }\n\n\n\n total_bytes_read = total_bytes_read + bytes_read;\n", "file_path": "src/auth.rs", "rank": 29, "score": 10.666947287201285 }, { "content": "extern crate rand;\n\n\n\nuse crypto::aes::KeySize;\n\nuse rand::RngCore;\n\nuse rand::rngs::OsRng;\n\nuse rustc_serialize::base64::{FromBase64, STANDARD, ToBase64};\n\n\n\n#[derive(Clone)]\n\npub struct Key {\n\n pub key: Vec<u8>,\n\n // This is always KeySize::KeySize256\n\n pub size: KeySize\n\n}\n\n\n", "file_path": "src/keys.rs", "rank": 30, "score": 10.436864257509228 }, { "content": " async fn start() -> TcpStreams {\n\n\n\n let streams = get_socket_streams().await;\n\n\n\n let xors = Xors {\n\n read_xor: Xor::new(ChaCha8Rng::seed_from_u64(1)),\n\n write_xor: Xor::new(ChaCha8Rng::seed_from_u64(2))\n\n };\n\n\n\n // server\n\n run_bridge(\n\n xors,\n\n streams.bounce_server_clear_stream.clone(),\n\n \"bounce_server_clear_stream\".to_string(),\n\n streams.bounce_server_encrypted_stream.clone(),\n\n \"bounce_server_encrypted_stream\".to_string());\n\n\n\n let xors = Xors {\n\n read_xor: Xor::new(ChaCha8Rng::seed_from_u64(2)),\n\n write_xor: 
Xor::new(ChaCha8Rng::seed_from_u64(1))\n", "file_path": "src/bridge.rs", "rank": 31, "score": 10.021123777854774 }, { "content": " Ok(_) => panic!(\"Failure not detected\"),\n\n Err(err) => assert_eq!(ErrorKind::InvalidData, err.kind())\n\n }\n\n\n\n match server_authenticate_result {\n\n Ok(_) => panic!(\"Failure not detected\"),\n\n Err(err) => assert_eq!(ErrorKind::InvalidData, err.kind())\n\n }\n\n }\n\n\n\n async fn read_and_write_take(stream: TcpStream, buffer_to_write: Vec<u8>, timeout: Duration) -> Result<Vec<u8>, Error> {\n\n read_and_write(stream, &buffer_to_write, timeout).await\n\n }\n\n\n\n #[async_std::test]\n\n async fn verify_read_and_write() {\n\n let a = vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10];\n\n let b = vec![10 as u8, 9, 8, 7, 6, 5, 4, 3, 2, 1];\n\n\n\n let (client_stream, server_stream) = get_socket_streams().await;\n", "file_path": "src/auth.rs", "rank": 32, "score": 9.840194596565635 }, { "content": "\n\n // TODO: A potential optimization is to send \"bounce\", nonce, and challenges as one single write\n\n\n\n // Read and write \"bounce\"\n\n let bounce_buffer = read_and_write(stream.clone(), &b\"bounce\".to_vec(), Duration::from_secs_f32(0.5)).await?;\n\n if bounce_buffer[..] != b\"bounce\"[..] 
{\n\n return Err(Error::new(ErrorKind::InvalidData, \"This is not a bounce server or client\"));\n\n }\n\n\n\n // Read and write nonces\n\n let mut my_nonce = vec![0u8; key.key.len()];\n\n thread_rng().fill_bytes(&mut my_nonce);\n\n\n\n let their_nonce = read_and_write(stream.clone(), &my_nonce, Duration::from_secs_f32(0.5)).await?;\n\n\n\n // Read and write seeds\n\n let mut my_seed: <ChaCha12Rng as SeedableRng>::Seed = Default::default();\n\n thread_rng().fill(&mut my_seed);\n\n let my_seed_encrypted = process(&key, &my_nonce, &my_seed);\n\n\n", "file_path": "src/auth.rs", "rank": 33, "score": 9.238379621649674 }, { "content": "use async_std::net::{Shutdown, TcpStream};\n\nuse async_std::prelude::*;\n\nuse async_std::task;\n\nuse std::io::Error;\n\n\n\nuse core::any::Any;\n\n\n\nuse futures::future::{Either, join, select};\n\nuse rand_core::{CryptoRng, RngCore};\n\n\n\nuse crate::xor::{Xor, Xors};\n\n\n", "file_path": "src/bridge.rs", "rank": 35, "score": 8.904279180732917 }, { "content": "async fn peek(stream: TcpStream) -> Result<usize, Error> {\n\n let mut peek_buf = [0u8; 1];\n\n let bytes = stream.peek(&mut peek_buf).await?;\n\n Ok(bytes)\n\n}\n\n\n\n// Note: Tests are error conditions only, happy-path tests are in main.rs\n\n#[cfg(test)]\n\nmod tests {\n\n use async_std::net::{IpAddr, Ipv4Addr, Shutdown, TcpListener, SocketAddr};\n\n use async_std::prelude::*;\n\n use async_std::task::JoinHandle;\n\n use std::io::Error;\n\n\n\n use crypto::aes::KeySize;\n\n\n\n use super::*;\n\n\n\n async fn get_adapter_stream_and_server_future() -> (TcpStream, SocketAddr, JoinHandle<Result<(), Error>>, CancelationToken) {\n\n let key = Key {\n", "file_path": "src/server.rs", "rank": 38, "score": 7.643025135906674 }, { "content": " use crypto::aes::KeySize;\n\n use rand::{RngCore, thread_rng};\n\n use sync_tokens::cancelation_token::CancelationToken;\n\n\n\n use super::*;\n\n\n\n async fn get_server_and_client_futures() -> (JoinHandle<Result<(), Error>>, 
JoinHandle<Result<(), Error>>, SocketAddr, CancelationToken, TcpListener, CancelationToken) {\n\n let key = Key {\n\n key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32],\n\n size: KeySize::KeySize256\n\n };\n\n\n\n let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0);\n\n let client_listener = TcpListener::bind(socket_addr).await.unwrap();\n\n let adapter_listener = TcpListener::bind(socket_addr).await.unwrap();\n\n\n\n let client_address = client_listener.local_addr().unwrap();\n\n let adapter_address = adapter_listener.local_addr().unwrap();\n\n\n\n drop(client_listener);\n", "file_path": "src/main.rs", "rank": 39, "score": 7.432277428403673 }, { "content": " // Exchange\n\n let i = write_stream;\n\n write_stream = read_stream;\n\n read_stream = i;\n\n }\n\n }\n\n\n\n async fn shutdown_read(write_stream: &TcpStream, read_stream: &mut TcpStream) {\n\n write_stream.shutdown(Shutdown::Both).unwrap();\n\n\n\n let mut read_buf = vec![0u8, 16];\n\n let bytes_read = read_stream.read(&mut read_buf[..]).await.unwrap();\n\n\n\n assert_eq!(bytes_read, 0, \"Socket should be shut down\");\n\n }\n\n\n\n #[async_std::test]\n\n async fn shutdown_incoming_read() {\n\n let mut streams = start().await;\n\n shutdown_read(&streams.initiating_client_clear_stream, &mut streams.final_client_clear_stream).await;\n\n }\n\n\n\n #[async_std::test]\n\n async fn shutdown_outgoing_read() {\n\n let mut streams = start().await;\n\n shutdown_read(&streams.final_client_clear_stream, &mut streams.initiating_client_clear_stream).await;\n\n }\n\n}\n", "file_path": "src/bridge.rs", "rank": 40, "score": 6.859934042025413 }, { "content": "mod auth;\n\nmod bridge;\n\nmod client;\n\nmod keys;\n\nmod server;\n\nmod xor;\n\n\n\nuse std::env::{args, var};\n\nuse std::io::{ Error, ErrorKind, Write };\n\n\n\nuse chrono::Local;\n\nuse env_logger::Builder;\n\nuse log::LevelFilter;\n\n\n\nuse 
client::run_client;\n\nuse keys::{Key, generate_keys, parse_key};\n\nuse server::run_server;\n\n\n\n#[async_std::main]\n\nasync fn main() {\n", "file_path": "src/main.rs", "rank": 42, "score": 6.639423137921799 }, { "content": "\n\n let a_sent_future = task::spawn(read_and_write_take(server_stream.clone(), b.clone(), Duration::from_secs_f32(0.5)));\n\n let b_sent = read_and_write_take(client_stream, a.clone(), Duration::from_secs_f32(0.5)).await.unwrap();\n\n let a_sent = a_sent_future.await.unwrap();\n\n\n\n assert_eq!(a, a_sent);\n\n assert_eq!(b, b_sent);\n\n }\n\n\n\n #[test]\n\n fn different_keys() {\n\n\n\n let key_1 = Key {\n\n key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],\n\n size: KeySize::KeySize128\n\n };\n\n\n\n let key_2 = Key {\n\n key: vec![2 as u8, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17],\n\n size: KeySize::KeySize128\n", "file_path": "src/auth.rs", "rank": 43, "score": 6.483307822710046 }, { "content": "}\n\n\n\n// Note: Tests are error conditions only, happy-path tests will be handled in general integration tests\n\n#[cfg(test)]\n\nmod tests {\n\n use async_std::net::{IpAddr, Ipv4Addr, Shutdown, TcpListener, SocketAddr};\n\n use async_std::prelude::*;\n\n use async_std::task::JoinHandle;\n\n use std::io::{Error, ErrorKind};\n\n\n\n use crypto::aes::KeySize;\n\n\n\n use super::*;\n\n\n\n async fn get_server_stream_and_client_future() -> (TcpStream, JoinHandle<Result<(), Error>>) {\n\n let key = Key {\n\n key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32],\n\n size: KeySize::KeySize256\n\n };\n\n\n", "file_path": "src/client.rs", "rank": 44, "score": 5.91743287537904 }, { "content": " }\n\n\n\n total_bytes_read = total_bytes_read + bytes_read;\n\n\n\n if total_bytes_read >= len {\n\n break 'read_loop;\n\n }\n\n }\n\n\n\n write_future.await.expect(\"Problem writing\");\n\n\n\n assert_eq!(write_buf, read_buf, \"Contents 
garbled\");\n\n\n\n let c = a;\n\n a = b;\n\n b = c;\n\n }\n\n\n\n outgoing_stream.shutdown(Shutdown::Both).expect(\"Can't shutdown outgoing_stream\");\n\n incoming_stream.shutdown(Shutdown::Both).expect(\"Can't shutdown incoming_stream\");\n", "file_path": "src/main.rs", "rank": 45, "score": 5.830725541308322 }, { "content": "mod tests {\n\n use async_std::net::{IpAddr, Ipv4Addr, Shutdown, TcpListener, SocketAddr};\n\n use async_std::prelude::*;\n\n\n\n use rand::{RngCore, SeedableRng, thread_rng};\n\n use rand_chacha::ChaCha8Rng;\n\n\n\n use super::*;\n\n\n\n struct TcpStreams {\n\n initiating_client_clear_stream: TcpStream,\n\n bounce_server_clear_stream: TcpStream,\n\n bounce_server_encrypted_stream: TcpStream,\n\n bounce_client_encrypted_stream: TcpStream,\n\n bounce_client_clear_stream: TcpStream,\n\n final_client_clear_stream: TcpStream\n\n }\n\n\n\n impl Drop for TcpStreams {\n\n fn drop(&mut self) {\n", "file_path": "src/bridge.rs", "rank": 46, "score": 5.533968929584008 }, { "content": "\n\n server_cancelation_token.cancel();\n\n\n\n let err = server_future.await.expect_err(\"Server terminated in error\");\n\n assert_eq!(ErrorKind::Interrupted, err.kind(), \"Unexpected error when the server exits\");\n\n\n\n client_cancelation_token.cancel();\n\n let err = client_future.await.expect_err(\"Client terminated without error\");\n\n assert_eq!(ErrorKind::Interrupted, err.kind(), \"Unexpected error when the client exits\");\n\n }\n\n\n\n async fn write_all(mut stream: TcpStream, buf: Vec<u8>) -> Result<(), Error> {\n\n stream.write_all(&buf).await\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 47, "score": 5.062623643891374 }, { "content": " 'read_loop: loop {\n\n let read_future = bounce_stream.read(&mut buf[read..]);\n\n let r = cancelable.allow_cancel(\n\n read_future,\n\n Err(Error::new(ErrorKind::Interrupted, \"Canceled\"))).await?;\n\n\n\n if r == 0 {\n\n log::error!(\"Connection to bounce server {} ended\", bounce_server);\n\n 
bounce_stream.shutdown(Shutdown::Write)?;\n\n continue 'client_loop;\n\n }\n\n\n\n read = read + r;\n\n\n\n if read >= connected.len() {\n\n break 'read_loop;\n\n }\n\n }\n\n\n\n if connected != buf {\n", "file_path": "src/client.rs", "rank": 48, "score": 4.999650967377203 }, { "content": "\n\n match adapter_stream.write_all(b\"connected\").await {\n\n Err(err) => {\n\n log::error!(\"Error starting connection: {}\", err);\n\n continue 'adapter_accept;\n\n },\n\n Ok(()) => {}\n\n }\n\n\n\n run_bridge(xors, stream, \"incoming\".to_string(), adapter_stream, \"bounce-outgoing\".to_string());\n\n }\n\n}\n\n\n\nasync fn accept(listener: TcpListener) -> Result<(TcpListener, TcpStream), Error> {\n\n match listener.accept().await {\n\n Ok((s, _)) => Ok((listener, s)),\n\n Err(err) => Err(err)\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 49, "score": 4.976075283606169 }, { "content": " };\n\n \n\n // client\n\n run_bridge(\n\n xors,\n\n streams.bounce_client_clear_stream.clone(),\n\n \"bounce_client_clear_stream\".to_string(),\n\n streams.bounce_client_encrypted_stream.clone(),\n\n \"bounce_client_encrypted_stream\".to_string());\n\n\n\n streams\n\n }\n\n\n\n #[async_std::test]\n\n async fn bridge_works() {\n\n let streams = start().await;\n\n\n\n let mut write_stream = &streams.initiating_client_clear_stream;\n\n let mut read_stream = &streams.final_client_clear_stream;\n\n\n", "file_path": "src/bridge.rs", "rank": 50, "score": 4.891972953114815 }, { "content": "\n\n let key_1 = Key {\n\n key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32],\n\n size: KeySize::KeySize128\n\n };\n\n\n\n let key_2 = Key {\n\n key: vec![2 as u8, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33],\n\n size: KeySize::KeySize128\n\n };\n\n\n\n let (client_stream, server_stream) = get_socket_streams().await;\n\n\n\n let 
client_authenticate_future = task::spawn(authenticate(key_1, client_stream));\n\n let server_authenticate_future = task::spawn(authenticate(key_2, server_stream));\n\n\n\n let client_authenticate_result = client_authenticate_future.await;\n\n let server_authenticate_result = server_authenticate_future.await;\n\n\n\n match client_authenticate_result {\n", "file_path": "src/auth.rs", "rank": 51, "score": 4.719729306103696 }, { "content": " };\n\n\n\n let mut secret = vec![0u8; key_1.key.len()];\n\n thread_rng().fill_bytes(&mut secret);\n\n\n\n let mut nonce = vec![0u8; key_1.key.len()];\n\n thread_rng().fill_bytes(&mut nonce);\n\n\n\n let encrypted = process(&key_1, &nonce, &secret);\n\n \n\n let decrypted = process(&key_2, &nonce, &encrypted);\n\n assert_ne!(secret, decrypted);\n\n\n\n let decrypted = process(&key_1, &nonce, &encrypted);\n\n assert_eq!(secret, decrypted);\n\n }\n\n}", "file_path": "src/auth.rs", "rank": 52, "score": 4.621841659606011 }, { "content": "\n\n match select(incoming_future, select(peek_future, cancelable.future())).await {\n\n Either::Left((r, _)) => {\n\n let (listener, s) = r?;\n\n incoming_future = task::spawn(accept(listener));\n\n stream = s;\n\n },\n\n Either::Right((select_result, i)) => {\n\n match select_result {\n\n Either::Left((peek_result, _)) => {\n\n incoming_future = i;\n\n match peek_result {\n\n Ok(bytes_sent) => {\n\n let shutdown_result = if bytes_sent > 0 {\n\n log::warn!(\"Adapter stream sent unexpected data: {:?}\", adapter_stream.peer_addr().unwrap());\n\n adapter_stream.shutdown(Shutdown::Both)\n\n } else {\n\n log::info!(\"Adapter stream ended: {:?}\", adapter_stream.peer_addr().unwrap());\n\n adapter_stream.shutdown(Shutdown::Write)\n\n };\n", "file_path": "src/server.rs", "rank": 53, "score": 4.262125004778543 }, { "content": " #[async_std::test]\n\n async fn client_unexpected_write_connection() {\n\n\n\n let (mut adapter_stream, adapter_address, server_future, cancelation_token) = 
get_adapter_stream_and_server_future().await;\n\n\n\n let buf = [0u8];\n\n adapter_stream.write_all(&buf).await.expect(\"Can not write to client stream\");\n\n\n\n let adapter_stream = TcpStream::connect(adapter_address).await.expect(\"Can not connect to the server\");\n\n adapter_stream.shutdown(Shutdown::Both).expect(\"Can not shut down client stream\");\n\n\n\n cancelation_token.cancel();\n\n let err = server_future.await.expect_err(\"Server should terminate\");\n\n assert_eq!(ErrorKind::Interrupted, err.kind(), \"\");\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 54, "score": 3.649906986399014 }, { "content": "use async_std::net::{Shutdown, TcpStream};\n\nuse async_std::prelude::*;\n\nuse async_std::task;\n\nuse async_std::task::JoinHandle;\n\nuse std::io::{ Error, ErrorKind };\n\nuse sync_tokens::cancelation_token::{ CancelationToken, Cancelable };\n\n\n\nuse crate::auth::authenticate;\n\nuse crate::bridge::run_bridge;\n\nuse crate::keys::Key;\n\n\n", "file_path": "src/client.rs", "rank": 55, "score": 3.5605172773040246 }, { "content": "use async_std::net::{IpAddr, Ipv4Addr, Shutdown, TcpListener, TcpStream, SocketAddr};\n\nuse async_std::prelude::*;\n\nuse async_std::task;\n\nuse async_std::task::JoinHandle;\n\nuse std::io::{ Error, ErrorKind };\n\nuse sync_tokens::cancelation_token::{ CancelationToken, Cancelable };\n\nuse sync_tokens::completion_token::{ CompletionToken, Completable };\n\n\n\nuse futures::future::{Either, select};\n\n\n\nuse crate::auth::authenticate;\n\nuse crate::bridge::run_bridge;\n\nuse crate::keys::Key;\n\n\n", "file_path": "src/server.rs", "rank": 56, "score": 3.520926108858293 }, { "content": "\n\n Ok(())\n\n}\n\n\n\nasync fn main_args() -> Result<(), Error> {\n\n let args: Vec<String> = args().collect();\n\n\n\n // Panics are used instead of logging because it's assumed that bounce is being run interactively\n\n\n\n if args.len() < 2 {\n\n panic!(\"Must pass the mode (Server or Client) as the first argument\");\n\n 
}\n\n\n\n match parse_mode(&args[1]) {\n\n Mode::Server => {\n\n if args.len() != 5 {\n\n panic!(\"Please specify the ports as command-line arguments:\\n\\t bounce server [port] [adapter port] [key]\");\n\n }\n\n \n\n let port = parse_port(&args[2]).unwrap();\n", "file_path": "src/main.rs", "rank": 57, "score": 3.346355247962579 }, { "content": " writer.write_all(&mut buf[..bytes_read]).await?;\n\n\n\n log::trace!(\"Wrote {} bytes to {}\", bytes_read, writer_name);\n\n }\n\n}\n\n\n\nasync fn shutdown_both(\n\n clear_stream: TcpStream,\n\n clear_stream_name: String,\n\n clear_stream_shutdown: Shutdown,\n\n encrypted_stream: TcpStream,\n\n encrypted_stream_name: String,\n\n encrypted_stream_shutdown: Shutdown) {\n\n \n\n let clear_flush_future = task::spawn(shutdown(clear_stream.clone(), clear_stream_name.clone(), clear_stream_shutdown));\n\n let encrypted_flush_future = task::spawn(shutdown(encrypted_stream.clone(), encrypted_stream_name.clone(), encrypted_stream_shutdown));\n\n\n\n join(clear_flush_future, encrypted_flush_future).await;\n\n\n\n log::info!(\"Connection ended: {} <-> {}\", clear_stream_name, encrypted_stream_name);\n", "file_path": "src/bridge.rs", "rank": 58, "score": 3.063967514792751 }, { "content": " println!(\"Bounce\");\n\n\n\n setup_logging();\n\n\n\n let result = match var(\"BOUNCE_MODE\") {\n\n Ok(mode) => main_env(mode).await,\n\n Err(_) => main_args().await\n\n };\n\n\n\n match result {\n\n Ok(()) => {},\n\n Err(err) => log::error!(\"Bounce terminated in error:\\n\\t{}\", err)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 2.767542076770108 }, { "content": " server_stream.shutdown(Shutdown::Both).expect(\"Can not shut down server stream\");\n\n\n\n let err = client_future.await.expect_err(\"The client should end in error\");\n\n\n\n assert_eq!(err.kind(), ErrorKind::ConnectionRefused);\n\n }\n\n\n\n #[async_std::test]\n\n async fn server_sends_incorrect_token() {\n\n\n\n let (mut server_stream, client_future) = 
get_server_stream_and_client_future().await;\n\n\n\n server_stream.write_all(b\"xxxxxxxxx\").await.expect(\"Can not send incorrect data\");\n\n\n\n let err = client_future.await.expect_err(\"The client should end in error\");\n\n\n\n assert_eq!(err.kind(), ErrorKind::ConnectionRefused);\n\n\n\n server_stream.shutdown(Shutdown::Write).expect(\"Can not shut down server stream\");\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 60, "score": 1.868665338824827 }, { "content": " Either::Left(r) => match r.0 {\n\n Ok(()) => {\n\n shutdown_both(clear_stream, clear_stream_name.clone(), Shutdown::Write, encrypted_stream, encrypted_stream_name.clone(), Shutdown::Both).await;\n\n },\n\n Err(err) => {\n\n shutdown_both(clear_stream, clear_stream_name.clone(), Shutdown::Both, encrypted_stream, encrypted_stream_name.clone(), Shutdown::Both).await;\n\n log::error!(\"{} -> {} ended in error: {}\", clear_stream_name, encrypted_stream_name, err);\n\n }\n\n },\n\n Either::Right(r) => match r.0 {\n\n Ok(()) => {\n\n shutdown_both(encrypted_stream, encrypted_stream_name.clone(), Shutdown::Write, clear_stream, clear_stream_name.clone(), Shutdown::Both).await;\n\n },\n\n Err(err) => {\n\n shutdown_both(encrypted_stream, encrypted_stream_name.clone(), Shutdown::Both, clear_stream, clear_stream_name.clone(), Shutdown::Both).await;\n\n log::error!(\"{} -> {} ended in error: {}\", encrypted_stream_name, clear_stream_name, err);\n\n }\n\n },\n\n };\n\n}\n", "file_path": "src/bridge.rs", "rank": 61, "score": 1.7117846858762922 }, { "content": "\n\n match shutdown_result {\n\n Ok(_) => {},\n\n Err(err) => log::error!(\"Error shutting down adapter stream: {:?}:, {}\", adapter_stream.peer_addr().unwrap(), err)\n\n }\n\n\n\n continue 'adapter_accept;\n\n },\n\n Err(err) => {\n\n log::error!(\"Adapter stream aborted: {}\", err);\n\n continue 'adapter_accept;\n\n }\n\n }\n\n },\n\n Either::Right(_) => return Err(Error::new(ErrorKind::Interrupted, \"Server terminated\"))\n\n }\n\n }\n\n 
}\n\n\n\n log::info!(\"Incoming clear stream: {:?}\", stream.peer_addr().unwrap());\n", "file_path": "src/server.rs", "rank": 62, "score": 1.7009544400835592 }, { "content": " drop(adapter_listener);\n\n\n\n let (server_future, listening_token, server_cancelation_token) = run_server(client_address.port(), adapter_address.port(), key.clone());\n\n\n\n listening_token.await;\n\n\n\n let listener = TcpListener::bind(socket_addr).await.unwrap();\n\n let (client_future, client_cancelation_token) = run_client(adapter_address.to_string(), listener.local_addr().unwrap().to_string(), key.clone());\n\n\n\n (server_future, client_future, client_address, server_cancelation_token, listener, client_cancelation_token)\n\n }\n\n\n\n #[async_std::test]\n\n async fn happy_path() {\n\n let (server_future, client_future, client_address, server_cancelation_token, listener, client_cancelation_token) = get_server_and_client_futures().await;\n\n\n\n let outgoing_stream = TcpStream::connect(client_address).await.expect(\"Can't connect\");\n\n let (incoming_stream, _) = listener.accept().await.expect(\"Incoming socket didn't come\");\n\n\n\n let mut rng = thread_rng();\n", "file_path": "src/main.rs", "rank": 63, "score": 1.687725174646947 }, { "content": "\n\n // https://stackoverflow.com/a/25577080/1711103\n\n macro_rules! 
matches(\n\n ($expected:pat, $actual:expr) => (\n\n match $actual {\n\n $expected => (),\n\n _ => panic!(\"Unexpected value\")\n\n }\n\n )\n\n );\n\n\n\n #[test]\n\n fn parse_key_test_256() {\n\n let key = vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];\n\n let key_str = key.to_base64(STANDARD);\n\n\n\n let parsed_key = parse_key(&key_str);\n\n assert_eq!(key, parsed_key.key);\n\n matches!(KeySize::KeySize256, parsed_key.size);\n\n }\n\n}", "file_path": "src/keys.rs", "rank": 64, "score": 1.6708928779480177 }, { "content": " log::error!(\"Bounce server did not initiate the connection correctly\");\n\n bounce_stream.shutdown(Shutdown::Both)?;\n\n continue 'client_loop;\n\n }\n\n\n\n match TcpStream::connect(destination_host.clone()).await {\n\n Err(err) => {\n\n log::error!(\"Can not connect to host \\\"{}\\\": {}\", destination_host, err);\n\n break 'client_loop;\n\n },\n\n Ok(destination_stream) => {\n\n\n\n log::info!(\"Bridging connection\");\n\n\n\n run_bridge(xors, destination_stream, \"outgoing\".to_string(), bounce_stream, \"bounce-incoming\".to_string());\n\n }\n\n } \n\n }\n\n\n\n Ok(())\n", "file_path": "src/client.rs", "rank": 65, "score": 1.6179010411904584 }, { "content": " server_emulate_future.await.unwrap();\n\n \n\n match client_authenticate_future.await {\n\n Ok(_) => panic!(\"Failure not detected\"),\n\n Err(err) => assert_eq!(\"This is not a bounce server or client\", err.to_string())\n\n }\n\n }\n\n\n\n #[async_std::test]\n\n async fn authenticate_short_id() {\n\n\n\n let (key, client_stream, server_stream) = get_key_and_socket_streams().await;\n\n\n\n let client_authenticate_future = task::spawn(authenticate(key.clone(), client_stream));\n\n let server_emulate_future = task::spawn(write_buffer(server_stream.clone(), b\"short\".to_vec()));\n\n\n\n server_emulate_future.await.unwrap();\n\n \n\n match client_authenticate_future.await {\n\n Ok(_) => panic!(\"Failure 
not detected\"),\n", "file_path": "src/auth.rs", "rank": 66, "score": 1.3045838274285733 }, { "content": " key: vec![1 as u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32],\n\n size: KeySize::KeySize256\n\n };\n\n\n\n let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0);\n\n let listener = TcpListener::bind(socket_addr).await.unwrap();\n\n let adapter_listener = TcpListener::bind(socket_addr).await.unwrap();\n\n\n\n let client_address = listener.local_addr().unwrap();\n\n let adapter_address = adapter_listener.local_addr().unwrap();\n\n\n\n drop(listener);\n\n drop(adapter_listener);\n\n\n\n let (server_future, listening_token, cancelation_token) = run_server(client_address.port(), adapter_address.port(), key.clone());\n\n\n\n listening_token.await;\n\n\n\n let adapter_stream = TcpStream::connect(adapter_address).await.expect(\"Can not connect to the server\");\n\n\n", "file_path": "src/server.rs", "rank": 67, "score": 1.2578129628666241 }, { "content": " #[async_std::test]\n\n async fn authenticate_works() {\n\n\n\n let (key, client_stream, server_stream) = get_key_and_socket_streams().await;\n\n\n\n let client_authenticate_future = task::spawn(authenticate(key.clone(), client_stream));\n\n let server_authenticate_future = task::spawn(authenticate(key.clone(), server_stream));\n\n\n\n client_authenticate_future.await.unwrap();\n\n server_authenticate_future.await.unwrap();\n\n }\n\n\n\n #[async_std::test]\n\n async fn authenticate_wrong_id() {\n\n\n\n let (key, client_stream, server_stream) = get_key_and_socket_streams().await;\n\n\n\n let client_authenticate_future = task::spawn(authenticate(key.clone(), client_stream));\n\n let server_emulate_future = task::spawn(write_buffer(server_stream.clone(), b\"boXXce\".to_vec()));\n\n\n", "file_path": "src/auth.rs", "rank": 68, "score": 1.2099637605816667 }, { "content": "\n\n // This is the ongoing task to wait for adapter 
sockets\n\n let mut incoming_adapter_future = task::spawn(accept(adapter_listener));\n\n\n\n listening_completable.complete(());\n\n\n\n log::info!(\"Bounce server: Listening for incoming connections on {}, accepting adapter on port {}\", port, adapter_port);\n\n \n\n 'adapter_accept: loop {\n\n\n\n let (listener, mut adapter_stream) = cancelable.allow_cancel(incoming_adapter_future, Err(Error::new(ErrorKind::Interrupted, \"Server terminated\"))).await?;\n\n incoming_adapter_future = task::spawn(accept(listener));\n\n\n\n log::info!(\"Incoming adapter stream: {:?}\", adapter_stream.peer_addr().unwrap());\n\n\n\n let xors = match authenticate(key.clone(), adapter_stream.clone()).await {\n\n Err(err) => {\n\n log::error!(\"Bad client: {}\", err);\n\n match adapter_stream.shutdown(Shutdown::Both) {\n\n Err(err) => log::error!(\"Problem shutting down socket after an authentication error: {}\", err),\n", "file_path": "src/server.rs", "rank": 69, "score": 1.1686566822920836 } ]
Rust
src/routes/proxy/img_proxy.rs
Marc3842h/gitarena
c8c53a7331d4ebc12ea69dfbf6220c11c40526cd
use crate::die; use crate::prelude::HttpRequestExtensions; use crate::utils::reqwest_actix_stream::ResponseStream; use actix_web::http::header::CONTENT_LENGTH; use actix_web::{HttpRequest, HttpResponse, Responder, web}; use anyhow::{Context, Result}; use gitarena_macros::route; use log::debug; use reqwest::Client; use serde::Deserialize; use tokio_compat_02::FutureExt; const PASSTHROUGH_HEADERS: [&str; 6] = [ "cache-control", "content-encoding", "etag", "expires", "last-modified", "transfer-encoding" ]; const ACCEPTED_MIME_TYPES: [&str; 43] = [ "image/bmp", "image/cgm", "image/g3fax", "image/gif", "image/ief", "image/jp2", "image/jpeg", "image/jpg", "image/pict", "image/png", "image/prs.btif", "image/svg+xml", "image/tiff", "image/vnd.adobe.photoshop", "image/vnd.djvu", "image/vnd.dwg", "image/vnd.dxf", "image/vnd.fastbidsheet", "image/vnd.fpx", "image/vnd.fst", "image/vnd.fujixerox.edmics-mmr", "image/vnd.fujixerox.edmics-rlc", "image/vnd.microsoft.icon", "image/vnd.ms-modi", "image/vnd.net-fpx", "image/vnd.wap.wbmp", "image/vnd.xiff", "image/webp", "image/x-cmu-raster", "image/x-cmx", "image/x-icon", "image/x-macpaint", "image/x-pcx", "image/x-pict", "image/x-portable-anymap", "image/x-portable-bitmap", "image/x-portable-graymap", "image/x-portable-pixmap", "image/x-quicktime", "image/x-rgb", "image/x-xbitmap", "image/x-xpixmap", "image/x-xwindowdump" ]; #[route("/api/proxy/{url}", method = "GET", err = "text")] pub(crate) async fn proxy(uri: web::Path<ProxyRequest>, request: HttpRequest) -> Result<impl Responder> { let url = &uri.url; if url.is_empty() { die!(NOT_FOUND, "Invalid url"); } let bytes = hex::decode(url)?; let url = String::from_utf8(bytes)?; let mut client = Client::new().get(&url); if let Some(header_value) = request.get_header("if-modified-since") { client = client.header("if-modified-since", header_value); } if let Some(header_value) = request.get_header("if-none-match") { client = client.header("if-none-match", header_value); } if let 
Some(header_value) = request.get_header("cache-control") { client = client.header("cache-control", header_value); } debug!("Image proxy request for {}", &url); let gateway_response = client.send().compat().await.context("Failed to send request to gateway")?; let mut response = HttpResponse::build(gateway_response.status()); if let Some(length) = gateway_response.content_length() { if length > 5242880 { die!(BAD_GATEWAY, "Content too big"); } response.append_header((CONTENT_LENGTH, length.to_string())); } for (name, value) in gateway_response.headers() { let lowered_name = name.as_str().to_lowercase(); let value_str = value.to_str()?; if PASSTHROUGH_HEADERS.contains(&lowered_name.as_str()) { response.append_header((name.as_str(), value_str)); } if lowered_name == "content-type" && !ACCEPTED_MIME_TYPES.contains(&value_str) { die!(BAD_GATEWAY, "Response was not an image"); } } Ok(response.streaming(ResponseStream { stream: gateway_response.bytes_stream() })) } #[derive(Deserialize)] pub(crate) struct ProxyRequest { pub(crate) url: String }
use crate::die; use crate::prelude::HttpRequestExtensions; use crate::utils::reqwest_actix_stream::ResponseStream; use actix_web::http::header::CONTENT_LENGTH; use actix_web::{HttpRequest, HttpResponse, Responder, web}; use anyhow::{Context, Result}; use gitarena_macros::route; use log::debug; use reqwest::Client; use serde::Deserialize; use tokio_compat_02::FutureExt; const PASSTHROUGH_HEADERS: [&str; 6] = [ "cache-control", "content-encoding", "etag", "expires", "last-modified", "transfer-encoding" ]; const ACCEPTED_MIME_TYPES: [&str; 43] = [ "image/bmp", "image/cgm", "image/g3fax", "image/gif", "image/ief", "image/jp2", "image/jpeg", "image/jpg", "image/pict", "image/png", "image/prs.btif", "image/svg+xml", "image/tiff", "image/vnd.adobe.photoshop", "image/vnd.djvu", "image/vnd.dwg", "image/vnd.dxf", "image/vnd.fastbidsheet", "image/vnd.fpx", "image/vnd.fst", "image/vnd.fujixerox.edmics-mmr", "image/vnd.fujixerox.edmics-rlc", "image/vnd.microsoft.icon", "image/vnd.ms-modi", "image/vnd.net-fpx", "image/vnd.wap.wbmp", "image/vnd.xiff", "image/webp", "image/x-cmu-raster", "image/x-cmx", "image/x-icon", "image/x-macpaint", "image/x-pcx", "image/x-pict", "image/x-portable-anymap", "image/x-portable-bitmap", "image/x-portable-graymap", "image/x-portable-pixmap", "image/x-quicktime", "image/x-rgb", "image/x-xbitmap", "image/x-xpixmap", "image/x-xwindowdump" ]; #[route("/api/proxy/{url}", method = "GET", err = "text")] pub(crate) async fn proxy(uri: web::Path<ProxyRequest>, request: HttpRequest) -> Result<impl Responder> { let url = &uri.url; if url.is_empty() { die!(NOT_FOUND, "Invalid url"); } let bytes = hex::decode(url)?; let url = String::from_utf8(bytes)?; let mut client = Client::new().get(&url); if let Some(header_value) = request.get_header("if-modified-since") { client = client.header("if-modified-since", header_value); } if let Some(header_value) = request.get_header("if-none-match") { client = client.header("if-none-match", header_value); } if let 
Some(header_value) = request.get_header("cache-control") { client = client.header("cache-control", header_value); } debug!("Image proxy request for {}", &url); let gateway_response = client.send().compat().await.context("Failed to send request to gateway")?; let mut response = HttpResponse::build(gateway_response.status()); if let Some(length) = gateway_response.content_length() { if length > 5242880 { die!(BAD_GATEWAY, "Content too big"); } response.append_header((CONTENT_LENGTH, length.to_string())); } for (name, value) in gateway_response.headers() { let lowered_name = name.as_str().to_lowercase(); let value_str = value.to_str()?; if PASSTHROUGH_HEADERS.contains(&lowered_name.as_str()) { respon
#[derive(Deserialize)] pub(crate) struct ProxyRequest { pub(crate) url: String }
se.append_header((name.as_str(), value_str)); } if lowered_name == "content-type" && !ACCEPTED_MIME_TYPES.contains(&value_str) { die!(BAD_GATEWAY, "Response was not an image"); } } Ok(response.streaming(ResponseStream { stream: gateway_response.bytes_stream() })) }
function_block-function_prefixed
[ { "content": "fn extract_ip(request: &HttpRequest) -> IpNetwork {\n\n let connection_info = request.connection_info();\n\n let ip_str = connection_info.realip_remote_addr().unwrap_or(\"No user agent sent\");\n\n\n\n match IpNetwork::from_str(ip_str) {\n\n Ok(ip_network) => ip_network,\n\n Err(err) => {\n\n // If we got the local address, it includes the port so try again but with port stripped\n\n if let Some((ip, _)) = ip_str.split_once(':') {\n\n IpNetwork::from_str(ip).unwrap_or_else(|err| default_ip_address(Some(err)))\n\n } else {\n\n default_ip_address(Some(err))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 0, "score": 125714.74497743486 }, { "content": "fn init_logger() -> Result<WorkerGuard> {\n\n let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|err| {\n\n let not_found = err.source()\n\n .map(|o| o.downcast_ref::<VarError>().map_or_else(|| false, |err| matches!(err, VarError::NotPresent)))\n\n .unwrap_or(false);\n\n\n\n if !not_found {\n\n eprintln!(\"Warning: Unable to parse `{}` environment variable, using default values: {}\", EnvFilter::DEFAULT_ENV, err);\n\n }\n\n\n\n let level = if cfg!(debug_assertions) {\n\n LevelFilter::DEBUG\n\n } else {\n\n LevelFilter::INFO\n\n };\n\n\n\n EnvFilter::default()\n\n .add_directive(level.into())\n\n .add_directive(\"askalono=warn\".parse().unwrap_or_log())\n\n .add_directive(\"globset=info\".parse().unwrap_or_log())\n", "file_path": "src/main.rs", "rank": 1, "score": 105225.97832783294 }, { "content": "#[cfg(not(windows))]\n\n#[inline]\n\nfn internal_is_fs_legal(_: &str) -> bool {\n\n true\n\n}\n", "file_path": "src/utils/identifiers.rs", "rank": 2, "score": 99624.46976929058 }, { "content": "#[cfg(windows)]\n\nfn internal_is_fs_legal(input: &str) -> bool {\n\n const ILLEGAL_FILENAMES: [&str; 25] = [\n\n \"con\", \"prn\", \"aux\", \"nul\", \"lst\",\n\n \"com0\", \"com1\", \"com2\", \"com3\", \"com4\", \"com5\", \"com6\", \"com7\", \"com8\", \"com9\",\n\n \"lpt0\", 
\"lpt1\", \"lpt2\", \"lpt3\", \"lpt4\", \"lpt5\", \"lpt6\", \"lpt7\", \"lpt8\", \"lpt9\"\n\n ];\n\n\n\n // Strip the extension if one exists (as Windows ignores them as well)\n\n let lowercase = if let Some((file_name, _)) = input.rsplit_once('.') {\n\n file_name\n\n } else {\n\n input\n\n }.to_lowercase(); // These invalid file names are valid for both cases\n\n\n\n !ILLEGAL_FILENAMES.contains(&lowercase.as_str())\n\n}\n\n\n", "file_path": "src/utils/identifiers.rs", "rank": 3, "score": 97703.70830889847 }, { "content": "fn default_ip_address<E: Error>(err: Option<E>) -> IpNetwork {\n\n if let Some(error) = err {\n\n warn!(\"Unable to parse ip address: {}\", error);\n\n }\n\n\n\n // 100::/64 is a valid, reserved black hole IPv6 address block: https://en.wikipedia.org/wiki/Reserved_IP_addresses#IPv6\n\n const RESERVED_IP: Ipv6Addr = Ipv6Addr::new(0x100, 0, 0, 0, 0, 0, 0, 0);\n\n\n\n Ipv6Network::new(RESERVED_IP, 64).unwrap_or_log().into()\n\n}\n", "file_path": "src/session.rs", "rank": 4, "score": 85193.68076087149 }, { "content": "fn u16_to_hex(value: u16) -> [u8; 4] {\n\n let mut buffer = [0u8; 4];\n\n hex::encode_to_slice((value as u16).to_be_bytes(), &mut buffer).unwrap_or_log();\n\n buffer\n\n}\n", "file_path": "src/git/io/writer.rs", "rank": 5, "score": 78447.61019616158 }, { "content": "fn heim_size_to_bytes(info: Information) -> usize {\n\n info.get::<information::byte>() as usize\n\n}\n", "file_path": "src/routes/admin/dashboard.rs", "rank": 6, "score": 78025.43675542154 }, { "content": "fn main() {\n\n println!(\"GitArena does currently not provide SSH access.\");\n\n\n\n exit(1);\n\n}\n", "file_path": "gitarena-ssh/src/main.rs", "rank": 7, "score": 58743.17115376743 }, { "content": "fn init_askalono() -> Store {\n\n let file = File::open(\"askalono-cache.bin.zstd\").expect_or_log(\"Failed to open askalono cache file\");\n\n\n\n Store::from_cache(file).expect_or_log(\"Failed to parse askalono cache file\")\n\n}\n\n\n\npub(crate) const fn 
license_file_names() -> [&'static [u8]; 18] {\n\n [\n\n b\"copying\", b\"copyright\", b\"eula\", b\"license\", b\"notice\", b\"patents\", b\"unlicense\", b\"agpl\", b\"gpl\",\n\n b\"lgpl\", b\"apache-\", b\"bsd-\", b\"cc-by-\", b\"gfdl-\", b\"gnu-\", b\"mit-\", b\"mpl-\", b\"ofl-\"\n\n ]\n\n}\n", "file_path": "src/licenses.rs", "rank": 8, "score": 56170.195351018214 }, { "content": "fn init_tera() -> Tera {\n\n let mut tera = match Tera::new(\"templates/html/**/*\") {\n\n Ok(tera) => tera,\n\n Err(err) => panic!(\"{}\", err)\n\n };\n\n\n\n tera.register_filter(\"human_prefix\", filters::human_prefix);\n\n tera.register_filter(\"human_time\", filters::human_time);\n\n\n\n tera.register_tester(\"empty\", tests::empty);\n\n tera.register_tester(\"none\", tests::none);\n\n tera.register_tester(\"some\", tests::some);\n\n\n\n tera\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! template_context {\n\n ($input:expr) => {\n\n Some($input.iter().cloned().collect())\n", "file_path": "src/templates/mod.rs", "rank": 9, "score": 55013.89599272496 }, { "content": "fn parse_template(template_path: String) -> Template {\n\n match plain::parse(template_path) {\n\n Ok(template) => template,\n\n Err(err) => panic!(\"Failed to parse template: {}\", err)\n\n }\n\n}\n\n\n", "file_path": "src/templates/mod.rs", "rank": 10, "score": 48215.47659178794 }, { "content": "fn format_heim_time(time: Time) -> String {\n\n let duration = Duration::seconds(time.get::<heim::units::time::second>() as i64);\n\n let human_time = HumanTime::from(duration);\n\n\n\n human_time.to_text_en(Accuracy::Rough, Tense::Present)\n\n}\n\n\n", "file_path": "src/routes/admin/dashboard.rs", "rank": 11, "score": 47313.02867227541 }, { "content": "fn query_string_without_offset(input: &QString) -> String {\n\n input.to_pairs()\n\n .iter()\n\n .filter(|(key, _)| key != &\"offset\")\n\n .map(|(key, value)| format!(\"{}={}\", key, value))\n\n .collect::<Vec<String>>()\n\n .join(\"&\")\n\n}\n", "file_path": 
"src/routes/explore.rs", "rank": 12, "score": 46460.96281065082 }, { "content": "#[proc_macro]\n\npub fn from_config(input: TokenStream) -> TokenStream {\n\n internal_from_config(input)\n\n}\n\n\n", "file_path": "gitarena-macros/src/lib.rs", "rank": 18, "score": 45059.40033114758 }, { "content": "/// Transforms routes which are only a \"/\" to an empty string. This allows scoped routes to have index\n\n/// pages without having to declare their route with a literal empty string (which is quite confusing).\n\nfn sanitize_first_argument(literal: &Lit) -> Option<NestedMeta> {\n\n if let Lit::Str(str) = literal {\n\n let value = str.value();\n\n\n\n if value.is_empty() {\n\n emit_error! {\n\n str.span(),\n\n \"route cannot be empty\";\n\n help = \"if you want to match on index, use \\\"/\\\"\"\n\n }\n\n } else if value == \"/\" {\n\n return Some(NestedMeta::Lit(Lit::Str(LitStr::new(\"\", str.span()))));\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "gitarena-macros/src/route.rs", "rank": 21, "score": 44253.61199122056 }, { "content": "#[proc_macro]\n\npub fn from_optional_config(input: TokenStream) -> TokenStream {\n\n internal_from_optional_config(input)\n\n}\n", "file_path": "gitarena-macros/src/lib.rs", "rank": 22, "score": 44253.61199122056 }, { "content": "fn match_error_type(input: &Lit) -> Option<ErrorDisplayType> {\n\n if let Lit::Str(str) = input {\n\n let value = str.value().to_lowercase();\n\n\n\n return match value.as_str() {\n\n \"html\" => Some(ErrorDisplayType::Html),\n\n \"json\" => Some(ErrorDisplayType::Json),\n\n \"git\" => Some(ErrorDisplayType::Git),\n\n \"text\" | \"plain\" => Some(ErrorDisplayType::Plain),\n\n \"htmx!\" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Unset))),\n\n \"htmx+html\" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Html))),\n\n \"htmx+json\" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Json))),\n\n \"htmx+git\" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Git))),\n\n 
\"htmx+text\" | \"htmx+plain\" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Plain))),\n\n \"htmx\" => {\n\n emit_error! {\n\n input.span(),\n\n \"htmx error handler requires fallback\";\n\n help = \"if this can never happen, define err as \\\"htmx!\\\" (dangerous!)\"\n\n }\n", "file_path": "gitarena-macros/src/route.rs", "rank": 23, "score": 43490.43070160739 }, { "content": "#[proc_macro_attribute]\n\n#[proc_macro_error]\n\npub fn route(args: TokenStream, input: TokenStream) -> TokenStream {\n\n internal_route(args, input)\n\n}\n\n\n", "file_path": "gitarena-macros/src/lib.rs", "rank": 24, "score": 40565.03722146187 }, { "content": "use chrono::{DateTime, FixedOffset};\n\nuse serde::Serialize;\n\n\n\n#[derive(Serialize)]\n\npub(crate) struct RepoFile<'a> {\n\n pub(crate) file_type: u16,\n\n pub(crate) file_name: &'a str,\n\n pub(crate) commit: GitCommit,\n\n pub(crate) submodule_target_oid: Option<String>,\n\n}\n\n\n\n#[derive(Serialize)]\n\npub(crate) struct RepoReadme<'a> {\n\n pub(crate) file_name: &'a str,\n\n pub(crate) content: &'a str\n\n}\n\n\n\n#[derive(Serialize)]\n\npub(crate) struct GitCommit {\n\n pub(crate) oid: String,\n", "file_path": "src/templates/web.rs", "rank": 25, "score": 38453.927227495755 }, { "content": " pub(crate) message: String,\n\n\n\n pub(crate) time: i64, // Unix timestamp\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub(crate) date: Option<DateTime<FixedOffset>>,\n\n\n\n pub(crate) author_name: String,\n\n pub(crate) author_uid: Option<i32>,\n\n pub(crate) author_email: String\n\n}\n", "file_path": "src/templates/web.rs", "rank": 26, "score": 38442.38372293303 }, { "content": "use anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse serde::de::DeserializeOwned;\n\nuse serde_json::{Map, Value};\n\n\n\n#[async_trait]\n\npub(crate) trait OAuthRequest<T: DeserializeOwned = SerdeMap> {\n\n async fn request_data(endpoint: &'static str, token: &str) -> Result<T>;\n\n}\n\n\n\npub(crate) type SerdeMap = 
Map<String, Value>;\n", "file_path": "src/sso/oauth_request.rs", "rank": 27, "score": 37130.70103202549 }, { "content": "use actix_web::web::ServiceConfig;\n\n\n\npub(crate) mod img_proxy;\n\n\n\npub(crate) fn init(config: &mut ServiceConfig) {\n\n config.service(img_proxy::proxy);\n\n}\n", "file_path": "src/routes/proxy/mod.rs", "rank": 28, "score": 37036.99647866374 }, { "content": "create type type_constraint as enum ('boolean', 'char', 'int', 'string', 'bytes');\n\n\n", "file_path": "schema.sql", "rank": 29, "score": 30712.40808314614 }, { "content": "use crate::die;\n\nuse crate::git::basic_auth;\n\nuse crate::git::capabilities::capabilities;\n\nuse crate::git::ls_refs::ls_refs_all;\n\nuse crate::prelude::*;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitRequest;\n\n\n\nuse actix_web::http::header::CONTENT_TYPE;\n\nuse actix_web::{Either, HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse sqlx::{Executor, PgPool, Pool, Postgres};\n\n\n\n#[route(\"/{username}/{repository}.git/info/refs\", method = \"GET\", err = \"text\")]\n\npub(crate) async fn info_refs(uri: web::Path<GitRequest>, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let query_string = request.q_string();\n\n\n\n let service = match query_string.get(\"service\") {\n\n Some(value) => value.trim(),\n", "file_path": "src/routes/repository/git/info_refs.rs", "rank": 30, "score": 39.00436356810795 }, { "content": "use sqlx::PgPool;\n\n\n\n#[route(\"/sso/{service}\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn initiate_sso(sso_request: web::Path<SSORequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n if matches!(web_user, WebUser::Authenticated(_)) {\n\n die!(UNAUTHORIZED, \"Already logged in\");\n\n }\n\n\n\n let provider = SSOProviderType::from_str(sso_request.service.as_str())\n\n .map_err(|_| err!(BAD_REQUEST, \"Unknown service\"))?;\n\n 
let provider_impl = provider.get_implementation();\n\n\n\n // TODO: Save token in cache to check for CSRF\n\n let (url, _token) = SSOProvider::generate_auth_url(provider_impl.deref(), &provider, &db_pool).await?;\n\n\n\n Ok(HttpResponse::TemporaryRedirect().append_header((LOCATION, url.to_string())).finish())\n\n}\n\n\n\n#[route(\"/sso/{service}/callback\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn sso_callback(sso_request: web::Path<SSORequest>, id: Identity, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n", "file_path": "src/routes/user/sso.rs", "rank": 31, "score": 38.313153480533444 }, { "content": "use crate::prelude::HttpRequestExtensions;\n\nuse crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitRequest;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err};\n\n\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse log::debug;\n\nuse serde_json::json;\n\nuse sqlx::{Executor, PgPool, Postgres};\n\n\n\n#[route(\"/api/repo/{username}/{repository}/star\", method = \"GET\", err = \"htmx+json\")]\n\npub(crate) async fn get_star(uri: web::Path<GitRequest>, web_user: WebUser, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(&repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n", "file_path": "src/routes/repository/api/star.rs", "rank": 32, "score": 36.701037492058965 }, { "content": "use serde::Deserialize;\n\nuse sqlx::PgPool;\n\nuse tokio_compat_02::FutureExt;\n\n\n\n#[route(\"/api/avatar/{user_id}\", method = \"GET\", err = \"text\")]\n\npub(crate) async fn 
get_avatar(avatar_request: web::Path<AvatarRequest>, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let (gravatar_enabled, avatars_dir): (bool, String) = from_config!(\n\n \"avatars.gravatar\" => bool,\n\n \"avatars.dir\" => String\n\n );\n\n\n\n let query_string = request.q_string();\n\n\n\n if !query_string.has(\"override\") {\n\n let path_str = format!(\"{}/{}.jpg\", avatars_dir, avatar_request.user_id);\n\n let path = Path::new(path_str.as_str());\n\n\n\n // User has set an avatar, return it\n\n if path.is_file() {\n\n return Ok(send_image(path, &request).await.context(\"Failed to read local image file\")?);\n", "file_path": "src/routes/user/avatar.rs", "rank": 33, "score": 36.621980874453044 }, { "content": "use crate::git::GitoxideCacheList;\n\nuse crate::git::utils::{read_blob_content, repo_files_at_ref};\n\nuse crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitTreeRequest;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err};\n\n\n\nuse actix_web::{HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse bstr::ByteSlice;\n\nuse git_repository::objs::Tree;\n\nuse git_repository::refs::file::find::existing::Error as GitoxideFindError;\n\nuse gitarena_macros::route;\n\nuse serde_json::json;\n\nuse sqlx::PgPool;\n\n\n\n#[route(\"/api/repo/{username}/{repository}/tree/{tree:.*}/readme\", method = \"GET\", err = \"json\")]\n\npub(crate) async fn readme(uri: web::Path<GitTreeRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n", "file_path": "src/routes/repository/api/repo_readme.rs", "rank": 34, "score": 35.38478524216688 }, { "content": "use git_repository::protocol::transport::packetline::{PacketLineRef, StreamingPeekableIter};\n\nuse gitarena_macros::route;\n\nuse log::warn;\n\nuse memmem::{Searcher, TwoWaySearcher};\n\nuse 
sqlx::PgPool;\n\n\n\n#[route(\"/{username}/{repository}.git/git-receive-pack\", method = \"POST\", err = \"git\")]\n\npub(crate) async fn git_receive_pack(uri: web::Path<GitRequest>, mut body: web::Payload, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let content_type = request.get_header(\"content-type\").unwrap_or_default();\n\n let accept_header = request.get_header(\"accept\").unwrap_or_default();\n\n\n\n if content_type != \"application/x-git-receive-pack-request\" || accept_header != \"application/x-git-receive-pack-result\" {\n\n die!(BAD_REQUEST);\n\n }\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let user_option: Option<(i32,)> = sqlx::query_as(\"select id from users where lower(username) = lower($1) limit 1\")\n\n .bind(&uri.username)\n\n .fetch_optional(&mut transaction)\n", "file_path": "src/routes/repository/git/git_receive_pack.rs", "rank": 35, "score": 34.654206160190896 }, { "content": "use crate::config::{get_optional_setting, get_setting};\n\nuse crate::prelude::*;\n\nuse crate::session::Session;\n\nuse crate::user::{User, WebUser};\n\nuse crate::utils::identifiers::{is_username_taken, validate_username};\n\nuse crate::verification::send_verification_mail;\n\nuse crate::{captcha, crypto, die, render_template};\n\n\n\nuse actix_identity::Identity;\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse log::info;\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::PgPool;\n\nuse tera::Context;\n\n\n\n#[route(\"/register\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn get_register(web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n", "file_path": "src/routes/user/user_create.rs", "rank": 36, "score": 34.60775766365179 }, { "content": "use crate::git::history::{all_branches, all_commits, all_tags};\n\nuse crate::prelude::*;\n\nuse 
crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitTreeRequest;\n\nuse crate::templates::web::GitCommit;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err, render_template};\n\n\n\nuse actix_web::{HttpRequest, Responder, web};\n\nuse anyhow::Result;\n\nuse bstr::ByteSlice;\n\nuse git_repository::refs::file::find::existing::Error as GitoxideFindError;\n\nuse gitarena_macros::route;\n\nuse sqlx::PgPool;\n\nuse tera::Context;\n\n\n\n#[route(\"/{username}/{repository}/tree/{tree:.*}/commits\", method = \"GET\", err = \"htmx+html\")]\n\npub(crate) async fn commits(uri: web::Path<GitTreeRequest>, web_user: WebUser, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n", "file_path": "src/routes/repository/commits.rs", "rank": 37, "score": 34.55526589515874 }, { "content": " let repo = Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n render(Some(uri.tree.as_str()), repo, &uri.username, web_user, transaction).await\n\n}\n\n\n\n#[route(\"/{username}/{repository}\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn view_repo(uri: web::Path<GitRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n render(None, repo, &uri.username, web_user, transaction).await\n\n}\n", "file_path": "src/routes/repository/repo_view.rs", "rank": 38, "score": 33.92387723376152 }, { "content": "use crate::die;\n\n\n\nuse actix_web::{Responder, web};\n\nuse anyhow::Result;\n\nuse 
gitarena_macros::route;\n\nuse log::info;\n\nuse serde::Deserialize;\n\nuse serde_json::json;\n\nuse sqlx::PgPool;\n\nuse tracing_unwrap::OptionExt;\n\n\n\n#[route(\"/api/verify/{token}\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn verify(verify_request: web::Path<VerifyRequest>, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let token = &verify_request.token;\n\n\n\n if token.len() != 32 || !token.chars().all(|c| c.is_ascii_hexdigit()) {\n\n die!(BAD_REQUEST, \"Token is illegal\");\n\n }\n\n\n\n let mut transaction = db_pool.begin().await?;\n", "file_path": "src/routes/user/user_verify.rs", "rank": 39, "score": 33.893532544047865 }, { "content": "use crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitRequest;\n\nuse crate::user::WebUser;\n\nuse crate::{die, err};\n\n\n\nuse actix_web::{HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse sqlx::PgPool;\n\n\n\n#[route(\"/api/repo/{username}/{repository}\", method = \"GET\", err = \"json\")]\n\npub(crate) async fn meta(uri: web::Path<GitRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let (user_id,): (i32,) = sqlx::query_as(\"select id from users where lower(username) = lower($1) limit 1\")\n\n .bind(&uri.username)\n\n .fetch_optional(&mut transaction)\n\n .await?\n\n .ok_or_else(|| err!(NOT_FOUND, \"Not found\"))?;\n", "file_path": "src/routes/repository/api/repo_meta.rs", "rank": 40, "score": 33.8378224154106 }, { "content": " let avatars_dir: String = from_config!(\"avatars.dir\" => String);\n\n\n\n let mut field = match payload.try_next().await {\n\n Ok(Some(field)) => field,\n\n Ok(None) => die!(BAD_REQUEST, \"No multipart field found\"),\n\n Err(err) => return Err(err.into())\n\n };\n\n\n\n let content_disposition = field.content_disposition();\n\n let file_name = 
content_disposition.get_filename().ok_or_else(|| err!(BAD_REQUEST, \"No file name\"))?;\n\n let extension = file_name.rsplit_once('.')\n\n .map(|(_, ext)| ext.to_owned())\n\n .ok_or_else(|| err!(BAD_REQUEST, \"Invalid file name\"))?;\n\n\n\n let mut bytes = web::BytesMut::new();\n\n\n\n while let Some(chunk) = field.try_next().await.context(\"Failed to read multipart data chunk\")? {\n\n bytes.extend_from_slice(chunk.as_ref());\n\n }\n\n\n", "file_path": "src/routes/user/avatar.rs", "rank": 41, "score": 33.53625324785531 }, { "content": "use crate::config::get_optional_setting;\n\nuse crate::die;\n\nuse crate::privileges::repo_visibility::RepoVisibility;\n\nuse crate::repository::Repository;\n\nuse crate::user::WebUser;\n\nuse crate::utils::identifiers::{is_fs_legal, is_reserved_repo_name, is_valid};\n\n\n\nuse actix_web::{HttpResponse, Responder, web};\n\nuse sqlx::PgPool;\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse serde::{Deserialize, Serialize};\n\nuse log::info;\n\n\n\n#[route(\"/api/repo\", method = \"POST\", err = \"json\")]\n\npub(crate) async fn create(web_user: WebUser, body: web::Json<CreateJsonRequest>, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let user = web_user.into_user()?;\n\n\n", "file_path": "src/routes/repository/api/create_repo.rs", "rank": 42, "score": 33.44516468384678 }, { "content": " // TODO: Maybe generate own identicons? 
-> There are crates for this\n\n\n\n let path_str = format!(\"{}/default.jpg\", avatars_dir);\n\n let path = Path::new(path_str.as_str());\n\n\n\n Ok(send_image(path, &request).await.context(\"Failed to read default avatar file\")?)\n\n}\n\n\n\n#[route(\"/api/avatar\", method = \"PUT\", err = \"text\")]\n\npub(crate) async fn put_avatar(web_user: WebUser, mut payload: Multipart, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n if matches!(web_user, WebUser::Anonymous) {\n\n die!(UNAUTHORIZED, \"No logged in\");\n\n }\n\n\n\n let user = web_user.into_user()?;\n\n\n\n if user.disabled {\n\n die!(FORBIDDEN, \"User is disabled\");\n\n }\n\n\n", "file_path": "src/routes/user/avatar.rs", "rank": 43, "score": 33.083824135890886 }, { "content": "\n\n if let Some(header_value) = request.get_header(\"if-modified-since\") {\n\n client = client.header(\"if-modified-since\", header_value);\n\n }\n\n\n\n let gateway_response = client.send().compat().await.context(\"Failed to send request to Gravatar\")?;\n\n let mut response = HttpResponse::build(gateway_response.status());\n\n\n\n let headers = gateway_response.headers();\n\n\n\n if let Some(cache_control) = headers.get(\"cache-control\") {\n\n response.append_header((CACHE_CONTROL, cache_control.to_str()?));\n\n }\n\n\n\n if let Some(last_modified) = headers.get(\"last-modified\") {\n\n response.append_header((LAST_MODIFIED, last_modified.to_str()?));\n\n }\n\n\n\n Ok(response.streaming(ResponseStream {\n\n stream: gateway_response.bytes_stream()\n\n }))\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub(crate) struct AvatarRequest {\n\n user_id: i32\n\n}\n", "file_path": "src/routes/user/avatar.rs", "rank": 44, "score": 32.985915155305726 }, { "content": "\n\n context.try_insert(\"settings\", &settings)?;\n\n\n\n render_template!(\"admin/settings.html\", context, transaction)\n\n}\n\n\n\n#[route(\"/settings\", method = \"PATCH\", err = \"htmx+text\")]\n\npub(crate) async fn patch_settings(data: web::Form<HashMap<String, 
String>>, web_user: WebUser, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n if !user.admin {\n\n die!(FORBIDDEN, \"Not allowed\");\n\n }\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n let once = Once::new();\n\n\n\n for (key, value) in data.iter() {\n\n let setting = sqlx::query_as::<_, Setting>(\"select * from settings where key = $1 limit 1\")\n\n .bind(key.as_str())\n", "file_path": "src/routes/admin/settings.rs", "rank": 45, "score": 32.4542739827296 }, { "content": "use crate::config::{Setting, TypeConstraint};\n\nuse crate::prelude::HttpRequestExtensions;\n\nuse crate::user::WebUser;\n\nuse crate::{config, die, err, render_template};\n\n\n\nuse std::collections::HashMap;\n\nuse std::sync::Once;\n\n\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::{Context as _, Result};\n\nuse gitarena_macros::route;\n\nuse multimap::MultiMap;\n\nuse sqlx::PgPool;\n\nuse tera::Context;\n\n\n\n#[route(\"/settings\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn get_settings(web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n if !user.admin {\n", "file_path": "src/routes/admin/settings.rs", "rank": 46, "score": 32.40594396783404 }, { "content": "use crate::prelude::{ContextExtensions, HttpRequestExtensions};\n\nuse crate::privileges::repo_visibility::RepoVisibility;\n\nuse crate::user::WebUser;\n\nuse crate::{err, render_template};\n\n\n\nuse std::fmt::{Display, Formatter, Result as FmtResult};\n\n\n\nuse actix_web::{HttpRequest, Responder, web};\n\nuse anyhow::Result;\n\nuse derive_more::Display;\n\nuse gitarena_macros::route;\n\nuse qstring::QString;\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::FromRow;\n\nuse sqlx::{Executor, PgPool, Postgres};\n\nuse tera::Context;\n\n\n\n#[route(\"/explore\", method = \"GET\", err = \"htmx+html\")]\n\npub(crate) async fn 
explore(web_user: WebUser, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let query_string = request.q_string();\n", "file_path": "src/routes/explore.rs", "rank": 47, "score": 32.31184147716054 }, { "content": "\n\n let sorting = query_string.get(\"sort\").unwrap_or(\"stars_desc\");\n\n let (sort_method, order) = Order::parse(sorting).ok_or_else(|| err!(BAD_REQUEST, \"Invalid order\"))?;\n\n let htmx_request = request.get_header(\"hx-request\").is_some();\n\n let options = ExploreOptions::parse(&query_string, &web_user, sort_method, order, htmx_request);\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n let mut context = Context::new();\n\n\n\n context.insert_web_user(&web_user)?;\n\n\n\n context.try_insert(\"repositories\", &get_repositories(&options, &mut transaction).await?)?;\n\n context.try_insert(\"options\", &options)?;\n\n context.try_insert(\"query_string\", query_string_without_offset(&query_string).as_str())?;\n\n\n\n // Only send a partial result (only the component) if it's a request by htmx\n\n if options.htmx_request {\n\n return render_template!(\"explore_list_component.html\", context, transaction);\n\n }\n\n\n", "file_path": "src/routes/explore.rs", "rank": 48, "score": 32.04358742348846 }, { "content": " let (author_name, author_uid, author_email) = last_commit.author().try_disassemble(&mut transaction).await;\n\n\n\n context.try_insert(\"last_commit\", &GitCommit {\n\n oid: format!(\"{}\", last_commit_oid),\n\n message: last_commit.message().unwrap_or_default().to_owned(),\n\n time: last_commit.time().seconds(),\n\n date: None,\n\n author_name,\n\n author_uid,\n\n author_email\n\n })?;\n\n\n\n render_template!(\"repo/index.html\", context, transaction)\n\n}\n\n\n\n#[route(\"/{username}/{repository}/tree/{tree:.*}\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn view_repo_tree(uri: web::Path<GitTreeRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let 
mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n", "file_path": "src/routes/repository/repo_view.rs", "rank": 49, "score": 31.2522742526641 }, { "content": " die!(NOT_FOUND, \"Not found\");\n\n }\n\n\n\n if !has_star(&user, &repo, &mut transaction).await? {\n\n die!(CONFLICT, \"Not starred\");\n\n }\n\n\n\n remove_star(&user, &repo, &mut transaction).await?;\n\n\n\n transaction.commit().await?;\n\n\n\n Ok(HttpResponse::NoContent().finish())\n\n}\n\n\n\n#[route(\"/api/repo/{username}/{repository}/star\", method = \"PUT\", err = \"text\")]\n\npub(crate) async fn put_star(uri: web::Path<GitRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n\n", "file_path": "src/routes/repository/api/star.rs", "rank": 50, "score": 31.197255959485695 }, { "content": "use crate::mail::Email;\n\nuse crate::render_template;\n\nuse crate::session::Session;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{crypto, die, err};\n\n\n\nuse actix_identity::Identity;\n\nuse actix_web::http::header::LOCATION;\n\nuse actix_web::http::StatusCode;\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::{from_config, route};\n\nuse serde::Deserialize;\n\nuse sqlx::PgPool;\n\nuse tera::Context;\n\nuse tracing_unwrap::OptionExt;\n\nuse log::debug;\n\n\n\n#[route(\"/login\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn get_login(web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n", "file_path": "src/routes/user/user_login.rs", "rank": 51, "score": 30.96134523486527 }, { "content": "use crate::die;\n\nuse crate::prelude::HttpRequestExtensions;\n\nuse crate::session::Session;\n\n\n\nuse actix_identity::Identity;\n\nuse 
actix_web::http::header::LOCATION;\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse gitarena_macros::route;\n\nuse log::debug;\n\nuse sqlx::PgPool;\n\n\n\n#[route(\"/logout\", method = \"POST\", err = \"htmx+html\")]\n\npub(crate) async fn logout(request: HttpRequest, id: Identity, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n if id.identity().is_none() {\n\n // Maybe just redirect to home page?\n\n die!(UNAUTHORIZED, \"Already logged out\");\n\n }\n\n\n\n let mut transaction = db_pool.begin().await?;\n", "file_path": "src/routes/user/user_logout.rs", "rank": 52, "score": 30.671355509115152 }, { "content": "use crate::die;\n\nuse crate::git::basic_auth;\n\nuse crate::git::fetch::fetch;\n\nuse crate::git::io::reader::{read_data_lines, read_until_command};\n\nuse crate::git::ls_refs::ls_refs;\n\nuse crate::prelude::*;\n\nuse crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitRequest;\n\n\n\nuse actix_web::http::header::CONTENT_TYPE;\n\nuse actix_web::{Either, HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse futures::StreamExt;\n\nuse git_repository::protocol::transport::packetline::{PacketLineRef, StreamingPeekableIter};\n\nuse gitarena_macros::route;\n\nuse sqlx::PgPool;\n\n\n\n#[route(\"/{username}/{repository}.git/git-upload-pack\", method = \"POST\", err = \"git\")]\n\npub(crate) async fn git_upload_pack(uri: web::Path<GitRequest>, mut body: web::Payload, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n", "file_path": "src/routes/repository/git/git_upload_pack.rs", "rank": 53, "score": 30.62755575618236 }, { "content": "use gitarena_macros::route;\n\nuse sqlx::{PgPool, Postgres, Transaction};\n\nuse tera::Context;\n\nuse tracing_unwrap::OptionExt;\n\n\n\nasync fn render(tree_option: Option<&str>, repo: Repository, username: &str, web_user: WebUser, mut transaction: Transaction<'_, Postgres>) -> 
Result<impl Responder> {\n\n let tree_name = tree_option.unwrap_or_else(|| repo.default_branch.as_str());\n\n\n\n if !privilege::check_access(&repo, web_user.as_ref(), &mut transaction).await? {\n\n die!(NOT_FOUND, \"Not found\");\n\n }\n\n\n\n let mut context = Context::new();\n\n\n\n let libgit2_repo = repo.libgit2(&mut transaction).await?;\n\n let gitoxide_repo = repo.gitoxide(&mut transaction).await?;\n\n\n\n let loose_ref = match gitoxide_repo.refs.find_loose(tree_name) {\n\n Ok(loose_ref) => Ok(loose_ref),\n\n Err(GitoxideFindError::Find(err)) => Err(err),\n", "file_path": "src/routes/repository/repo_view.rs", "rank": 54, "score": 30.616658747087563 }, { "content": "}\n\n\n\n#[route(\"/{username}/{repository}/tree/{tree:.*}/archive/zip\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn zip_file(uri: web::Path<GitTreeRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n if !privilege::check_access(&repo, web_user.as_ref(), &mut transaction).await? 
{\n\n die!(NOT_FOUND, \"Not found\");\n\n }\n\n\n\n let gitoxide_repo = repo.gitoxide(&mut transaction).await?;\n\n\n\n let loose_ref = match gitoxide_repo.refs.find_loose(&uri.tree) {\n\n Ok(loose_ref) => Ok(loose_ref),\n\n Err(GitoxideFindError::Find(err)) => Err(err),\n\n Err(GitoxideFindError::NotFound(_)) => die!(NOT_FOUND, \"Not found\")\n\n }?; // Handle 404\n", "file_path": "src/routes/repository/archive.rs", "rank": 55, "score": 30.228203655861414 }, { "content": " ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # let request = actix_web::test::TestRequest::with_header(\"content-type\", \"text/plain\").to_http_request();\n\n ///\n\n /// use crate::prelude::*;\n\n ///\n\n /// let content_type = request.get_header(\"content-type\");\n\n /// assert_eq!(content_type, Some(\"text/plain\"));\n\n /// ```\n\n ///\n\n /// [header]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers\n\n fn get_header<S: AsRef<str>>(&self, header: S) -> Option<&str>;\n\n\n\n /// Gets a [QString](qstring::QString) built from the current request.\n\n ///\n\n /// This function is a shorthand for `QString::from(request.query_string())`. It is\n\n /// guaranteed to not fail or panic. If no query string was sent with the request,\n\n /// a empty QString struct is returned. 
This method will always allocate.\n", "file_path": "src/prelude.rs", "rank": 56, "score": 29.922790397437616 }, { "content": " \"stars\": count,\n\n \"self\": self_stargazer\n\n })))\n\n }\n\n}\n\n\n\n#[route(\"/api/repo/{username}/{repository}/star\", method = \"POST\", err = \"json\")]\n\npub(crate) async fn post_star(uri: web::Path<GitRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n if !privilege::check_access(&repo, Some(&user), &mut transaction).await? {\n\n die!(NOT_FOUND, \"Not found\");\n\n }\n\n\n\n if has_star(&user, &repo, &mut transaction).await? {\n", "file_path": "src/routes/repository/api/star.rs", "rank": 57, "score": 29.88493767424272 }, { "content": "use git_repository::odb::pack::cache::DecodeEntry;\n\nuse git_repository::odb::pack::FindExt;\n\nuse git_repository::refs::file::find::existing::Error as GitoxideFindError;\n\nuse git_repository::Repository as GitoxideRepository;\n\nuse gitarena_macros::route;\n\nuse sqlx::PgPool;\n\nuse tokio_tar::{Builder as TarBuilder, Header as TarHeader};\n\nuse zip::write::FileOptions as ZipFileOptions;\n\nuse zip::ZipWriter;\n\n\n\n#[route(\"/{username}/{repository}/tree/{tree:.*}/archive/targz\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn tar_gz_file(uri: web::Path<GitTreeRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = 
Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n if !privilege::check_access(&repo, web_user.as_ref(), &mut transaction).await? {\n\n die!(NOT_FOUND, \"Not found\");\n\n }\n", "file_path": "src/routes/repository/archive.rs", "rank": 58, "score": 29.882990822338048 }, { "content": "use anyhow::Result;\n\nuse async_recursion::async_recursion;\n\nuse git2::{DiffOptions, Oid, Repository as Git2Repository, Sort};\n\nuse tracing::instrument;\n\n\n\n#[instrument(err, skip(repo))]\n\npub(crate) async fn last_commit_for_blob(repo: &Git2Repository, reference_name: &str, file_name: &str) -> Result<Option<Oid>> {\n\n let commits = commits_for_blob(repo, reference_name, file_name, Some(1)).await?;\n\n\n\n Ok(commits.get(0).copied())\n\n}\n\n\n\n#[instrument(err, skip(repo))]\n\n#[async_recursion(?Send)]\n\npub(crate) async fn last_commit_for_ref(repo: &Git2Repository, reference_name: &str) -> Result<Option<Oid>> {\n\n let reference = repo.find_reference(reference_name)?;\n\n\n\n if let Some(target) = reference.symbolic_target() {\n\n return last_commit_for_ref(repo, target).await;\n\n }\n", "file_path": "src/git/history.rs", "rank": 59, "score": 29.53181772178302 }, { "content": " die!(CONFLICT, \"Already starred\");\n\n }\n\n\n\n add_star(&user, &repo, &mut transaction).await?;\n\n\n\n transaction.commit().await?;\n\n\n\n Ok(HttpResponse::Created().finish())\n\n}\n\n\n\n#[route(\"/api/repo/{username}/{repository}/star\", method = \"DELETE\", err = \"json\")]\n\npub(crate) async fn delete_star(uri: web::Path<GitRequest>, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(repo_owner, &uri.repository, &mut 
transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n\n\n if !privilege::check_access(&repo, Some(&user), &mut transaction).await? {\n", "file_path": "src/routes/repository/api/star.rs", "rank": 60, "score": 29.186103952614143 }, { "content": "use crate::git::GitoxideCacheList;\n\nuse crate::git::utils::{read_raw_blob_content, repo_files_at_ref};\n\nuse crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::GitTreeRequest;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err};\n\n\n\nuse std::borrow::Borrow;\n\nuse std::io::{Cursor, Write};\n\nuse std::path::Path;\n\n\n\nuse actix_web::http::header::CONTENT_DISPOSITION;\n\nuse actix_web::{HttpResponse, Responder, web};\n\nuse anyhow::Result;\n\nuse async_compression::tokio::write::GzipEncoder;\n\nuse async_recursion::async_recursion;\n\nuse bstr::ByteSlice;\n\nuse git_repository::objs::tree::EntryMode;\n\nuse git_repository::objs::Tree;\n", "file_path": "src/routes/repository/archive.rs", "rank": 61, "score": 28.9387232164248 }, { "content": "\n\n#[route(\"/login\", method = \"POST\", err = \"html\")]\n\npub(crate) async fn post_login(body: web::Form<LoginRequest>, request: HttpRequest, id: Identity, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let redirect = body.redirect.as_deref().unwrap_or(\"/\");\n\n\n\n // User is already logged in\n\n if id.identity().is_some() {\n\n return Ok(HttpResponse::Found().append_header((LOCATION, redirect)).finish());\n\n }\n\n\n\n // TODO: Maybe allow login with email address?\n\n let username = &body.username;\n\n let password = &body.password;\n\n\n\n let mut context = Context::new();\n\n context.try_insert(\"username\", username.as_str())?;\n\n context.try_insert(\"password\", password.as_str())?;\n\n context.try_insert(\"error\", &true)?; // The login template only gets rendered if an error occurs\n\n\n\n if username.is_empty() {\n", "file_path": "src/routes/user/user_login.rs", 
"rank": 62, "score": 28.758649404855454 }, { "content": " self.get_scopes_as_str()\n\n .iter()\n\n .map(|scope| Scope::new(scope.to_string()))\n\n .collect()\n\n }\n\n\n\n async fn generate_auth_url(&self, provider: &SSOProviderType, db_pool: &PgPool) -> Result<(Url, CsrfToken)> {\n\n let client = self.build_client(provider, db_pool).await?;\n\n let mut request = client.authorize_url(CsrfToken::new_random);\n\n\n\n for scope in self.get_scopes() {\n\n request = request.add_scope(scope);\n\n }\n\n\n\n Ok(request.url())\n\n }\n\n\n\n /// Exchanges a response (provide by `state` and `code` in `query_string`) into an oauth access token\n\n async fn exchange_response(&self, query_string: &QString, provider: &SSOProviderType, db_pool: &PgPool) -> Result<BasicTokenResponse> {\n\n let code_option = query_string.get(\"code\");\n", "file_path": "src/sso/sso_provider.rs", "rank": 63, "score": 28.651986487976785 }, { "content": "\n\n if matches!(web_user, WebUser::Authenticated(_)) {\n\n die!(UNAUTHORIZED, \"Already logged in\");\n\n }\n\n\n\n let mut context = Context::new();\n\n\n\n if !get_setting::<bool, _>(\"allow_registrations\", &mut transaction).await? {\n\n die!(FORBIDDEN, \"User registrations are disabled\");\n\n }\n\n\n\n if let Some(site_key) = get_optional_setting::<String, _>(\"hcaptcha.site_key\", &mut transaction).await? 
{\n\n context.try_insert(\"hcaptcha_site_key\", &site_key)?;\n\n }\n\n\n\n render_template!(\"user/register.html\", context, transaction)\n\n}\n\n\n\n#[route(\"/api/user\", method = \"POST\", err = \"htmx+html\")]\n\npub(crate) async fn post_register(body: web::Json<RegisterJsonRequest>, id: Identity, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n", "file_path": "src/routes/user/user_create.rs", "rank": 64, "score": 28.631389925215593 }, { "content": "use crate::git::io::writer::GitWriter;\n\n\n\nuse actix_web::web::Bytes;\n\nuse anyhow::Result;\n\nuse tracing::instrument;\n\n\n\n// https://git-scm.com/docs/protocol-v2#_capabilities\n\n#[instrument(err)]\n\npub(crate) async fn capabilities(service: &str) -> Result<Bytes> {\n\n let mut writer = GitWriter::new();\n\n\n\n writer.write_text(format!(\"# service={}\", service)).await?;\n\n\n\n writer.flush().await?;\n\n writer.write_text(\"version 2\").await?;\n\n\n\n writer.write_text(concat!(\"agent=git/gitarena-\", env!(\"CARGO_PKG_VERSION\"))).await?;\n\n writer.write_text(\"ls-refs\").await?;\n\n writer.write_text(\"unborn\").await?;\n\n writer.write_text(\"fetch\").await?;\n\n writer.write_text(\"server-option\").await?;\n\n writer.write_text(\"object-format=sha1\").await?;\n\n\n\n writer.flush().await?;\n\n\n\n writer.serialize().await\n\n}\n", "file_path": "src/git/capabilities.rs", "rank": 65, "score": 28.60442575135639 }, { "content": "\n\n // Image is still OK on client side cache\n\n if duration > Duration::seconds(0) {\n\n return Ok(HttpResponse::NotModified().append_header((LAST_MODIFIED, format)).finish());\n\n }\n\n }\n\n\n\n response.append_header((LAST_MODIFIED, format));\n\n }\n\n\n\n let file_content = fs::read(path)?;\n\n\n\n Ok(response.body(file_content))\n\n}\n\n\n\n/// Returns a streaming HttpResponse with the gravatar image\n\nasync fn send_gravatar(email: &str, request: &HttpRequest) -> Result<HttpResponse> {\n\n let md5hash = md5::compute(email);\n\n\n\n let 
mut client = Client::new().get(&format!(\"https://www.gravatar.com/avatar/{:x}?s=500&r=pg&d=identicon\", md5hash));\n", "file_path": "src/routes/user/avatar.rs", "rank": 66, "score": 28.36681904349215 }, { "content": "use crate::git::utils::{read_blob_content, repo_files_at_head};\n\nuse crate::licenses::{license_file_names, LICENSE_STORE};\n\nuse crate::repository::Repository;\n\n\n\nuse anyhow::Result;\n\nuse askalono::TextData;\n\nuse bstr::ByteSlice;\n\nuse git_repository::objs::tree::EntryMode;\n\nuse git_repository::odb::pack::cache::DecodeEntry;\n\nuse tracing::instrument;\n\n\n\n#[instrument(err, skip(gitoxide_repo, cache))]\n\npub(crate) async fn detect_license(repo: &mut Repository, gitoxide_repo: &git_repository::Repository, cache: &mut impl DecodeEntry) -> Result<()> {\n\n let mut buffer = Vec::<u8>::new();\n\n\n\n let tree = repo_files_at_head(gitoxide_repo, &mut buffer, cache).await?;\n\n\n\n for entry in tree.entries {\n\n let lowered_file_name = entry.filename.to_lowercase();\n\n\n", "file_path": "src/git/hooks/detect_license.rs", "rank": 67, "score": 27.793966580749036 }, { "content": "use crate::user::{User, WebUser};\n\n\n\nuse actix_web::HttpRequest;\n\nuse anyhow::{anyhow, bail, Result};\n\nuse async_trait::async_trait;\n\nuse bstr::BString;\n\nuse chrono::{DateTime, FixedOffset, LocalResult, TimeZone, Utc};\n\nuse git2::{Signature as LibGit2Signature, Time as LibGit2Time};\n\nuse git_repository::actor::{Sign, Signature as GitoxideSignature, Time as GitoxideTime};\n\nuse log::warn;\n\nuse qstring::QString;\n\nuse sqlx::{Executor, Postgres};\n\nuse tera::Context;\n\n\n\npub(crate) trait HttpRequestExtensions {\n\n /// Gets a specific header from the current request.\n\n ///\n\n /// This function gets a specific [http header][header] from the current request.\n\n /// If the requested header does not exist in the current request or is not valid utf-8, returns `None`.\n\n /// This method does not allocate but instead returns a `&str`.\n", 
"file_path": "src/prelude.rs", "rank": 68, "score": 27.512249122328683 }, { "content": "use crate::{die, render_template};\n\nuse crate::repository::Repository;\n\nuse crate::user::{User, WebUser};\n\n\n\nuse actix_web::{Responder, web};\n\nuse anyhow::Result;\n\nuse chrono::Duration;\n\nuse chrono_humanize::{Accuracy, HumanTime, Tense};\n\nuse git2::Version as LibGit2Version;\n\nuse gitarena_macros::route;\n\nuse heim::units::{Information, information, Time};\n\nuse sqlx::PgPool;\n\nuse tera::Context;\n\n\n\n#[route(\"/\", method = \"GET\", err = \"html\")]\n\npub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<impl Responder> {\n\n let user = web_user.into_user()?;\n\n\n\n if !user.admin {\n\n die!(FORBIDDEN, \"Not allowed\");\n", "file_path": "src/routes/admin/dashboard.rs", "rank": 69, "score": 27.241272294032772 }, { "content": " pub(crate) async fn write_text_sideband_pktline<S: AsRef<str>>(&mut self, band: Band, text: S) -> Result<&mut GitWriter> {\n\n let str_ref = text.as_ref();\n\n let hex_prefix = &u16_to_hex((str_ref.len() + 4 + 1) as u16); // 4 for length, 1 for newline\n\n let with_band = [band.serialize(), hex_prefix, str_ref.as_bytes()].concat();\n\n\n\n self.inner.write(with_band.as_slice()).await.with_context(|| {\n\n format!(\"Unable to write text to sideband {} in Git writer: `{}`\", band, str_ref)\n\n })?;\n\n Ok(self)\n\n }\n\n\n\n pub(crate) async fn write_text_bytes(&mut self, text: &[u8]) -> Result<&mut GitWriter> {\n\n self.inner.write(text).await.with_context(|| format!(\"Unable to write text bytes to Git writer: {:?}\", text))?;\n\n Ok(self)\n\n }\n\n\n\n pub(crate) async fn write_binary(&mut self, binary: &[u8]) -> Result<&mut GitWriter> {\n\n self.inner.enable_binary_mode();\n\n self.inner.write(binary).await.with_context(|| format!(\"Unable to write binary to Git writer: {:?}\", binary))?;\n\n\n", "file_path": "src/git/io/writer.rs", "rank": 70, "score": 27.205422902143255 }, { "content": "use 
crate::{crypto, die};\n\nuse crate::git::basic_auth;\n\nuse crate::prelude::*;\n\nuse crate::privileges::repo_visibility::RepoVisibility;\n\nuse crate::repository::Repository;\n\nuse crate::user::User;\n\n\n\nuse actix_web::http::header::{CONTENT_TYPE, WWW_AUTHENTICATE};\n\nuse actix_web::{Either, HttpRequest, HttpResponse};\n\nuse anyhow::Result;\n\nuse sqlx::{Executor, Postgres};\n\nuse tracing::instrument;\n\nuse tracing_unwrap::OptionExt;\n\n\n\n#[instrument(skip(request, executor), err)]\n\npub(crate) async fn validate_repo_access<'e, E>(repo: Option<Repository>, content_type: &str, request: &HttpRequest, executor: E) -> Result<Either<(Option<User>, Repository), HttpResponse>>\n\n where E: Executor<'e, Database = Postgres>\n\n{\n\n match repo {\n\n Some(repo) => {\n", "file_path": "src/git/basic_auth.rs", "rank": 71, "score": 26.801499661482797 }, { "content": " None\n\n }\n\n }\n\n}\n\n\n\n// Used by git-receive-pack ref discovery\n\n#[instrument(err, skip(repo))]\n\npub(crate) async fn ls_refs_all(repo: &Git2Repository) -> Result<Bytes> {\n\n let mut writer = GitWriter::new();\n\n\n\n writer.write_text(\"# service=git-receive-pack\").await?;\n\n writer.flush().await?;\n\n\n\n let once = Once::new();\n\n\n\n for result in repo.references()? 
{\n\n match result {\n\n Ok(reference) => {\n\n if let Some(name) = reference.name() {\n\n if let Some(oid) = reference.target() {\n", "file_path": "src/git/ls_refs.rs", "rank": 72, "score": 26.737319285627358 }, { "content": " .fetch_one(&mut transaction)\n\n .await\n\n .map_err(|_| err!(BAD_REQUEST, \"Setting not found\"))?;\n\n\n\n let valid = match setting.type_constraint {\n\n TypeConstraint::Boolean => value.parse::<bool>().is_ok(),\n\n TypeConstraint::Char => value.parse::<char>().is_ok(),\n\n TypeConstraint::Int => value.parse::<i32>().is_ok(),\n\n TypeConstraint::String | TypeConstraint::Bytes => true\n\n };\n\n\n\n if !valid {\n\n die!(BAD_REQUEST, \"Value for {} does not follow type constraint\", key);\n\n }\n\n\n\n // This does on purpose not use config::set_setting as that method requires a key: &'static str\n\n // aka it is meant to only be used within the program itself with known, safe values\n\n sqlx::query(\"update settings set value = $1 where key = $2\")\n\n .bind(value)\n\n .bind(key)\n", "file_path": "src/routes/admin/settings.rs", "rank": 73, "score": 26.0799252390499 }, { "content": " fn get_name(&self) -> &'static str;\n\n\n\n fn get_auth_url(&self) -> AuthUrl;\n\n fn get_token_url(&self) -> Option<TokenUrl>;\n\n\n\n async fn build_client(&self, provider: &SSOProviderType, db_pool: &PgPool) -> Result<BasicClient> {\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n let (client_id, client_secret) = match provider {\n\n SSOProviderType::BitBucket => (\n\n DatabaseSSOProvider::get_client_id(&BitBucketSSO, &mut transaction).await.context(\"Failed to get client id\")?,\n\n DatabaseSSOProvider::get_client_secret(&BitBucketSSO, &mut transaction).await.context(\"Failed to get client secret\")?\n\n ),\n\n SSOProviderType::GitHub => (\n\n DatabaseSSOProvider::get_client_id(&GitHubSSO, &mut transaction).await.context(\"Failed to get client id\")?,\n\n DatabaseSSOProvider::get_client_secret(&GitHubSSO, &mut transaction).await.context(\"Failed to 
get client secret\")?\n\n ),\n\n SSOProviderType::GitLab => (\n\n DatabaseSSOProvider::get_client_id(&GitLabSSO, &mut transaction).await.context(\"Failed to get client id\")?,\n\n DatabaseSSOProvider::get_client_secret(&GitLabSSO, &mut transaction).await.context(\"Failed to get client secret\")?\n", "file_path": "src/sso/sso_provider.rs", "rank": 74, "score": 25.938261454854572 }, { "content": "\n\n let repo_owner = User::find_using_name(&uri.username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\"))?;\n\n let repo = Repository::open(repo_owner, &uri.repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, \"Repository not found\") )?;\n\n\n\n if !privilege::check_access(&repo, web_user.as_ref(), &mut transaction).await? {\n\n die!(NOT_FOUND, \"Not found\");\n\n }\n\n\n\n let gitoxide_repo = repo.gitoxide(&mut transaction).await?;\n\n\n\n let loose_ref = match gitoxide_repo.refs.find_loose(uri.tree.as_str()) {\n\n Ok(loose_ref) => Ok(loose_ref),\n\n Err(GitoxideFindError::Find(err)) => Err(err),\n\n Err(GitoxideFindError::NotFound(_)) => die!(NOT_FOUND, \"Not found\")\n\n }?; // Handle 404\n\n\n\n let full_tree_name = loose_ref.name.as_bstr().to_str()?;\n\n\n\n let query_string = request.q_string();\n\n let after_oid = query_string.get(\"after\");\n", "file_path": "src/routes/repository/commits.rs", "rank": 75, "score": 25.38218814371916 }, { "content": " let domain = config::get_setting::<String, _>(\"domain\", executor).await?;\n\n let url = format!(\"{}/sso/{}/callback\", domain, self.get_name());\n\n\n\n Ok(RedirectUrl::new(url)?)\n\n }\n\n\n\n async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<ClientId>;\n\n async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<Option<ClientSecret>>;\n\n}\n", "file_path": "src/sso/sso_provider.rs", "rank": 76, "score": 25.36333273630508 }, { "content": "use crate::config::get_setting;\n\nuse 
crate::templates::plain::render;\n\nuse crate::user::User;\n\nuse crate::{crypto, mail, template_context, templates};\n\n\n\nuse anyhow::{Context, Result};\n\nuse sqlx::{Pool, Postgres};\n\n\n\npub(crate) async fn send_verification_mail(user: &User, db_pool: &Pool<Postgres>) -> Result<()> {\n\n assert!(user.id >= 0);\n\n\n\n let hash = crypto::random_hex_string(32);\n\n let mut transaction = db_pool.begin().await?;\n\n\n\n sqlx::query(\"insert into user_verifications (user_id, hash, expires) values ($1, $2, now() + interval '1 day')\")\n\n .bind(&user.id)\n\n .bind(&hash)\n\n .execute(&mut transaction)\n\n .await?;\n\n\n", "file_path": "src/verification.rs", "rank": 77, "score": 25.25517303950505 }, { "content": "use crate::git::io::band::Band;\n\nuse crate::git::io::progress_writer::ProgressWriter;\n\nuse crate::git::io::writer::GitWriter;\n\n\n\nuse actix_web::web::Bytes;\n\nuse anyhow::Result;\n\nuse async_recursion::async_recursion;\n\nuse git2::{Buf, Commit, ObjectType, Oid, PackBuilder, Repository as Git2Repository};\n\nuse log::warn;\n\nuse tracing::instrument;\n\n\n\n#[instrument(err, skip(repo))]\n\npub(crate) async fn fetch(input: Vec<Vec<u8>>, repo: &Git2Repository) -> Result<Bytes> {\n\n let mut options = Fetch::default();\n\n let mut writer = GitWriter::new();\n\n\n\n for raw_line in input.iter() {\n\n let line = String::from_utf8(raw_line.to_vec())?;\n\n\n\n if line == \"thin-pack\" {\n", "file_path": "src/git/fetch.rs", "rank": 78, "score": 25.071282793837263 }, { "content": "use crate::sso::oauth_request::{OAuthRequest, SerdeMap};\n\nuse crate::sso::sso_provider::{DatabaseSSOProvider, SSOProvider};\n\nuse crate::sso::sso_provider_type::SSOProviderType;\n\nuse crate::user::User;\n\nuse crate::utils::identifiers::{is_username_taken, validate_username};\n\nuse crate::{config, crypto};\n\n\n\nuse std::sync::Once;\n\n\n\nuse anyhow::{anyhow, bail, Context, Result};\n\nuse async_trait::async_trait;\n\nuse oauth2::{AuthUrl, ClientId, ClientSecret, 
TokenUrl};\n\nuse reqwest::Client;\n\nuse reqwest::header::{AUTHORIZATION, USER_AGENT};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse sqlx::{Executor, PgPool, Postgres};\n\nuse tokio_compat_02::FutureExt;\n\nuse tracing_unwrap::ResultExt;\n", "file_path": "src/sso/gitlab_sso.rs", "rank": 79, "score": 24.754902496149086 }, { "content": "use crate::git::io::writer::GitWriter;\n\n\n\nuse core::result::Result as CoreResult;\n\nuse std::sync::Once;\n\n\n\nuse actix_web::web::Bytes;\n\nuse anyhow::Result;\n\nuse git2::{Error as Git2Error, ErrorCode, Reference, Repository as Git2Repository};\n\nuse log::{error, warn};\n\nuse tracing::instrument;\n\n\n\n// TODO: Combine ls_refs and ls_refs_all to be shared (currently some code is duplicated)\n\n\n\n// Used by git-upload-pack ref discovery\n\n#[instrument(err, skip(repo))]\n\npub(crate) async fn ls_refs(input: Vec<Vec<u8>>, repo: &Git2Repository) -> Result<Bytes> {\n\n let mut options = LsRefs::default();\n\n let mut writer = GitWriter::new();\n\n\n\n for raw_line in input.iter() {\n", "file_path": "src/git/ls_refs.rs", "rank": 80, "score": 24.68147343791105 }, { "content": "use crate::{config, crypto};\n\nuse crate::sso::oauth_request::{OAuthRequest, SerdeMap};\n\nuse crate::sso::sso_provider::{DatabaseSSOProvider, SSOProvider};\n\nuse crate::sso::sso_provider_type::SSOProviderType;\n\nuse crate::user::User;\n\nuse crate::utils::identifiers::{is_username_taken, validate_username};\n\n\n\nuse anyhow::{anyhow, bail, Context, Result};\n\nuse async_trait::async_trait;\n\nuse oauth2::{AuthUrl, ClientId, ClientSecret, TokenUrl};\n\nuse reqwest::Client;\n\nuse reqwest::header::{ACCEPT, AUTHORIZATION, USER_AGENT};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse sqlx::{Executor, PgPool, Postgres};\n\nuse tokio_compat_02::FutureExt;\n\nuse tracing_unwrap::ResultExt;\n\n\n\npub(crate) struct BitBucketSSO;\n", 
"file_path": "src/sso/bitbucket_sso.rs", "rank": 81, "score": 24.544086907693064 }, { "content": "use crate::{config, crypto};\n\nuse crate::sso::oauth_request::{OAuthRequest, SerdeMap};\n\nuse crate::sso::sso_provider::{DatabaseSSOProvider, SSOProvider};\n\nuse crate::sso::sso_provider_type::SSOProviderType;\n\nuse crate::user::User;\n\nuse crate::utils::identifiers::{is_username_taken, validate_username};\n\n\n\nuse anyhow::{anyhow, bail, Context, Result};\n\nuse async_trait::async_trait;\n\nuse oauth2::{AuthUrl, ClientId, ClientSecret, Scope, TokenUrl};\n\nuse reqwest::Client;\n\nuse reqwest::header::{ACCEPT, AUTHORIZATION, USER_AGENT};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse sqlx::{Executor, PgPool, Postgres};\n\nuse tokio_compat_02::FutureExt;\n\nuse tracing_unwrap::ResultExt;\n\n\n\npub(crate) struct GitHubSSO;\n", "file_path": "src/sso/github_sso.rs", "rank": 82, "score": 24.48206049992889 }, { "content": " // Example [hexl]text\n\n pub(crate) async fn write_text<S: AsRef<str>>(&mut self, text: S) -> Result<&mut GitWriter> {\n\n let str_ref = text.as_ref();\n\n\n\n self.inner.write(str_ref.as_bytes()).await.with_context(|| format!(\"Unable to write text to Git writer: `{}`\", str_ref))?;\n\n Ok(self)\n\n }\n\n\n\n // Example: [hexl]\\x01text\n\n pub(crate) async fn write_text_sideband<S: AsRef<str>>(&mut self, band: Band, text: S) -> Result<&mut GitWriter> {\n\n let str_ref = text.as_ref();\n\n let with_band = [band.serialize(), str_ref.as_bytes()].concat();\n\n\n\n self.inner.write(with_band.as_slice()).await.with_context(|| {\n\n format!(\"Unable to write text to sideband {} in Git writer: `{}`\", band, str_ref)\n\n })?;\n\n Ok(self)\n\n }\n\n\n\n // Example: \"[hexl]\\x01[hexl]text\"\n", "file_path": "src/git/io/writer.rs", "rank": 83, "score": 24.375563714989713 }, { "content": "use crate::git::io::band::Band;\n\n\n\nuse actix_web::web::{Bytes, BytesMut};\n\nuse anyhow::{Context, 
Result};\n\nuse futures::AsyncWriteExt;\n\nuse git_repository::protocol::transport::packetline::{PacketLineRef, Writer as PacketlineWriter};\n\nuse tracing::instrument;\n\nuse tracing_unwrap::ResultExt;\n\n\n\npub(crate) struct GitWriter {\n\n inner: PacketlineWriter<Vec<u8>>\n\n}\n\n\n\nimpl GitWriter {\n\n pub(crate) fn new() -> GitWriter {\n\n GitWriter {\n\n inner: PacketlineWriter::new(Vec::<u8>::new()).text_mode()\n\n }\n\n }\n\n\n", "file_path": "src/git/io/writer.rs", "rank": 84, "score": 24.36345226467686 }, { "content": "/// ```\n\n/// use crate::utils::oid::from_hex_str;\n\n///\n\n/// assert!(from_hex_str(Some(\"b52f683ce73e8be06428b8c6cf0eb421eae21772\")).is_ok());\n\n/// assert!(from_hex_str(None).is_ok());\n\n/// assert!(from_hex_str(Some(\"invalid length string\")).is_err()); // Invalid length\n\n/// assert!(from_hex_str(Some(\"yZ0r3ny0K55qqxoz0HZhCWzqAdyFdZ3L9GmXG7EU\")).is_err()); // Not hexadecimal\n\n/// ```\n\n///\n\n/// [oid]: git_hash::ObjectId\n\npub(crate) fn from_hex_str(option: Option<&str>) -> Result<ObjectId> {\n\n match option {\n\n Some(oid) => if oid.chars().all(|c| c.is_ascii_hexdigit()) {\n\n Ok(ObjectId::from_hex(oid.as_bytes())?)\n\n } else {\n\n Err(anyhow!(\"Input string is not hexadecimal: {}\", oid))\n\n }\n\n None => Ok(ObjectId::null_sha1())\n\n }\n\n}\n", "file_path": "src/utils/oid.rs", "rank": 85, "score": 24.133336363153695 }, { "content": "use crate::mail::Email;\n\nuse crate::prelude::HttpRequestExtensions;\n\nuse crate::session::Session;\n\nuse crate::sso::SSO;\n\nuse crate::sso::sso_provider::SSOProvider;\n\nuse crate::sso::sso_provider_type::SSOProviderType;\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err};\n\n\n\nuse std::ops::Deref;\n\nuse std::str::FromStr;\n\n\n\nuse actix_identity::Identity;\n\nuse actix_web::http::header::LOCATION;\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::{Context, Result};\n\nuse gitarena_macros::route;\n\nuse log::debug;\n\nuse 
oauth2::TokenResponse;\n\nuse serde::Deserialize;\n", "file_path": "src/routes/user/sso.rs", "rank": 86, "score": 24.088182698209703 }, { "content": " where E: Executor<'e, Database = Postgres>\n\n{\n\n if !basic_auth::is_present(request).await {\n\n return Ok(Either::Right(prompt(content_type).await));\n\n }\n\n\n\n Ok(Either::Left(basic_auth::authenticate(request, executor).await?))\n\n}\n\n\n\n#[allow(clippy::async_yields_async)] // False positive on this method\n\n#[instrument]\n\npub(crate) async fn prompt(content_type: &str) -> HttpResponse {\n\n HttpResponse::Unauthorized()\n\n .append_header((CONTENT_TYPE, content_type))\n\n .append_header((WWW_AUTHENTICATE, \"Basic realm=\\\"GitArena\\\", charset=\\\"UTF-8\\\"\"))\n\n .finish()\n\n}\n\n\n\n#[instrument(skip_all, err)]\n\npub(crate) async fn authenticate<'e, E>(request: &HttpRequest, transaction: E) -> Result<User>\n", "file_path": "src/git/basic_auth.rs", "rank": 87, "score": 23.879482635614917 }, { "content": "use crate::git::GitoxideCacheList;\n\nuse crate::git::history::{all_branches, all_commits, all_tags, last_commit_for_blob, last_commit_for_ref};\n\nuse crate::git::utils::{read_blob_content, repo_files_at_ref};\n\nuse crate::prelude::LibGit2SignatureExtensions;\n\nuse crate::privileges::privilege;\n\nuse crate::repository::Repository;\n\nuse crate::routes::repository::{GitRequest, GitTreeRequest};\n\nuse crate::templates::web::{GitCommit, RepoFile};\n\nuse crate::user::{User, WebUser};\n\nuse crate::{die, err, render_template};\n\n\n\nuse std::cmp::Ordering;\n\n\n\nuse actix_web::{Responder, web};\n\nuse anyhow::Result;\n\nuse bstr::ByteSlice;\n\nuse git_repository::hash::ObjectId;\n\nuse git_repository::objs::tree::EntryMode;\n\nuse git_repository::objs::Tree;\n\nuse git_repository::refs::file::find::existing::Error as GitoxideFindError;\n", "file_path": "src/routes/repository/repo_view.rs", "rank": 88, "score": 23.789825649242708 }, { "content": "\n\n#[async_trait]\n\nimpl<T: DeserializeOwned> 
OAuthRequest<T> for BitBucketSSO {\n\n async fn request_data(endpoint: &'static str, token: &str) -> Result<T> {\n\n let client = Client::new();\n\n\n\n Ok(client.get(format!(\"https://api.bitbucket.org/2.0/{}\", endpoint).as_str())\n\n .header(ACCEPT, \"application/json\")\n\n .header(AUTHORIZATION, format!(\"Bearer {}\", token))\n\n .header(USER_AGENT, concat!(\"GitArena \", env!(\"CARGO_PKG_VERSION\")))\n\n .send()\n\n .compat()\n\n .await\n\n .context(\"Failed to connect to BitBucket api\")?\n\n .json::<T>()\n\n .compat()\n\n .await\n\n .context(\"Failed to parse BitBucket response as JSON\")?)\n\n }\n\n}\n", "file_path": "src/sso/bitbucket_sso.rs", "rank": 89, "score": 23.729019448719065 }, { "content": "\n\npub(crate) struct GitLabSSO;\n\n\n\n#[async_trait]\n\nimpl<T: DeserializeOwned> OAuthRequest<T> for GitLabSSO {\n\n async fn request_data(endpoint: &'static str, token: &str) -> Result<T> {\n\n let client = Client::new();\n\n\n\n Ok(client.get(format!(\"https://gitlab.com/api/v4/{}\", endpoint).as_str())\n\n .header(AUTHORIZATION, format!(\"Bearer {}\", token))\n\n .header(USER_AGENT, concat!(\"GitArena \", env!(\"CARGO_PKG_VERSION\")))\n\n .send()\n\n .compat()\n\n .await\n\n .context(\"Failed to connect to GitLab api\")?\n\n .json::<T>()\n\n .compat()\n\n .await\n\n .context(\"Failed to parse GitLab response as JSON\")?)\n\n }\n", "file_path": "src/sso/gitlab_sso.rs", "rank": 90, "score": 23.555992920553138 }, { "content": "\n\n#[async_trait]\n\nimpl<T: DeserializeOwned> OAuthRequest<T> for GitHubSSO {\n\n async fn request_data(endpoint: &'static str, token: &str) -> Result<T> {\n\n let client = Client::new();\n\n\n\n Ok(client.get(format!(\"https://api.github.com/{}\", endpoint).as_str())\n\n .header(ACCEPT, \"application/vnd.github.v3+json\")\n\n .header(AUTHORIZATION, format!(\"token {}\", token))\n\n .header(USER_AGENT, concat!(\"GitArena \", env!(\"CARGO_PKG_VERSION\")))\n\n .send()\n\n .compat()\n\n .await\n\n .context(\"Failed to connect to GitHub 
api\")?\n\n .json::<T>()\n\n .compat()\n\n .await\n\n .context(\"Failed to parse GitHub response as JSON\")?)\n\n }\n\n}\n", "file_path": "src/sso/github_sso.rs", "rank": 91, "score": 23.470496614054543 }, { "content": "\n\n /// Returns true if the granted scopes are OK or not\n\n fn validate_scopes(&self, scopes_option: Option<&Vec<Scope>>) -> bool {\n\n let granted_scopes = match scopes_option {\n\n Some(scopes) => scopes.iter().map(|scope| scope.as_str()).collect::<Vec<_>>(),\n\n None => return true // If not provided it is identical to our asked scopes\n\n };\n\n\n\n let requested_scopes = self.get_scopes_as_str();\n\n granted_scopes.iter().all(|item| requested_scopes.contains(item))\n\n }\n\n\n\n async fn get_provider_id(&self, token: &str) -> Result<String>;\n\n\n\n async fn create_user(&self, token: &str, db_pool: &PgPool) -> Result<User>;\n\n}\n\n\n\n#[async_trait]\n\npub(crate) trait DatabaseSSOProvider: SSOProvider {\n\n async fn get_redirect_url<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<RedirectUrl> {\n", "file_path": "src/sso/sso_provider.rs", "rank": 92, "score": 23.465736283078407 }, { "content": "\n\n Ok(writer.serialize().await?)\n\n}\n\n\n\n#[instrument(err, skip(repo))]\n\npub(crate) async fn process_haves(repo: &Git2Repository, options: &Fetch) -> Result<Option<GitWriter>> {\n\n if options.have.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut written_one = false;\n\n let mut writer = GitWriter::new();\n\n writer.write_text(\"acknowledgments\").await?;\n\n\n\n for have in &options.have {\n\n match repo.find_reference(have.as_str()) {\n\n Ok(reference) => {\n\n if let Some(name) = reference.name() {\n\n writer.write_text(format!(\"ACK {}\", name)).await?;\n\n written_one = true;\n", "file_path": "src/git/fetch.rs", "rank": 93, "score": 23.384028992402722 }, { "content": "use crate::mail::Email;\n\nuse crate::prelude::HttpRequestExtensions;\n\nuse crate::user::WebUser;\n\nuse 
crate::utils::reqwest_actix_stream::ResponseStream;\n\nuse crate::{die, err};\n\n\n\nuse std::fs;\n\nuse std::io::Cursor;\n\nuse std::path::Path;\n\nuse std::time::SystemTime;\n\n\n\nuse actix_multipart::Multipart;\n\nuse actix_web::http::header::{CACHE_CONTROL, LAST_MODIFIED};\n\nuse actix_web::{HttpRequest, HttpResponse, Responder, web};\n\nuse anyhow::{Context, Result};\n\nuse chrono::{Duration, NaiveDateTime};\n\nuse futures::TryStreamExt;\n\nuse gitarena_macros::{from_config, route};\n\nuse image::ImageFormat;\n\nuse reqwest::Client;\n", "file_path": "src/routes/user/avatar.rs", "rank": 94, "score": 23.117664710948702 }, { "content": " .await\n\n .with_context(|| format!(\"Unable to read setting {} from database\", key))?;\n\n\n\n let result: T = setting.try_into().map_err(|err: T::Error| err.into_inner())?;\n\n Ok(result)\n\n}\n\n\n\npub(crate) async fn get_all_settings<'e, E: Executor<'e, Database = Postgres>>(executor: E) -> Result<Vec<Setting>> {\n\n Ok(sqlx::query_as::<_, Setting>(\"select * from settings order by key\").fetch_all(executor).await?)\n\n}\n\n\n\n// This function returns impl Future instead of relying on async fn to automatically convert it into doing just that\n\n// Because async fn tries to unify lifetimes, we need to do this. 
More info: https://stackoverflow.com/a/68733302\n\npub(crate) fn set_setting<'e, 'q, T, E>(key: &'static str, value: T, executor: E) -> impl Future<Output = Result<()>> + 'q\n\n where T: TryFrom<Setting> + Encode<'q, Postgres> + Type<Postgres> + Send + 'q,\n\n E: Executor<'e, Database = Postgres> + 'q\n\n{\n\n async move {\n\n sqlx::query(\"update settings set value = $1 where key = $2\")\n\n .bind(value)\n", "file_path": "src/config.rs", "rank": 95, "score": 23.03832431451754 }, { "content": " })))\n\n}\n\n\n\nasync fn web_not_found(request: HttpRequest, web_user: WebUser, db_pool: web::Data<PgPool>) -> Result<HttpResponse> {\n\n let mut transaction = db_pool.begin().await?;\n\n let mut context = Context::new();\n\n\n\n if let Some(user) = web_user.ok() {\n\n context.try_insert(\"user\", &user)?;\n\n }\n\n\n\n context.try_insert(\"path\", request.path())?;\n\n\n\n render_template!(StatusCode::NOT_FOUND, \"error/404.html\", context, transaction)\n\n}\n\n\n\n#[instrument(skip_all)]\n\npub(crate) async fn default_handler(request: HttpRequest, web_user: WebUser, db_pool: web::Data<PgPool>) -> ActixResult<impl Responder> {\n\n debug!(\"Got request for non-existent resource: {}\", request.path());\n\n\n", "file_path": "src/routes/not_found.rs", "rank": 96, "score": 22.809175907842942 }, { "content": "\n\n#[async_trait]\n\nimpl DatabaseSSOProvider for BitBucketSSO {\n\n async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<ClientId> {\n\n let client_id = config::get_setting::<String, _>(\"sso.bitbucket.key\", executor).await?;\n\n\n\n Ok(ClientId::new(client_id))\n\n }\n\n\n\n async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<Option<ClientSecret>> {\n\n let client_secret = config::get_setting::<String, _>(\"sso.bitbucket.secret\", executor).await?;\n\n\n\n Ok(Some(ClientSecret::new(client_secret)))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl SSOProvider for BitBucketSSO {\n\n fn 
get_name(&self) -> &'static str {\n\n \"bitbucket\"\n", "file_path": "src/sso/bitbucket_sso.rs", "rank": 97, "score": 22.7283870880989 }, { "content": "}\n\n\n\n#[async_trait]\n\nimpl DatabaseSSOProvider for GitLabSSO {\n\n async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<ClientId> {\n\n let client_id = config::get_setting::<String, _>(\"sso.gitlab.app_id\", executor).await?;\n\n\n\n Ok(ClientId::new(client_id))\n\n }\n\n\n\n async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<Option<ClientSecret>> {\n\n let client_secret = config::get_setting::<String, _>(\"sso.gitlab.client_secret\", executor).await?;\n\n\n\n Ok(Some(ClientSecret::new(client_secret)))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl SSOProvider for GitLabSSO {\n\n fn get_name(&self) -> &'static str {\n", "file_path": "src/sso/gitlab_sso.rs", "rank": 98, "score": 22.722267353629253 }, { "content": "\n\n#[async_trait]\n\nimpl DatabaseSSOProvider for GitHubSSO {\n\n async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<ClientId> {\n\n let client_id = config::get_setting::<String, _>(\"sso.github.client_id\", executor).await?;\n\n\n\n Ok(ClientId::new(client_id))\n\n }\n\n\n\n async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<Option<ClientSecret>> {\n\n let client_secret = config::get_setting::<String, _>(\"sso.github.client_secret\", executor).await?;\n\n\n\n Ok(Some(ClientSecret::new(client_secret)))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl SSOProvider for GitHubSSO {\n\n fn get_name(&self) -> &'static str {\n\n \"github\"\n", "file_path": "src/sso/github_sso.rs", "rank": 99, "score": 22.709390250321114 } ]
Rust
fusequery/query/src/datasources/local/csv_table_test.rs
leiysky/fuse-query
49e268ba7938469c2a3c0541a0c713ef447ccffb
use std::sync::Arc; #[tokio::test] async fn test_csv_table() -> anyhow::Result<()> { use std::env; use common_datavalues::*; use common_planners::*; use futures::TryStreamExt; use crate::datasources::local::*; let options: TableOptions = [( "location".to_string(), env::current_dir()? .join("../../tests/data/sample.csv") .display() .to_string() )] .iter() .cloned() .collect(); let ctx = crate::tests::try_create_context()?; let table = CsvTable::try_create( "default".into(), "test_csv".into(), DataSchema::new(vec![DataField::new("column1", DataType::UInt64, false)]).into(), options )?; let scan_plan = &ScanPlan { schema_name: "".to_string(), table_schema: Arc::new(DataSchema::new(vec![])), table_args: None, projection: None, projected_schema: Arc::new(DataSchema::new(vec![DataField::new( "column1", DataType::UInt64, false )])), filters: vec![], limit: None }; let source_plan = table.read_plan(ctx.clone(), &scan_plan)?; ctx.try_set_partitions(source_plan.partitions)?; let stream = table.read(ctx).await?; let result = stream.try_collect::<Vec<_>>().await?; let block = &result[0]; assert_eq!(block.num_columns(), 1); let expected = vec![ "+---------+", "| column1 |", "+---------+", "| 1 |", "| 2 |", "| 3 |", "| 4 |", "| 5 |", "| 6 |", "+---------+", ]; common_datablocks::assert_blocks_sorted_eq(expected, result.as_slice()); Ok(()) } #[tokio::test] async fn test_csv_table_parse_error() -> anyhow::Result<()> { use std::env; use common_datavalues::*; use common_planners::*; use futures::TryStreamExt; use pretty_assertions::assert_eq; use crate::datasources::local::*; let options: TableOptions = [( "location".to_string(), env::current_dir()? 
.join("../../tests/data/sample.csv") .display() .to_string() )] .iter() .cloned() .collect(); let ctx = crate::tests::try_create_context()?; let table = CsvTable::try_create( "default".into(), "test_csv".into(), DataSchema::new(vec![ DataField::new("column1", DataType::UInt64, false), DataField::new("column2", DataType::UInt64, false), DataField::new("column3", DataType::UInt64, false), DataField::new("column4", DataType::UInt64, false), ]) .into(), options )?; let scan_plan = &ScanPlan { schema_name: "".to_string(), table_schema: Arc::new(DataSchema::new(vec![])), table_args: None, projection: None, projected_schema: Arc::new(DataSchema::new(vec![DataField::new( "column2", DataType::UInt64, false )])), filters: vec![], limit: None }; let source_plan = table.read_plan(ctx.clone(), &scan_plan)?; ctx.try_set_partitions(source_plan.partitions)?; let stream = table.read(ctx).await?; let result = stream.try_collect::<Vec<_>>().await; assert_eq!(true, result.is_err()); if let Err(e) = result { assert_eq!( "Code: 1002, displayText = Parser error: Error while parsing value \'Shanghai\' for column 1 at line 1.", e.to_string() ); }; Ok(()) }
use std::sync::Arc; #[tokio::test] async fn test_csv_table() -> anyhow::Result<()> { use std::env; use common_datavalues::*; use common_planners::*; use futures::TryStreamExt; use crate::datasources::local::*; let options: TableOptions = [( "location".to_string(), env::current_dir()? .join("../../tests/data/sample.csv") .display() .to_string() )] .iter() .cloned() .collect(); let ctx = crate::tests::try_create_context()?; let table = CsvTable::try_create( "default".into(), "test_csv".into(), DataSchema::new(vec![DataField::new("column1", DataType::UInt64, false)]).into(), options )?; let scan_plan = &ScanPlan { schema_name: "".to_string(), table_schema: Arc::new(DataSchema::new(vec![])), table_args: None, projection: None, projected_schema: Arc::new(DataSchema::new(vec![DataField::new( "column1", DataType::UInt64, false )])), filters: vec![], limit: None }; let source_plan = table.read_plan(ctx.clone(), &scan_plan)?; ctx.try_set_partitions(source_plan.partitions)?; let stream = table.read(ctx).await?; let result = stream.try_collect::<Vec<_>>().await?; let block = &result[0]; assert_eq!(block.num_columns(), 1); let expected = vec![ "+---------+", "| column1 |", "+---------+", "| 1 |", "| 2 |", "| 3 |", "| 4 |", "| 5 |", "| 6 |", "+---------+", ]; common_datablocks::assert_blocks_sorted_eq(expected, result.as_slice()); Ok(()) } #[tokio::test]
async fn test_csv_table_parse_error() -> anyhow::Result<()> { use std::env; use common_datavalues::*; use common_planners::*; use futures::TryStreamExt; use pretty_assertions::assert_eq; use crate::datasources::local::*; let options: TableOptions = [( "location".to_string(), env::current_dir()? .join("../../tests/data/sample.csv") .display() .to_string() )] .iter() .cloned() .collect(); let ctx = crate::tests::try_create_context()?; let table = CsvTable::try_create( "default".into(), "test_csv".into(), DataSchema::new(vec![ DataField::new("column1", DataType::UInt64, false), DataField::new("column2", DataType::UInt64, false), DataField::new("column3", DataType::UInt64, false), DataField::new("column4", DataType::UInt64, false), ]) .into(), options )?; let scan_plan = &ScanPlan { schema_name: "".to_string(), table_schema: Arc::new(DataSchema::new(vec![])), table_args: None, projection: None, projected_schema: Arc::new(DataSchema::new(vec![DataField::new( "column2", DataType::UInt64, false )])), filters: vec![], limit: None }; let source_plan = table.read_plan(ctx.clone(), &scan_plan)?; ctx.try_set_partitions(source_plan.partitions)?; let stream = table.read(ctx).await?; let result = stream.try_collect::<Vec<_>>().await; assert_eq!(true, result.is_err()); if let Err(e) = result { assert_eq!( "Code: 1002, displayText = Parser error: Error while parsing value \'Shanghai\' for column 1 at line 1.", e.to_string() ); }; Ok(()) }
function_block-full_function
[ { "content": "fn build_boolean_column(values: &DataArrayRef) -> Result<Vec<Option<u8>>> {\n\n let values = as_boolean_array(values);\n\n\n\n Ok(match values.null_count() {\n\n //faster path\n\n 0 => (0..values.len())\n\n .map(|i| Some(values.value(i) as u8))\n\n .collect::<Vec<Option<u8>>>(),\n\n _ => (0..values.len())\n\n .map(|i| {\n\n if values.is_null(i) {\n\n None\n\n } else {\n\n Some(values.value(i) as u8)\n\n }\n\n })\n\n .collect::<Vec<Option<u8>>>()\n\n })\n\n}\n\n\n", "file_path": "fusequery/query/src/servers/clickhouse/clickhouse_stream.rs", "rank": 0, "score": 280063.5481783934 }, { "content": "fn build_string_column(values: &DataArrayRef) -> Result<Vec<Option<&str>>> {\n\n let values = as_string_array(values);\n\n Ok(match values.null_count() {\n\n //faster path\n\n 0 => (0..values.len())\n\n .map(|i| Some(values.value(i)))\n\n .collect::<Vec<Option<&str>>>(),\n\n _ => (0..values.len())\n\n .map(|i| {\n\n if values.is_null(i) {\n\n None\n\n } else {\n\n Some(values.value(i))\n\n }\n\n })\n\n .collect::<Vec<Option<&str>>>()\n\n })\n\n}\n", "file_path": "fusequery/query/src/servers/clickhouse/clickhouse_stream.rs", "rank": 1, "score": 280063.5481783934 }, { "content": "fn build_primitive_column<T>(values: &DataArrayRef) -> Result<Vec<Option<T::Native>>>\n\nwhere T: ArrowPrimitiveType {\n\n let values = as_primitive_array::<T>(values);\n\n\n\n Ok(match values.null_count() {\n\n //faster path\n\n 0 => (0..values.len())\n\n .map(|i| Some(values.value(i)))\n\n .collect::<Vec<Option<T::Native>>>(),\n\n _ => (0..values.len())\n\n .map(|i| {\n\n if values.is_null(i) {\n\n None\n\n } else {\n\n Some(values.value(i))\n\n }\n\n })\n\n .collect::<Vec<Option<T::Native>>>()\n\n })\n\n}\n\n\n", "file_path": "fusequery/query/src/servers/clickhouse/clickhouse_stream.rs", "rank": 2, "score": 265357.73387748783 }, { "content": "///! 
Convert a series of record batches into a table\n\nfn create_table(results: &[DataBlock]) -> Result<Table> {\n\n let mut table = Table::new();\n\n table.set_format(*format::consts::FORMAT_NO_LINESEP_WITH_TITLE);\n\n\n\n if results.is_empty() {\n\n return Ok(table);\n\n }\n\n\n\n let schema = results[0].schema();\n\n\n\n let mut header = Vec::new();\n\n for field in schema.fields() {\n\n header.push(Cell::new(&field.name()));\n\n }\n\n table.set_titles(Row::new(header));\n\n\n\n for batch in results {\n\n for row in 0..batch.num_rows() {\n\n let mut cells = Vec::new();\n\n for col in 0..batch.num_columns() {\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 3, "score": 247210.73317208345 }, { "content": "pub fn assert_blocks_eq(expect: Vec<&str>, blocks: &[DataBlock]) {\n\n let expected_lines: Vec<String> = expect.iter().map(|&s| s.into()).collect();\n\n let formatted = pretty_format_blocks(&blocks).unwrap();\n\n let actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n\n assert_eq!(\n\n expected_lines, actual_lines,\n\n \"\\n\\nexpected:\\n\\n{:#?}\\nactual:\\n\\n{:#?}\\n\\n\",\n\n expected_lines, actual_lines\n\n );\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 4, "score": 246125.1932401257 }, { "content": "pub fn assert_blocks_sorted_eq(expect: Vec<&str>, blocks: &[DataBlock]) {\n\n let mut expected_lines: Vec<String> = expect.iter().map(|&s| s.into()).collect();\n\n\n\n // sort except for header + footer\n\n let num_lines = expected_lines.len();\n\n if num_lines > 3 {\n\n expected_lines.as_mut_slice()[2..num_lines - 1].sort_unstable()\n\n }\n\n\n\n let formatted = pretty_format_blocks(&blocks).unwrap();\n\n let mut actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n\n // sort except for header + footer\n\n let num_lines = actual_lines.len();\n\n if num_lines > 3 {\n\n actual_lines.as_mut_slice()[2..num_lines - 1].sort_unstable()\n\n }\n\n\n\n assert_eq!(\n\n expected_lines, 
actual_lines,\n\n \"\\n\\nexpected:\\n\\n{:#?}\\nactual:\\n\\n{:#?}\\n\\n\",\n\n expected_lines, actual_lines\n\n );\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 5, "score": 243780.44239221542 }, { "content": "fn limit_push_down(upper_limit: Option<usize>, plan: &PlanNode) -> Result<PlanNode> {\n\n match plan {\n\n PlanNode::Limit(LimitPlan { n, input }) => {\n\n let smallest = upper_limit.map(|x| std::cmp::min(x, *n)).unwrap_or(*n);\n\n Ok(PlanNode::Limit(LimitPlan {\n\n n: smallest,\n\n input: Arc::new(limit_push_down(Some(smallest), input.as_ref())?)\n\n }))\n\n }\n\n _ => Ok(plan.clone())\n\n }\n\n}\n\n\n\nimpl IOptimizer for LimitPushDownOptimizer {\n\n fn name(&self) -> &str {\n\n \"LimitPushDown\"\n\n }\n\n\n\n fn optimize(&mut self, plan: &PlanNode) -> Result<PlanNode> {\n\n let mut rewritten_node = PlanNode::Empty(EmptyPlan {\n", "file_path": "fusequery/query/src/optimizers/optimizer_limit_push_down.rs", "rank": 6, "score": 230656.17934468808 }, { "content": "#[test]\n\nfn test_filter_plan() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .filter(col(\"number\").eq(lit(1i64)))?\n\n .project(vec![col(\"number\")])?\n\n .build()?;\n\n\n\n let expect = \"\\\n\n Projection: number:UInt64\\\n\n \\n Filter: (number = 1)\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", plan);\n\n\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_filter_test.rs", "rank": 7, "score": 193937.94622992078 }, { "content": "#[test]\n\nfn test_limit_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let limit = PlanNode::Limit(LimitPlan {\n\n n: 33,\n\n input: 
Arc::from(PlanBuilder::empty().build()?)\n\n });\n\n let expect = \"Limit: 33\";\n\n let actual = format!(\"{:?}\", limit);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_limit_test.rs", "rank": 8, "score": 193928.46215400932 }, { "content": "#[test]\n\nfn test_projection_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let projection = PlanNode::Projection(ProjectionPlan {\n\n expr: vec![col(\"a\")],\n\n schema: Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Utf8,\n\n false\n\n )])),\n\n input: Arc::from(PlanBuilder::empty().build()?)\n\n });\n\n let _ = projection.schema();\n\n let expect = \"Projection: a:Utf8\";\n\n let actual = format!(\"{:?}\", projection);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_projection_test.rs", "rank": 9, "score": 193885.6485829352 }, { "content": "#[test]\n\nfn test_data_block() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n\n\n use crate::DataBlock;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Int64,\n\n false\n\n )]));\n\n\n\n let block = DataBlock::create(schema.clone(), vec![Arc::new(Int64Array::from(vec![\n\n 1, 2, 3,\n\n ]))]);\n\n assert_eq!(&schema, block.schema());\n\n\n\n assert_eq!(3, block.num_rows());\n\n assert_eq!(1, block.num_columns());\n\n assert_eq!(3, block.column_by_name(\"a\")?.len());\n\n assert_eq!(3, block.column(0).len());\n\n\n\n Ok(())\n\n}\n", "file_path": "common/datablocks/src/data_block_test.rs", "rank": 10, "score": 193880.87718739588 }, { "content": "#[test]\n\nfn test_plan_display_indent() -> anyhow::Result<()> {\n\n // TODO test other plan type\n\n\n\n let schema = Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Int64,\n\n false\n\n )]));\n\n\n\n let mut options = 
HashMap::new();\n\n options.insert(\"opt_foo\".to_string(), \"opt_bar\".to_string());\n\n\n\n let plan_create = PlanNode::CreateTable(CreateTablePlan {\n\n if_not_exists: true,\n\n db: \"foo\".into(),\n\n table: \"bar\".into(),\n\n schema,\n\n engine: TableEngineType::JsonEachRaw,\n\n options\n\n });\n\n\n\n assert_eq!(\n\n \"Create table foo.bar Field { name: \\\"a\\\", data_type: Int64, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: None }, engine: JSON, if_not_exists:true, option: {\\\"opt_foo\\\": \\\"opt_bar\\\"}\",\n\n format!(\"{}\", plan_create.display_indent())\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_display_test.rs", "rank": 11, "score": 191763.2572650526 }, { "content": "#[test]\n\nfn test_data_block_sort() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n\n\n use crate::data_block_kernel::SortColumnDescription;\n\n use crate::*;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![\n\n DataField::new(\"a\", DataType::Int64, false),\n\n DataField::new(\"b\", DataType::Utf8, false),\n\n ]));\n\n\n\n let raw = DataBlock::create(schema.clone(), vec![\n\n Arc::new(Int64Array::from(vec![6, 4, 3, 2, 1, 7])),\n\n Arc::new(StringArray::from(vec![\"b1\", \"b2\", \"b3\", \"b4\", \"b5\", \"b6\"])),\n\n ]);\n\n\n\n {\n\n let options = vec![SortColumnDescription {\n", "file_path": "common/datablocks/src/data_block_kernel_test.rs", "rank": 12, "score": 189611.35448879507 }, { "content": "#[test]\n\nfn test_limit_push_down_optimizer() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::optimizers::*;\n\n use crate::sql::*;\n\n\n\n let ctx = crate::tests::try_create_context()?;\n\n\n\n let plan = PlanParser::create(ctx.clone()).build_from_sql(\n\n \"select (number+1) as c1, number as c2 from numbers_mt(10000) where (c1+c2+1)=1 limit 10\"\n\n )?;\n\n\n\n let mut limit_push_down = AliasPushDownOptimizer::create(ctx);\n\n let optimized = 
limit_push_down.optimize(&plan)?;\n\n let expect = \"\\\n\n Limit: 10\\\n\n \\n Projection: (number + 1) as c1:UInt64, number as c2:UInt64\\\n\n \\n Filter: ((((number + 1) + number) + 1) = 1)\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", optimized);\n\n assert_eq!(expect, actual);\n\n\n\n Ok(())\n\n}\n", "file_path": "fusequery/query/src/optimizers/optimizer_limit_push_down_test.rs", "rank": 13, "score": 187622.96659069665 }, { "content": "#[test]\n\nfn test_data_block_kernel_concat() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n\n\n use crate::*;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![\n\n DataField::new(\"a\", DataType::Int64, false),\n\n DataField::new(\"b\", DataType::Utf8, false),\n\n ]));\n\n\n\n let blocks = vec![\n\n DataBlock::create(schema.clone(), vec![\n\n Arc::new(Int64Array::from(vec![1, 2, 3])),\n\n Arc::new(StringArray::from(vec![\"b1\", \"b2\", \"b3\"])),\n\n ]),\n\n DataBlock::create(schema.clone(), vec![\n\n Arc::new(Int64Array::from(vec![4, 5, 6])),\n\n Arc::new(StringArray::from(vec![\"b1\", \"b2\", \"b3\"])),\n", "file_path": "common/datablocks/src/data_block_kernel_test.rs", "rank": 14, "score": 187576.98677751023 }, { "content": "#[test]\n\nfn test_data_block_kernel_take() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n\n\n use crate::*;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![\n\n DataField::new(\"a\", DataType::Int64, false),\n\n DataField::new(\"b\", DataType::Utf8, false),\n\n ]));\n\n\n\n let raw = DataBlock::create(schema.clone(), vec![\n\n Arc::new(Int64Array::from(vec![1, 2, 3])),\n\n Arc::new(StringArray::from(vec![\"b1\", \"b2\", \"b3\"])),\n\n ]);\n\n\n\n let take = DataBlock::block_take_by_indices(&raw, &[0, 2])?;\n\n assert_eq!(raw.schema(), take.schema());\n\n\n", "file_path": 
"common/datablocks/src/data_block_kernel_test.rs", "rank": 15, "score": 187576.98677751023 }, { "content": "type Input = Result<Vec<DataBlock>>;\n", "file_path": "fusequery/query/src/servers/mysql/endpoints/endpoint_on_query.rs", "rank": 16, "score": 179089.5613038007 }, { "content": "#[test]\n\nfn test_rewrite_projection_alias_plan() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct RewriteTest {\n\n name: &'static str,\n\n exprs: Vec<ExpressionPlan>,\n\n expect_str: &'static str,\n\n error_msg: &'static str\n\n }\n\n\n\n let tests = vec![\n\n RewriteTest{\n\n name : \"Cyclic\",\n\n exprs: vec![\n\n Box::new(ExpressionPlan::Function {\n\n op: \"plus\".to_string(),\n\n args: vec![\n", "file_path": "common/planners/src/plan_rewriter_test.rs", "rank": 17, "score": 178068.19374216092 }, { "content": "#[test]\n\nfn test_context_function_build_arg_from_ctx() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::functions::*;\n\n\n\n let ctx = crate::tests::try_create_context()?;\n\n\n\n // Ok.\n\n {\n\n let args = ContextFunction::build_args_from_ctx(\"database\", ctx.clone())?;\n\n assert_eq!(\"default\", format!(\"{:?}\", args[0]));\n\n }\n\n\n\n // Error.\n\n {\n\n let result = ContextFunction::build_args_from_ctx(\"databasexx\", ctx).is_err();\n\n assert_eq!(true, result);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "fusequery/query/src/functions/context_function_test.rs", "rank": 18, "score": 173922.73193142138 }, { "content": "#[test]\n\nfn test_filter_alias_push_down_optimizer() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::optimizers::*;\n\n use crate::sql::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n query: &'static str,\n\n expect: &'static str\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name:\"filter-alias-push-down\",\n\n query: \"select (number+1) as c1, number as c2 from numbers_mt(10000) 
where (c1+c2+1)=1\",\n\n expect: \"\\\n\n Projection: (number + 1) as c1:UInt64, number as c2:UInt64\\\n\n \\n Filter: ((((number + 1) + number) + 1) = 1)\\\n", "file_path": "fusequery/query/src/optimizers/optimizer_alias_push_down_test.rs", "rank": 19, "score": 173855.69166836172 }, { "content": "fn json_body() -> impl Filter<Extract = (ClusterNodeRequest,), Error = warp::Rejection> + Clone {\n\n // When accepting a body, we want a JSON body\n\n // (and to reject huge payloads)...\n\n warp::body::content_length_limit(1024 * 16).and(warp::body::json())\n\n}\n\n\n\nmod handlers {\n\n use log::info;\n\n\n\n use crate::api::http::v1::cluster::ClusterNodeRequest;\n\n use crate::clusters::ClusterRef;\n\n use crate::clusters::Node;\n\n\n\n pub async fn list_node(\n\n cluster: ClusterRef\n\n ) -> Result<impl warp::Reply, std::convert::Infallible> {\n\n // TODO(BohuTANG): error handler\n\n let nodes = cluster.get_nodes().unwrap();\n\n Ok(warp::reply::json(&nodes))\n\n }\n", "file_path": "fusequery/query/src/api/http/v1/cluster.rs", "rank": 20, "score": 173672.8745966183 }, { "content": "#[test]\n\nfn test_mem_engine_create_get_table() -> anyhow::Result<()> {\n\n // TODO check generated ver\n\n let eng = MemEngine::create();\n\n\n\n let mut eng = eng.lock().unwrap();\n\n\n\n let cmdfoo = CmdCreateDatabase {\n\n db_name: \"foo\".into(),\n\n db: Some(Db {\n\n db_id: -1,\n\n ver: -1,\n\n table_name_to_id: HashMap::new(),\n\n tables: HashMap::new()\n\n })\n\n };\n\n\n\n let cmd_table = CmdCreateTable {\n\n db_name: \"foo\".into(),\n\n table_name: \"t1\".into(),\n\n table: Some(Table {\n", "file_path": "fusestore/store/src/engine/mem_engine_test.rs", "rank": 21, "score": 173520.63408099258 }, { "content": "// Recursively walk an expression tree, collecting the unique set of column names\n\n// referenced in the expression\n\nfn expr_to_column_names(expr: &ExpressionPlan, accum: &mut HashSet<String>) -> Result<()> {\n\n let expressions = 
PlanRewriter::expression_plan_children(expr)?;\n\n\n\n let _expressions = expressions\n\n .iter()\n\n .map(|e| expr_to_column_names(e, accum))\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n if let ExpressionPlan::Column(name) = expr {\n\n accum.insert(name.clone());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "fusequery/query/src/optimizers/optimizer_projection_push_down.rs", "rank": 22, "score": 152381.86394783342 }, { "content": "// Recursively walk a list of expression trees, collecting the unique set of column\n\n// names referenced in the expression\n\nfn exprvec_to_column_names(expr: &[ExpressionPlan], accum: &mut HashSet<String>) -> Result<()> {\n\n for e in expr {\n\n expr_to_column_names(e, accum)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "fusequery/query/src/optimizers/optimizer_projection_push_down.rs", "rank": 23, "score": 152381.8287282979 }, { "content": "fn commit_hash() -> Option<String> {\n\n Command::new(\"git\")\n\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n\n .output()\n\n .ok()\n\n .and_then(|r| String::from_utf8(r.stdout).ok())\n\n}\n\n\n", "file_path": "fusequery/query/build.rs", "rank": 24, "score": 148506.8097613834 }, { "content": "fn commit_hash() -> Option<String> {\n\n Command::new(\"git\")\n\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n\n .output()\n\n .ok()\n\n .and_then(|r| String::from_utf8(r.stdout).ok())\n\n}\n\n\n", "file_path": "fusestore/store/build.rs", "rank": 25, "score": 148506.8097613834 }, { "content": "fn commit_date() -> Option<String> {\n\n Command::new(\"git\")\n\n .args(&[\"log\", \"-1\", \"--date=short\", \"--pretty=format:%cd\"])\n\n .output()\n\n .ok()\n\n .and_then(|r| String::from_utf8(r.stdout).ok())\n\n}\n\n\n", "file_path": "fusestore/store/build.rs", "rank": 26, "score": 148506.8097613834 }, { "content": "fn commit_date() -> Option<String> {\n\n Command::new(\"git\")\n\n .args(&[\"log\", \"-1\", \"--date=short\", \"--pretty=format:%cd\"])\n\n .output()\n\n .ok()\n\n .and_then(|r| 
String::from_utf8(r.stdout).ok())\n\n}\n", "file_path": "fusequery/query/build.rs", "rank": 27, "score": 148506.8097613834 }, { "content": "#[test]\n\nfn test_cluster() -> Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::clusters::cluster::Cluster;\n\n use crate::clusters::node::Node;\n\n\n\n let cluster = Cluster::empty();\n\n\n\n let node1 = Node {\n\n name: \"node1\".to_string(),\n\n priority: 5,\n\n address: \"127.0.0.1:9001\".to_string(),\n\n local: false\n\n };\n\n cluster.add_node(&node1)?;\n\n\n\n let node2 = Node {\n\n name: \"node2\".to_string(),\n\n priority: 5,\n\n address: \"127.0.0.1:9002\".to_string(),\n", "file_path": "fusequery/query/src/clusters/cluster_test.rs", "rank": 28, "score": 146902.8118681729 }, { "content": "#[test]\n\nfn test_cast_function() -> Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n\n\n", "file_path": "common/functions/src/expressions/cast_test.rs", "rank": 29, "score": 145237.41113756612 }, { "content": "#[test]\n\nfn test_aggregator_function() -> Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::DataBlock;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::aggregators::*;\n\n use crate::arithmetics::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n eval_nums: usize,\n\n args: Vec<Box<dyn IFunction>>,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataValue,\n", "file_path": "common/functions/src/aggregators/aggregator_test.rs", "rank": 30, "score": 145237.41113756612 }, { "content": "#[test]\n\nfn test_comparison_function() -> Result<()> 
{\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::comparisons::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n", "file_path": "common/functions/src/comparisons/comparison_test.rs", "rank": 31, "score": 145237.41113756612 }, { "content": "#[test]\n\nfn test_arithmetic_function() -> Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::DataBlock;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::arithmetics::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n", "file_path": "common/functions/src/arithmetics/arithmetic_test.rs", "rank": 32, "score": 145237.41113756612 }, { "content": "#[test]\n\nfn test_util_generate_parts() -> Result<()> {\n\n use common_planners::Partition;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::datasources::Common;\n\n\n\n {\n\n // deal with remainder\n\n let ps = Common::generate_parts(0, 3, 11);\n\n\n\n assert_eq!(3, ps.len());\n\n assert_eq!(\n\n Partition {\n\n name: \"11-0-3\".into(),\n\n version: 0\n\n },\n\n ps[0]\n\n );\n\n assert_eq!(\n\n Partition {\n", "file_path": "fusequery/query/src/datasources/common_test.rs", "rank": 33, "score": 143633.43672939812 }, { "content": "#[test]\n\nfn test_progress() -> anyhow::Result<()> {\n\n use crate::*;\n\n\n\n let mut progress = Progress::create();\n\n progress.add_rows(2);\n\n progress.add_bytes(10);\n\n\n\n assert_eq!(2, progress.get_values().read_rows);\n\n assert_eq!(10, progress.get_values().read_bytes);\n\n progress.reset();\n\n\n\n 
assert_eq!(0, progress.get_values().read_rows);\n\n assert_eq!(0, progress.get_values().read_bytes);\n\n\n\n Ok(())\n\n}\n", "file_path": "common/progress/src/progress_test.rs", "rank": 34, "score": 142949.42005056184 }, { "content": "#[test]\n\nfn test_having_plan() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .having(col(\"number\").eq(lit(1i64)))?\n\n .project(vec![col(\"number\")])?\n\n .build()?;\n\n\n\n let expect = \"\\\n\n Projection: number:UInt64\\\n\n \\n Having: (number = 1)\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", plan);\n\n\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_having_test.rs", "rank": 35, "score": 142949.42005056184 }, { "content": "pub fn flight_result_to_str(r: &arrow_flight::Result) -> String {\n\n match std::str::from_utf8(&r.body) {\n\n Ok(v) => v.to_string(),\n\n Err(_e) => format!(\"{:?}\", r.body)\n\n }\n\n}\n\n\n", "file_path": "common/flights/src/common.rs", "rank": 36, "score": 140311.86581070907 }, { "content": "#[test]\n\nfn test_scan_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let scan = PlanNode::Scan(ScanPlan {\n\n schema_name: \"scan_test\".to_string(),\n\n table_schema: Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Utf8,\n\n false\n\n )])),\n\n table_args: None,\n\n projection: None,\n\n projected_schema: Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Utf8,\n", "file_path": "common/planners/src/plan_scan_test.rs", "rank": 37, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_database_function() -> anyhow::Result<()> {\n\n use 
std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::udfs::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n", "file_path": "common/functions/src/udfs/database_test.rs", "rank": 38, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_lines_count() -> anyhow::Result<()> {\n\n use std::env;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::datasources::Common;\n\n\n\n let file = env::current_dir()?\n\n .join(\"../../tests/data/sample.csv\")\n\n .display()\n\n .to_string();\n\n\n\n let lines = Common::count_lines(std::fs::File::open(file.as_str())?)?;\n\n assert_eq!(6, lines);\n\n Ok(())\n\n}\n", "file_path": "fusequery/query/src/datasources/common_test.rs", "rank": 39, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_stage_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .aggregate_partial(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .stage(\"uuid-xx\".to_string(), StageState::AggregatorMerge)?\n\n .aggregate_final(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .project(vec![col(\"sumx\")])?\n\n .build()?;\n\n let explain = PlanNode::Explain(ExplainPlan {\n\n typ: ExplainType::Syntax,\n\n input: Arc::new(plan)\n\n });\n\n let expect = \"Projection: sumx:UInt64\\\n\n \\n AggregatorFinal: groupBy=[[]], aggr=[[sum([number]) as sumx]]\\\n\n \\n RedistributeStage[state: AggregatorMerge, id: 0]\\\n\n \\n AggregatorPartial: groupBy=[[]], aggr=[[sum([number]) as sumx]]\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], 
statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", explain);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_stage_test.rs", "rank": 40, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_column_function() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Boolean,\n\n false\n\n )]));\n\n let block = DataBlock::create(schema.clone(), vec![Arc::new(BooleanArray::from(vec![\n\n true, true, true, false,\n\n ]))]);\n\n\n\n // Ok.\n\n {\n", "file_path": "common/functions/src/function_column_test.rs", "rank": 41, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_logic_function() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::logics::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n func_name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n", "file_path": "common/functions/src/logics/logic_test.rs", "rank": 42, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_aggregator_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .aggregate_partial(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .aggregate_final(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .project(vec![col(\"sumx\")])?\n\n .build()?;\n\n let explain = PlanNode::Explain(ExplainPlan {\n\n typ: 
ExplainType::Syntax,\n\n input: Arc::new(plan)\n\n });\n\n let expect = \"Projection: sumx:UInt64\\\n\n \\n AggregatorFinal: groupBy=[[]], aggr=[[sum([number]) as sumx]]\\\n\n \\n AggregatorPartial: groupBy=[[]], aggr=[[sum([number]) as sumx]]\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", explain);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_aggregator_test.rs", "rank": 43, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_expression_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .filter(\n\n add(col(\"number\"), lit(1))\n\n .eq(lit(4))\n\n .and(col(\"number\").not_eq(lit(4)))\n\n .and(col(\"number\").lt(lit(4)))\n\n .and(col(\"number\").lt_eq(lit(4)))\n\n .and(col(\"number\").gt(lit(4)))\n\n .and(col(\"number\").gt_eq(lit(4)))\n\n )?\n\n .build()?;\n\n let explain = PlanNode::Explain(ExplainPlan {\n\n typ: ExplainType::Syntax,\n\n input: Arc::new(plan)\n\n });\n\n let expect =\"Filter: (((((((number + 1) = 4) and (number != 4)) and (number < 4)) and (number <= 4)) and (number > 4)) and (number >= 4))\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", explain);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_expression_test.rs", "rank": 44, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_explain_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = 
PlanBuilder::from(&source)\n\n .project(vec![col(\"number\").alias(\"c1\"), col(\"number\").alias(\"c2\")])?\n\n .filter(add(col(\"number\"), lit(1)).eq(lit(4)))?\n\n .having(add(col(\"number\"), lit(1)).eq(lit(4)))?\n\n .build()?;\n\n let explain = PlanNode::Explain(ExplainPlan {\n\n typ: ExplainType::Syntax,\n\n input: Arc::new(plan)\n\n });\n\n let expect =\"Having: ((number + 1) = 4)\\\n\n \\n Filter: ((number + 1) = 4)\\\n\n \\n Projection: number as c1:UInt64, number as c2:UInt64\\\n\n \\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: [read_rows: 10000, read_bytes: 80000]\";\n\n let actual = format!(\"{:?}\", explain);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_explain_test.rs", "rank": 45, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_config() -> common_exception::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::configs::Config;\n\n\n\n // Default.\n\n {\n\n let expect = Config {\n\n version: include_str!(concat!(env!(\"OUT_DIR\"), \"/version-info.txt\")).to_string(),\n\n log_level: \"debug\".to_string(),\n\n num_cpus: 8,\n\n mysql_handler_host: \"127.0.0.1\".to_string(),\n\n mysql_handler_port: 3307,\n\n mysql_handler_thread_num: 256,\n\n clickhouse_handler_host: \"127.0.0.1\".to_string(),\n\n clickhouse_handler_port: 9000,\n\n clickhouse_handler_thread_num: 256,\n\n flight_api_address: \"127.0.0.1:9090\".to_string(),\n\n http_api_address: \"127.0.0.1:8080\".to_string(),\n\n metric_api_address: \"127.0.0.1:7070\".to_string(),\n", "file_path": "fusequery/query/src/configs/config_test.rs", "rank": 46, "score": 139680.0449117871 }, { "content": "#[test]\n\nfn test_select_wildcard_plan() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let schema = Arc::new(DataSchema::new(vec![DataField::new(\n\n \"a\",\n\n DataType::Utf8,\n\n false\n\n 
)]));\n\n let plan = PlanBuilder::create(schema)\n\n .project(vec![col(\"a\")])?\n\n .build()?;\n\n let select = PlanNode::Select(SelectPlan {\n\n input: Arc::new(plan)\n\n });\n\n let expect = \"Projection: a:Utf8\";\n\n let actual = format!(\"{:?}\", select);\n\n assert_eq!(expect, actual);\n\n Ok(())\n\n}\n", "file_path": "common/planners/src/plan_select_test.rs", "rank": 47, "score": 138134.15991002892 }, { "content": "#[test]\n\nfn test_rewrite_expressions_plan() -> anyhow::Result<()> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .project(vec![col(\"number\").alias(\"x\"), col(\"number\").alias(\"y\")])?\n\n .filter(col(\"x\").eq(lit(1i64)))?\n\n .build()?;\n\n\n\n let actual = PlanRewriter::projection_to_map(&plan)?;\n\n let mut expect = HashMap::new();\n\n expect.insert(\"x\".to_string(), col(\"number\"));\n\n expect.insert(\"y\".to_string(), col(\"number\"));\n\n assert_eq!(expect, actual);\n\n\n\n let exprs = vec![ExpressionPlan::Function {\n\n op: \"multiply\".to_string(),\n\n args: vec![col(\"x\"), col(\"y\")]\n\n }];\n", "file_path": "common/planners/src/plan_rewriter_test.rs", "rank": 48, "score": 138134.15991002892 }, { "content": "#[test]\n\nfn test_plan_parser() -> anyhow::Result<()> {\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n sql: &'static str,\n\n expect: &'static str,\n\n error: &'static str\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name: \"cast-passed\",\n\n sql: \"select cast('1' as int)\",\n\n expect: \"Projection: cast(1 as Int32):Int32\\n ReadDataSource: scan partitions: [1], scan schema: [dummy:UInt8], statistics: [read_rows: 0, read_bytes: 0]\",\n\n error: \"\",\n\n },\n\n Test {\n\n name: \"database-passed\",\n\n sql: \"select database()\",\n\n expect: \"Projection: database([default]):Utf8\\n ReadDataSource: scan partitions: [1], scan schema: [dummy:UInt8], statistics: 
[read_rows: 0, read_bytes: 0]\",\n", "file_path": "fusequery/query/src/sql/plan_parser_test.rs", "rank": 49, "score": 138134.15991002892 }, { "content": "#[test]\n\nfn test_indices_other() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_arrow::arrow::compute::SortOptions;\n\n\n\n use crate::*;\n\n\n\n let a = Arc::new(UInt32Array::from(vec![None, Some(1), Some(2), Some(4)]));\n\n let b = Arc::new(UInt32Array::from(vec![None, Some(3)]));\n\n let c = DataArrayMerge::merge_indices(&[a], &[b], &[SortOptions::default()], None)?;\n\n\n\n // [0] false: when equal (None = None), rhs is picked\n\n // [1] true: None < 3\n\n // [2] true: 1 < 3\n\n // [3] true: 2 < 3\n\n // [4] false: 3 < 4\n\n // [5] true: rhs has finished => pick lhs\n\n assert_eq!(c, vec![false, true, true, true, false, true]);\n\n Ok(())\n\n}\n\n\n", "file_path": "common/datavalues/src/data_array_merge_sort_test.rs", "rank": 50, "score": 138134.15991002892 }, { "content": "#[test]\n\nfn test_to_type_name_function() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::udfs::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n", "file_path": "common/functions/src/udfs/to_type_name_test.rs", "rank": 51, "score": 136643.26479877759 }, { "content": "#[test]\n\nfn test_merge_array() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_arrow::arrow::compute::SortOptions;\n\n\n\n use crate::*;\n\n\n\n let a1: DataArrayRef = Arc::new(UInt32Array::from(vec![Some(1), Some(3), Some(5)]));\n\n let b1: DataArrayRef = Arc::new(UInt32Array::from(vec![Some(2), Some(4), Some(6)]));\n\n\n\n let option1 = SortOptions {\n\n descending: false,\n\n nulls_first: true\n\n };\n\n\n\n let a2: 
DataArrayRef = Arc::new(UInt32Array::from(vec![Some(1), Some(3), Some(5)]));\n\n let b2: DataArrayRef = Arc::new(UInt32Array::from(vec![Some(2), Some(4), Some(6)]));\n\n let option2 = SortOptions {\n\n descending: false,\n\n nulls_first: true\n", "file_path": "common/datavalues/src/data_array_merge_sort_test.rs", "rank": 52, "score": 136643.26479877759 }, { "content": "#[test]\n\nfn test_indices_many() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_arrow::arrow::compute::SortOptions;\n\n\n\n use crate::*;\n\n\n\n let a1 = Arc::new(UInt32Array::from(vec![None, Some(1), Some(3)]));\n\n let b1 = Arc::new(UInt32Array::from(vec![None, Some(2), Some(3), Some(5)]));\n\n let option1 = SortOptions {\n\n descending: false,\n\n nulls_first: true\n\n };\n\n\n\n let a2 = Arc::new(UInt32Array::from(vec![Some(2), Some(3), Some(5)]));\n\n let b2 = Arc::new(UInt32Array::from(vec![Some(1), Some(4), Some(6), Some(6)]));\n\n let option2 = SortOptions {\n\n descending: true,\n\n nulls_first: true\n\n };\n", "file_path": "common/datavalues/src/data_array_merge_sort_test.rs", "rank": 53, "score": 136643.26479877759 }, { "content": "#[test]\n\nfn test_udf_example_function() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_datablocks::*;\n\n use common_datavalues::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::udfs::*;\n\n use crate::*;\n\n\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n block: DataBlock,\n\n expect: DataArrayRef,\n\n error: &'static str,\n\n func: Box<dyn IFunction>\n\n }\n", "file_path": "common/functions/src/udfs/udf_example_test.rs", "rank": 54, "score": 136643.26479877759 }, { "content": "#[test]\n\nfn test_merge_array2() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use common_arrow::arrow::compute::SortOptions;\n\n\n\n use crate::*;\n\n\n\n let a1: DataArrayRef = Arc::new(UInt32Array::from(\n\n (1..500)\n\n .map(|s| Some(s as u32))\n\n 
.collect::<Vec<Option<u32>>>()\n\n ));\n\n let b1: DataArrayRef = Arc::new(UInt32Array::from(\n\n (500..1000)\n\n .map(|s| Some(s as u32))\n\n .collect::<Vec<Option<u32>>>()\n\n ));\n\n\n\n let option1 = SortOptions {\n\n descending: false,\n", "file_path": "common/datavalues/src/data_array_merge_sort_test.rs", "rank": 55, "score": 136643.26479877759 }, { "content": "#[test]\n\nfn test_plan_walker() -> std::result::Result<(), Box<dyn std::error::Error>> {\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::*;\n\n\n\n let source = Test::create().generate_source_plan_for_test(10000)?;\n\n let plan = PlanBuilder::from(&source)\n\n .aggregate_partial(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .aggregate_final(vec![sum(col(\"number\")).alias(\"sumx\")], vec![])?\n\n .project(vec![col(\"sumx\")])?\n\n .build()?;\n\n\n\n // PreOrder.\n\n {\n\n let mut actual: Vec<String> = vec![];\n\n plan.input().as_ref().walk_preorder(\n\n |plan| -> Result<bool, Box<dyn std::error::Error>> {\n\n actual.push(plan.name().to_string());\n\n return Ok(true);\n\n }\n", "file_path": "common/planners/src/plan_walker_test.rs", "rank": 56, "score": 136108.23405197597 }, { "content": "#[test]\n\nfn test_scheduler_plan_with_one_node() -> anyhow::Result<()> {\n\n use common_planners::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::planners::PlanScheduler;\n\n\n\n let ctx = crate::tests::try_create_context()?;\n\n\n\n let test_source = crate::tests::NumberTestData::create(ctx.clone());\n\n let source = test_source.number_read_source_plan_for_test(100000)?;\n\n\n\n let plan = PlanBuilder::from(&PlanNode::ReadSource(source))\n\n .filter(col(\"number\").eq(lit(1i64)))?\n\n .project(vec![col(\"number\")])?\n\n .build()?;\n\n\n\n let plans = PlanScheduler::reschedule(ctx, &plan)?;\n\n assert_eq!(0, plans.len());\n\n\n\n let expects = vec![\"Projection: number:UInt64\n\n Filter: (number = 1)\n\n ReadDataSource: scan partitions: [8], scan schema: [number:UInt64], statistics: 
[read_rows: 100000, read_bytes: 800000]\"];\n\n\n\n for (i, (_, plan)) in plans.iter().enumerate() {\n\n let actual = format!(\"{:?}\", plan);\n\n assert_eq!(expects[i], actual);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "fusequery/query/src/planners/plan_scheduler_test.rs", "rank": 57, "score": 135204.4767193215 }, { "content": "#[test]\n\nfn test_scheduler_plan_with_more_cpus_1_node() -> anyhow::Result<()> {\n\n use common_planners::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n use crate::planners::PlanScheduler;\n\n\n\n let ctx = crate::tests::try_create_context()?;\n\n let cpus = ctx.get_max_threads()?;\n\n\n\n // For more partitions generation.\n\n let ctx_more_cpu = crate::tests::try_create_context()?;\n\n ctx_more_cpu.set_max_threads(cpus * 40)?;\n\n let test_source = crate::tests::NumberTestData::create(ctx_more_cpu.clone());\n\n let source = test_source.number_read_source_plan_for_test(100000)?;\n\n\n\n let plan = PlanBuilder::from(&PlanNode::ReadSource(source))\n\n .filter(col(\"number\").eq(lit(1i64)))?\n\n .project(vec![col(\"number\")])?\n\n .build()?;\n\n\n", "file_path": "fusequery/query/src/planners/plan_scheduler_test.rs", "rank": 58, "score": 135204.4767193215 }, { "content": "#[test]\n\nfn test_mem_engine_create_database() -> anyhow::Result<()> {\n\n // TODO check generated ver\n\n let eng = MemEngine::create();\n\n\n\n let mut eng = eng.lock().unwrap();\n\n\n\n let cmdfoo = CmdCreateDatabase {\n\n db_name: \"foo\".into(),\n\n db: Some(Db {\n\n db_id: -1,\n\n ver: -1,\n\n table_name_to_id: HashMap::new(),\n\n tables: HashMap::new()\n\n })\n\n };\n\n let cmdbar = CmdCreateDatabase {\n\n db_name: \"bar\".into(),\n\n db: Some(Db {\n\n db_id: -1,\n\n ver: -1,\n", "file_path": "fusestore/store/src/engine/mem_engine_test.rs", "rank": 59, "score": 135204.4767193215 }, { "content": "#[test]\n\nfn test_data_value_kernel_concat_row_key() -> anyhow::Result<()> {\n\n use std::sync::Arc;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use 
super::*;\n\n\n\n #[allow(dead_code)]\n\n struct ArrayTest {\n\n name: &'static str,\n\n args: Vec<DataArrayRef>,\n\n expect: Vec<&'static str>,\n\n error: Vec<&'static str>\n\n }\n\n\n\n let tests = vec![ArrayTest {\n\n name: \"passed\",\n\n args: vec![\n\n Arc::new(StringArray::from(vec![\"x1\", \"x2\"])),\n\n Arc::new(Int8Array::from(vec![1, 2])),\n", "file_path": "common/datavalues/src/data_value_kernel_test.rs", "rank": 60, "score": 132472.66375952217 }, { "content": "pub fn try_create_context() -> Result<FuseQueryContextRef> {\n\n let ctx = FuseQueryContext::try_create()?;\n\n\n\n ctx.set_max_threads(8)?;\n\n Ok(ctx)\n\n}\n", "file_path": "fusequery/query/src/tests/context.rs", "rank": 61, "score": 131753.5028309214 }, { "content": "fn get_projected_schema(\n\n schema: &DataSchema,\n\n required_columns: &HashSet<String>,\n\n has_projection: bool\n\n) -> Result<DataSchemaRef> {\n\n // Discard non-existing columns, e.g. when the column derives from aggregation\n\n let mut projection: Vec<usize> = required_columns\n\n .iter()\n\n .map(|name| schema.index_of(name))\n\n .filter_map(ArrowResult::ok)\n\n .collect();\n\n if projection.is_empty() {\n\n if has_projection {\n\n // Ensure reading at lease one column\n\n projection.push(0);\n\n } else {\n\n // for table scan without projection\n\n // just return all columns\n\n projection = schema\n\n .fields()\n", "file_path": "fusequery/query/src/optimizers/optimizer_projection_push_down.rs", "rank": 62, "score": 128153.1971408524 }, { "content": "pub fn numeric_byte_size(dt: &DataType) -> Result<usize> {\n\n match dt {\n\n DataType::Int8 | DataType::UInt8 => Ok(1),\n\n DataType::Int16 | DataType::UInt16 | DataType::Float16 => Ok(2),\n\n DataType::Int32 | DataType::UInt32 | DataType::Float32 => Ok(4),\n\n DataType::Int64 | DataType::UInt64 | DataType::Float64 => Ok(8),\n\n _ => Result::Err(ErrorCodes::BadArguments(\n\n \"Function number_byte_size argument must be numeric types\".to_string()\n\n ))\n\n }\n\n}\n\n\n", 
"file_path": "common/datavalues/src/data_type.rs", "rank": 63, "score": 126018.90594203169 }, { "content": "fn optimize_plan(\n\n optimizer: &ProjectionPushDownOptimizer,\n\n plan: &PlanNode,\n\n required_columns: &HashSet<String>,\n\n has_projection: bool\n\n) -> Result<PlanNode> {\n\n let mut new_required_columns = required_columns.clone();\n\n match plan {\n\n PlanNode::Projection(ProjectionPlan {\n\n expr,\n\n schema: _,\n\n input\n\n }) => {\n\n exprvec_to_column_names(expr, &mut new_required_columns)?;\n\n let new_input = optimize_plan(optimizer, &input, &new_required_columns, true)?;\n\n let mut cloned_plan = plan.clone();\n\n cloned_plan.set_input(&new_input)?;\n\n Ok(cloned_plan)\n\n }\n\n PlanNode::Filter(FilterPlan { predicate, input }) => {\n", "file_path": "fusequery/query/src/optimizers/optimizer_projection_push_down.rs", "rank": 64, "score": 118737.86356152158 }, { "content": "fn read_file(\n\n file: &str,\n\n tx: Sender<Option<Result<DataBlock>>>,\n\n projection: &[usize]\n\n) -> Result<()> {\n\n let file_reader = File::open(file).map_err(|e| ErrorCodes::CannotReadFile(e.to_string()))?;\n\n let file_reader = SerializedFileReader::new(file_reader)\n\n .map_err(|e| ErrorCodes::ParquetError(e.to_string()))?;\n\n let mut arrow_reader = ParquetFileArrowReader::new(Arc::new(file_reader));\n\n\n\n // TODO projection, row filters, batch size configurable, schema judgement\n\n let batch_size = 2048;\n\n let mut batch_reader = arrow_reader\n\n .get_record_reader_by_columns(projection.to_owned(), batch_size)\n\n .map_err(|exception| ErrorCodes::ParquetError(exception.to_string()))?;\n\n\n\n loop {\n\n match batch_reader.next() {\n\n Some(Ok(batch)) => {\n\n tx.send(Some(Ok(batch.try_into()?)))\n", "file_path": "fusequery/query/src/datasources/local/parquet_table.rs", "rank": 65, "score": 118407.33955164728 }, { "content": "// Table for <group_key, indices>\n\ntype GroupIndicesTable = HashMap<Vec<u8>, Vec<u32>, ahash::RandomState>;\n", "file_path": 
"fusequery/query/src/pipelines/transforms/transform_groupby_partial.rs", "rank": 66, "score": 117503.76360869552 }, { "content": "/// Coercion rules for dictionary values (aka the type of the dictionary itself)\n\nfn dictionary_value_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n numerical_coercion(lhs_type, rhs_type).or_else(|_| string_coercion(lhs_type, rhs_type))\n\n}\n\n\n", "file_path": "common/datavalues/src/data_type.rs", "rank": 67, "score": 117110.41964984577 }, { "content": "fn criterion_benchmark_filter_query(c: &mut Criterion) {\n\n let queries = vec![\"SELECT number FROM numbers_mt(10000000) WHERE number>100 AND number<200\"];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_filter_query);\n\ncriterion_main!(benches);\n", "file_path": "fusequery/query/benches/suites/bench_filter_query_sql.rs", "rank": 68, "score": 116999.69216053496 }, { "content": "fn criterion_benchmark_limit_query(c: &mut Criterion) {\n\n let queries = vec![\"SELECT number FROM numbers_mt(10000000) LIMIT 1\"];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_limit_query);\n\ncriterion_main!(benches);\n", "file_path": "fusequery/query/benches/suites/bench_limit_query_sql.rs", "rank": 69, "score": 116990.5280049268 }, { "content": "pub fn data_array_cast(array: &ArrayRef, to_type: &DataType) -> Result<ArrayRef> {\n\n arrow::compute::cast(&array, &to_type).map_err(ErrorCodes::from_arrow)\n\n}\n", "file_path": "common/datavalues/src/data_array.rs", "rank": 70, "score": 116161.33710086945 }, { "content": "/// Coercion rules for Dictionaries: the type that both lhs and rhs\n\n/// can be casted to for the purpose of a computation.\n\n///\n\n/// It would likely be preferable to cast primitive values to\n\n/// dictionaries, and thus avoid unpacking dictionary as well as doing\n\n/// faster 
comparisons. However, the arrow compute kernels (e.g. eq)\n\n/// don't have DictionaryArray support yet, so fall back to unpacking\n\n/// the dictionaries\n\npub fn dictionary_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n match (lhs_type, rhs_type) {\n\n (\n\n DataType::Dictionary(_lhs_index_type, lhs_value_type),\n\n DataType::Dictionary(_rhs_index_type, rhs_value_type)\n\n ) => dictionary_value_coercion(lhs_value_type, rhs_value_type),\n\n (DataType::Dictionary(_index_type, value_type), _) => {\n\n dictionary_value_coercion(value_type, rhs_type)\n\n }\n\n (_, DataType::Dictionary(_index_type, value_type)) => {\n\n dictionary_value_coercion(lhs_type, value_type)\n\n }\n\n _ => Result::Err(ErrorCodes::BadDataValueType(format!(\n\n \"Can't construct type from {} and {}\",\n\n lhs_type, rhs_type\n\n )))\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/data_type.rs", "rank": 71, "score": 114946.22156988672 }, { "content": "/// Coercion rule for numerical types: The type that both lhs and rhs\n\n/// can be casted to for numerical calculation, while maintaining\n\n/// maximum precision\n\npub fn numerical_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n let has_float = is_floating(lhs_type) || is_floating(rhs_type);\n\n let has_integer = is_integer(lhs_type) || is_integer(rhs_type);\n\n let has_signed = is_signed_numeric(lhs_type) || is_signed_numeric(rhs_type);\n\n let has_unsigned = !is_signed_numeric(lhs_type) || !is_signed_numeric(rhs_type);\n\n\n\n let size_of_lhs = numeric_byte_size(lhs_type)?;\n\n let size_of_rhs = numeric_byte_size(rhs_type)?;\n\n\n\n let max_size_of_unsigned_integer = cmp::max(\n\n if is_signed_numeric(lhs_type) {\n\n 0\n\n } else {\n\n size_of_lhs\n\n },\n\n if is_signed_numeric(rhs_type) {\n\n 0\n\n } else {\n\n size_of_rhs\n\n }\n", "file_path": "common/datavalues/src/data_type.rs", "rank": 72, "score": 114946.22156988672 }, { "content": "/// Coercion rules for Strings: the 
type that both lhs and rhs can be\n\n/// casted to for the purpose of a string computation\n\npub fn string_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n match (lhs_type, rhs_type) {\n\n (Utf8, Utf8) => Ok(Utf8),\n\n (LargeUtf8, Utf8) => Ok(LargeUtf8),\n\n (Utf8, LargeUtf8) => Ok(LargeUtf8),\n\n (LargeUtf8, LargeUtf8) => Ok(LargeUtf8),\n\n _ => Result::Err(ErrorCodes::BadDataValueType(format!(\n\n \"Can't construct type from {} and {}\",\n\n lhs_type, rhs_type\n\n )))\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/data_type.rs", "rank": 73, "score": 114946.22156988672 }, { "content": "// coercion rules for equality operations. This is a superset of all numerical coercion rules.\n\npub fn equal_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n if lhs_type == rhs_type {\n\n // same type => equality is possible\n\n return Ok(lhs_type.clone());\n\n }\n\n\n\n numerical_coercion(lhs_type, rhs_type).or_else(|_| dictionary_coercion(lhs_type, rhs_type))\n\n}\n", "file_path": "common/datavalues/src/data_type.rs", "rank": 74, "score": 114946.22156988672 }, { "content": "// Table for <group_key, indices>\n\ntype GroupFuncTable = RwLock<HashMap<Vec<u8>, Vec<Box<dyn IFunction>>, ahash::RandomState>>;\n\n\n\npub struct GroupByFinalTransform {\n\n aggr_exprs: Vec<ExpressionPlan>,\n\n schema: DataSchemaRef,\n\n input: Arc<dyn IProcessor>,\n\n groups: GroupFuncTable\n\n}\n\n\n\nimpl GroupByFinalTransform {\n\n pub fn create(\n\n schema: DataSchemaRef,\n\n aggr_exprs: Vec<ExpressionPlan>,\n\n _group_exprs: Vec<ExpressionPlan>\n\n ) -> Self {\n\n Self {\n\n aggr_exprs,\n\n schema,\n\n input: Arc::new(EmptyProcessor::create()),\n\n groups: RwLock::new(HashMap::default())\n", "file_path": "fusequery/query/src/pipelines/transforms/transform_groupby_final.rs", "rank": 75, "score": 110265.44995959516 }, { "content": "type GroupFuncTable = RwLock<HashMap<Vec<u8>, Vec<Box<dyn IFunction>>, ahash::RandomState>>;\n\n\n\npub 
struct GroupByPartialTransform {\n\n aggr_exprs: Vec<ExpressionPlan>,\n\n group_exprs: Vec<ExpressionPlan>,\n\n schema: DataSchemaRef,\n\n input: Arc<dyn IProcessor>,\n\n groups: GroupFuncTable\n\n}\n\n\n\nimpl GroupByPartialTransform {\n\n pub fn create(\n\n schema: DataSchemaRef,\n\n aggr_exprs: Vec<ExpressionPlan>,\n\n group_exprs: Vec<ExpressionPlan>\n\n ) -> Self {\n\n Self {\n\n aggr_exprs,\n\n group_exprs,\n\n schema,\n", "file_path": "fusequery/query/src/pipelines/transforms/transform_groupby_partial.rs", "rank": 76, "score": 110260.13263251632 }, { "content": "pub trait IFunction: fmt::Display + Sync + Send + DynClone {\n\n fn name(&self) -> &str;\n\n fn return_type(&self, input_schema: &DataSchema) -> Result<DataType>;\n\n fn nullable(&self, input_schema: &DataSchema) -> Result<bool>;\n\n fn eval(&self, block: &DataBlock) -> Result<DataColumnarValue>;\n\n fn set_depth(&mut self, _depth: usize) {}\n\n\n\n fn accumulate(&mut self, _block: &DataBlock) -> Result<()> {\n\n Result::Err(ErrorCodes::UnImplement(format!(\n\n \"Function Error: '{}' accumulate unimplemented\",\n\n self.name()\n\n )))\n\n }\n\n\n\n fn accumulate_result(&self) -> Result<Vec<DataValue>> {\n\n Result::Err(ErrorCodes::UnImplement(format!(\n\n \"Function Error: '{}' accumulate_result unimplemented\",\n\n self.name()\n\n )))\n\n }\n", "file_path": "common/functions/src/function.rs", "rank": 77, "score": 108394.89243016178 }, { "content": "\n\n let mut limited_columns = Vec::with_capacity(block.num_columns());\n\n for i in 0..block.num_columns() {\n\n limited_columns.push(arrow::compute::limit(block.column(i), keep));\n\n }\n\n Ok(Some(DataBlock::create(\n\n block.schema().clone(),\n\n limited_columns\n\n )))\n\n }\n\n }\n\n}\n\n\n\nimpl Stream for LimitStream {\n\n type Item = Result<DataBlock>;\n\n\n\n fn poll_next(\n\n mut self: std::pin::Pin<&mut Self>,\n\n ctx: &mut Context<'_>\n\n ) -> Poll<Option<Self::Item>> {\n\n self.input.poll_next_unpin(ctx).map(|x| match x {\n\n Some(Ok(ref v)) 
=> self.limit(v).transpose(),\n\n other => other\n\n })\n\n }\n\n}\n", "file_path": "common/streams/src/stream_limit.rs", "rank": 78, "score": 106609.68269067415 }, { "content": "\n\nimpl LimitStream {\n\n pub fn try_create(input: SendableDataBlockStream, limit: usize) -> Result<Self> {\n\n Ok(LimitStream {\n\n input,\n\n limit,\n\n current: 0\n\n })\n\n }\n\n\n\n pub fn limit(&mut self, block: &DataBlock) -> Result<Option<DataBlock>> {\n\n let rows = block.num_rows();\n\n if self.current == self.limit {\n\n Ok(None)\n\n } else if (self.current + rows) < self.limit {\n\n self.current += rows;\n\n Ok(Some(block.clone()))\n\n } else {\n\n let keep = self.limit - self.current;\n\n self.current = self.limit;\n", "file_path": "common/streams/src/stream_limit.rs", "rank": 79, "score": 106608.77799215124 }, { "content": "// Copyright 2020-2021 The Datafuse Authors.\n\n//\n\n// SPDX-License-Identifier: Apache-2.0.\n\n\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\n\n\nuse common_arrow::arrow;\n\nuse common_datablocks::DataBlock;\n\nuse common_exception::Result;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\n\n\nuse crate::SendableDataBlockStream;\n\n\n\npub struct LimitStream {\n\n input: SendableDataBlockStream,\n\n limit: usize,\n\n current: usize\n\n}\n", "file_path": "common/streams/src/stream_limit.rs", "rank": 80, "score": 106595.0391338918 }, { "content": "SELECT number, number + 3 FROM numbers_mt (1000) where number > 5 order by number desc limit 3;\n", "file_path": "tests/queries/0_stateless/03_0004_select_order_by.sql", "rank": 81, "score": 99567.55412826466 }, { "content": " file: String,\n\n schema: DataSchemaRef\n\n}\n\n\n\nimpl CsvTableStream {\n\n pub fn try_create(\n\n ctx: FuseQueryContextRef,\n\n schema: DataSchemaRef,\n\n file: String\n\n ) -> Result<Self> {\n\n Ok(CsvTableStream { ctx, file, schema })\n\n }\n\n\n\n pub fn try_get_one_block(&self) -> Result<Option<DataBlock>> {\n\n let partitions = self.ctx.try_get_partitions(1)?;\n\n if 
partitions.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let part = partitions[0].clone();\n", "file_path": "fusequery/query/src/datasources/local/csv_table_stream.rs", "rank": 82, "score": 90610.12514736861 }, { "content": " .next()\n\n .map(|record| {\n\n record\n\n .map_err(ErrorCodes::from_arrow)\n\n .and_then(|record| record.try_into())\n\n })\n\n .map(|data_block| data_block.map(Some))\n\n .unwrap_or_else(|| Ok(None))\n\n }\n\n}\n\n\n\nimpl Stream for CsvTableStream {\n\n type Item = Result<DataBlock>;\n\n\n\n fn poll_next(\n\n self: std::pin::Pin<&mut Self>,\n\n _: &mut std::task::Context<'_>\n\n ) -> Poll<Option<Self::Item>> {\n\n let block = self.try_get_one_block()?;\n\n Poll::Ready(block.map(Ok))\n\n }\n\n}\n", "file_path": "fusequery/query/src/datasources/local/csv_table_stream.rs", "rank": 83, "score": 90599.24315723263 }, { "content": "// Copyright 2020-2021 The Datafuse Authors.\n\n//\n\n// SPDX-License-Identifier: Apache-2.0.\n\n\n\nuse std::convert::TryInto;\n\nuse std::fs::File;\n\nuse std::task::Poll;\n\n\n\nuse anyhow::Context;\n\nuse common_arrow::arrow::csv;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_exception::ErrorCodes;\n\nuse common_exception::Result;\n\nuse futures::Stream;\n\n\n\nuse crate::sessions::FuseQueryContextRef;\n\n\n\npub struct CsvTableStream {\n\n ctx: FuseQueryContextRef,\n", "file_path": "fusequery/query/src/datasources/local/csv_table_stream.rs", "rank": 84, "score": 90593.38013014202 }, { "content": " let names: Vec<_> = part.name.split('-').collect();\n\n let begin: usize = names[1].parse().map_err(ErrorCodes::from_parse)?;\n\n let end: usize = names[2].parse().map_err(ErrorCodes::from_parse)?;\n\n let bounds = Some((begin, end));\n\n let block_size = end - begin;\n\n\n\n let file = File::open(self.file.clone())\n\n .with_context(|| format!(\"Failed to read csv file:{}\", self.file.clone()))\n\n .map_err(ErrorCodes::from_anyhow)?;\n\n let mut reader: csv::Reader<File> = 
csv::Reader::new(\n\n file,\n\n self.schema.clone(),\n\n false,\n\n None,\n\n block_size,\n\n bounds,\n\n None\n\n );\n\n\n\n reader\n", "file_path": "fusequery/query/src/datasources/local/csv_table_stream.rs", "rank": 85, "score": 90590.19835806843 }, { "content": "select toTypeName(number) from numbers(100) limit 1;\n", "file_path": "tests/queries/0_stateless/02_0001_function_to_type_name.sql", "rank": 86, "score": 90478.00516984158 }, { "content": "#[derive(Debug, Clone)]\n\nstruct BlockRange {\n\n begin: u64,\n\n end: u64\n\n}\n\n\n\npub struct NumbersStream {\n\n ctx: FuseQueryContextRef,\n\n schema: DataSchemaRef,\n\n block_index: usize,\n\n blocks: Vec<BlockRange>\n\n}\n\n\n\nimpl NumbersStream {\n\n pub fn create(ctx: FuseQueryContextRef, schema: DataSchemaRef) -> Self {\n\n NumbersStream {\n\n ctx,\n\n schema,\n\n block_index: 0,\n\n blocks: vec![]\n\n }\n", "file_path": "fusequery/query/src/datasources/system/numbers_stream.rs", "rank": 87, "score": 89165.34949904456 }, { "content": "select 1 + 2 + 3 as x, x + 2 + 3 as y;\n", "file_path": "tests/queries/0_stateless/03_0000_select_aliases.sql", "rank": 88, "score": 79558.7158285634 }, { "content": "fn main() {\n\n build_proto();\n\n}\n\n\n", "file_path": "common/flights/build.rs", "rank": 89, "score": 78821.27398563092 }, { "content": "fn main() {\n\n if Path::new(\".git/HEAD\").exists() {\n\n println!(\"cargo:rerun-if-changed=.git/HEAD\");\n\n }\n\n\n\n println!(\"cargo:rerun-if-env-changed=CFG_RELEASE_CHANNEL\");\n\n if option_env!(\"CFG_RELEASE_CHANNEL\").map_or(true, |c| c == \"nightly\" || c == \"dev\") {\n\n println!(\"cargo:rustc-cfg=nightly\");\n\n }\n\n\n\n create_version_info();\n\n}\n\n\n", "file_path": "fusequery/query/build.rs", "rank": 90, "score": 78821.27398563092 }, { "content": "fn main() {\n\n if Path::new(\".git/HEAD\").exists() {\n\n println!(\"cargo:rerun-if-changed=.git/HEAD\");\n\n }\n\n\n\n create_version_info();\n\n build_proto();\n\n}\n\n\n", "file_path": 
"fusestore/store/build.rs", "rank": 91, "score": 78821.27398563092 }, { "content": "fn build_proto() {\n\n let manifest_dir =\n\n env::var(\"CARGO_MANIFEST_DIR\").expect(\"CARGO_MANIFEST_DIR env variable unset\");\n\n\n\n let proto_dir = Path::new(&manifest_dir).join(\"proto\");\n\n let protos = [\n\n &Path::new(&proto_dir).join(Path::new(\"queryflight.proto\")),\n\n &Path::new(&proto_dir).join(Path::new(\"storeflight.proto\"))\n\n ];\n\n\n\n for proto in protos.iter() {\n\n println!(\"cargo:rerun-if-changed={}\", proto.to_str().unwrap());\n\n }\n\n tonic_build::configure()\n\n .compile(&protos, &[&proto_dir])\n\n .unwrap();\n\n}\n", "file_path": "common/flights/build.rs", "rank": 92, "score": 77807.38032544087 }, { "content": "fn build_proto() {\n\n let manifest_dir =\n\n env::var(\"CARGO_MANIFEST_DIR\").expect(\"CARGO_MANIFEST_DIR env variable unset\");\n\n\n\n let proto_dir = Path::new(&manifest_dir).join(\"proto\");\n\n let protos = [&Path::new(&proto_dir).join(Path::new(\"store_meta.proto\"))];\n\n\n\n for proto in protos.iter() {\n\n println!(\"cargo:rerun-if-changed={}\", proto.to_str().unwrap());\n\n }\n\n tonic_build::configure()\n\n .compile(&protos, &[&proto_dir])\n\n .unwrap();\n\n}\n", "file_path": "fusestore/store/build.rs", "rank": 93, "score": 77807.38032544087 }, { "content": "fn create_version_info() {\n\n let out_dir = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out_dir.join(\"version-info.txt\"))\n\n .unwrap()\n\n .write_all(commit_info().as_bytes())\n\n .unwrap();\n\n}\n\n\n", "file_path": "fusestore/store/build.rs", "rank": 94, "score": 76834.68147949925 }, { "content": "fn create_version_info() {\n\n let out_dir = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out_dir.join(\"version-info.txt\"))\n\n .unwrap()\n\n .write_all(commit_info().as_bytes())\n\n .unwrap();\n\n}\n\n\n", "file_path": "fusequery/query/build.rs", "rank": 95, "score": 76834.68147949925 }, { "content": "#[test]\n\nfn test_rwlock() 
{\n\n use std::sync::Arc;\n\n use std::thread;\n\n\n\n use crate::RwLock;\n\n\n\n let a = 7u8;\n\n let rwlock = Arc::new(RwLock::new(a));\n\n let rwlock2 = rwlock.clone();\n\n let rwlock3 = rwlock.clone();\n\n\n\n let thread1 = thread::spawn(move || {\n\n let mut b = rwlock2.write();\n\n *b = 8;\n\n });\n\n let thread2 = thread::spawn(move || {\n\n let mut b = rwlock3.write();\n\n *b = 9;\n\n });\n\n\n\n let _ = thread1.join();\n\n let _ = thread2.join();\n\n\n\n let _read = rwlock.read();\n\n}\n", "file_path": "common/infallible/src/rwlock_test.rs", "rank": 96, "score": 75900.71679912393 }, { "content": "#[test]\n\nfn test_mutex() {\n\n use std::sync::Arc;\n\n use std::thread;\n\n\n\n use crate::Mutex;\n\n let a = 7u8;\n\n let mutex = Arc::new(Mutex::new(a));\n\n let mutex2 = mutex.clone();\n\n let mutex3 = mutex.clone();\n\n\n\n let thread1 = thread::spawn(move || {\n\n let mut b = mutex2.lock();\n\n *b = 8;\n\n });\n\n let thread2 = thread::spawn(move || {\n\n let mut b = mutex3.lock();\n\n *b = 9;\n\n });\n\n\n\n let _ = thread1.join();\n\n let _ = thread2.join();\n\n\n\n let _locked = mutex.lock();\n\n}\n", "file_path": "common/infallible/src/mutex_test.rs", "rank": 97, "score": 75900.71679912393 }, { "content": "fn channel() -> String {\n\n if let Ok(channel) = env::var(\"CFG_RELEASE_CHANNEL\") {\n\n channel\n\n } else {\n\n \"nightly\".to_owned()\n\n }\n\n}\n\n\n", "file_path": "fusequery/query/build.rs", "rank": 98, "score": 75628.8902657618 }, { "content": "select 1 + x, x + 2, 3 x;\n", "file_path": "tests/queries/0_stateless/03_0000_select_aliases.sql", "rank": 99, "score": 75006.65542815097 } ]
Rust
crates/bevy_winit/src/winit_windows.rs
Schell-0x52/bevy
39224eed1716a9a069ff2f19c4dc93dcd9f88452
use bevy_math::IVec2; use bevy_utils::HashMap; use bevy_window::{Window, WindowDescriptor, WindowId, WindowMode}; use raw_window_handle::HasRawWindowHandle; use winit::dpi::LogicalSize; #[derive(Debug, Default)] pub struct WinitWindows { pub windows: HashMap<winit::window::WindowId, winit::window::Window>, pub window_id_to_winit: HashMap<WindowId, winit::window::WindowId>, pub winit_to_window_id: HashMap<winit::window::WindowId, WindowId>, } impl WinitWindows { pub fn create_window( &mut self, event_loop: &winit::event_loop::EventLoopWindowTarget<()>, window_id: WindowId, window_descriptor: &WindowDescriptor, ) -> Window { #[cfg(target_os = "windows")] let mut winit_window_builder = { use winit::platform::windows::WindowBuilderExtWindows; winit::window::WindowBuilder::new().with_drag_and_drop(false) }; #[cfg(not(target_os = "windows"))] let mut winit_window_builder = winit::window::WindowBuilder::new(); winit_window_builder = match window_descriptor.mode { WindowMode::BorderlessFullscreen => winit_window_builder.with_fullscreen(Some( winit::window::Fullscreen::Borderless(event_loop.primary_monitor()), )), WindowMode::Fullscreen { use_size } => winit_window_builder.with_fullscreen(Some( winit::window::Fullscreen::Exclusive(match use_size { true => get_fitting_videomode( &event_loop.primary_monitor().unwrap(), window_descriptor.width as u32, window_descriptor.height as u32, ), false => get_best_videomode(&event_loop.primary_monitor().unwrap()), }), )), _ => { let WindowDescriptor { width, height, position, scale_factor_override, .. 
} = window_descriptor; if let Some(position) = position { if let Some(sf) = scale_factor_override { winit_window_builder = winit_window_builder.with_position( winit::dpi::LogicalPosition::new( position[0] as f64, position[1] as f64, ) .to_physical::<f64>(*sf), ); } else { winit_window_builder = winit_window_builder.with_position(winit::dpi::LogicalPosition::new( position[0] as f64, position[1] as f64, )); } } if let Some(sf) = scale_factor_override { winit_window_builder.with_inner_size( winit::dpi::LogicalSize::new(*width, *height).to_physical::<f64>(*sf), ) } else { winit_window_builder .with_inner_size(winit::dpi::LogicalSize::new(*width, *height)) } } .with_resizable(window_descriptor.resizable) .with_decorations(window_descriptor.decorations), }; let constraints = window_descriptor.resize_constraints.check_constraints(); let min_inner_size = LogicalSize { width: constraints.min_width, height: constraints.min_height, }; let max_inner_size = LogicalSize { width: constraints.max_width, height: constraints.max_height, }; let winit_window_builder = if constraints.max_width.is_finite() && constraints.max_height.is_finite() { winit_window_builder .with_min_inner_size(min_inner_size) .with_max_inner_size(max_inner_size) } else { winit_window_builder.with_min_inner_size(min_inner_size) }; #[allow(unused_mut)] let mut winit_window_builder = winit_window_builder.with_title(&window_descriptor.title); #[cfg(target_arch = "wasm32")] { use wasm_bindgen::JsCast; use winit::platform::web::WindowBuilderExtWebSys; if let Some(selector) = &window_descriptor.canvas { let window = web_sys::window().unwrap(); let document = window.document().unwrap(); let canvas = document .query_selector(&selector) .expect("Cannot query for canvas element."); if let Some(canvas) = canvas { let canvas = canvas.dyn_into::<web_sys::HtmlCanvasElement>().ok(); winit_window_builder = winit_window_builder.with_canvas(canvas); } else { panic!("Cannot find element: {}.", selector); } } } let winit_window = 
winit_window_builder.build(event_loop).unwrap(); match winit_window.set_cursor_grab(window_descriptor.cursor_locked) { Ok(_) => {} Err(winit::error::ExternalError::NotSupported(_)) => {} Err(err) => Err(err).unwrap(), } winit_window.set_cursor_visible(window_descriptor.cursor_visible); self.window_id_to_winit.insert(window_id, winit_window.id()); self.winit_to_window_id.insert(winit_window.id(), window_id); #[cfg(target_arch = "wasm32")] { use winit::platform::web::WindowExtWebSys; if window_descriptor.canvas.is_none() { let canvas = winit_window.canvas(); let window = web_sys::window().unwrap(); let document = window.document().unwrap(); let body = document.body().unwrap(); body.append_child(&canvas) .expect("Append canvas to HTML body."); } } let position = winit_window .outer_position() .ok() .map(|position| IVec2::new(position.x, position.y)); let inner_size = winit_window.inner_size(); let scale_factor = winit_window.scale_factor(); let raw_window_handle = winit_window.raw_window_handle(); self.windows.insert(winit_window.id(), winit_window); Window::new( window_id, window_descriptor, inner_size.width, inner_size.height, scale_factor, position, raw_window_handle, ) } pub fn get_window(&self, id: WindowId) -> Option<&winit::window::Window> { self.window_id_to_winit .get(&id) .and_then(|id| self.windows.get(id)) } pub fn get_window_id(&self, id: winit::window::WindowId) -> Option<WindowId> { self.winit_to_window_id.get(&id).cloned() } } pub fn get_fitting_videomode( monitor: &winit::monitor::MonitorHandle, width: u32, height: u32, ) -> winit::monitor::VideoMode { let mut modes = monitor.video_modes().collect::<Vec<_>>(); fn abs_diff(a: u32, b: u32) -> u32 { if a > b { return a - b; } b - a } modes.sort_by(|a, b| { use std::cmp::Ordering::*; match abs_diff(a.size().width, width).cmp(&abs_diff(b.size().width, width)) { Equal => { match abs_diff(a.size().height, height).cmp(&abs_diff(b.size().height, height)) { Equal => b.refresh_rate().cmp(&a.refresh_rate()), 
default => default, } } default => default, } }); modes.first().unwrap().clone() } pub fn get_best_videomode(monitor: &winit::monitor::MonitorHandle) -> winit::monitor::VideoMode { let mut modes = monitor.video_modes().collect::<Vec<_>>(); modes.sort_by(|a, b| { use std::cmp::Ordering::*; match b.size().width.cmp(&a.size().width) { Equal => match b.size().height.cmp(&a.size().height) { Equal => b.refresh_rate().cmp(&a.refresh_rate()), default => default, }, default => default, } }); modes.first().unwrap().clone() } #[cfg(target_arch = "wasm32")] unsafe impl Send for WinitWindows {} #[cfg(target_arch = "wasm32")] unsafe impl Sync for WinitWindows {}
use bevy_math::IVec2; use bevy_utils::HashMap; use bevy_window::{Window, WindowDescriptor, WindowId, WindowMode}; use raw_window_handle::HasRawWindowHandle; use winit::dpi::LogicalSize; #[derive(Debug, Default)] pub struct WinitWindows { pub windows: HashMap<winit::window::WindowId, winit::window::Window>, pub window_id_to_winit: HashMap<WindowId, winit::window::WindowId>, pub winit_to_window_id: HashMap<winit::window::WindowId, WindowId>, } impl WinitWindows {
pub fn get_window(&self, id: WindowId) -> Option<&winit::window::Window> { self.window_id_to_winit .get(&id) .and_then(|id| self.windows.get(id)) } pub fn get_window_id(&self, id: winit::window::WindowId) -> Option<WindowId> { self.winit_to_window_id.get(&id).cloned() } } pub fn get_fitting_videomode( monitor: &winit::monitor::MonitorHandle, width: u32, height: u32, ) -> winit::monitor::VideoMode { let mut modes = monitor.video_modes().collect::<Vec<_>>(); fn abs_diff(a: u32, b: u32) -> u32 { if a > b { return a - b; } b - a } modes.sort_by(|a, b| { use std::cmp::Ordering::*; match abs_diff(a.size().width, width).cmp(&abs_diff(b.size().width, width)) { Equal => { match abs_diff(a.size().height, height).cmp(&abs_diff(b.size().height, height)) { Equal => b.refresh_rate().cmp(&a.refresh_rate()), default => default, } } default => default, } }); modes.first().unwrap().clone() } pub fn get_best_videomode(monitor: &winit::monitor::MonitorHandle) -> winit::monitor::VideoMode { let mut modes = monitor.video_modes().collect::<Vec<_>>(); modes.sort_by(|a, b| { use std::cmp::Ordering::*; match b.size().width.cmp(&a.size().width) { Equal => match b.size().height.cmp(&a.size().height) { Equal => b.refresh_rate().cmp(&a.refresh_rate()), default => default, }, default => default, } }); modes.first().unwrap().clone() } #[cfg(target_arch = "wasm32")] unsafe impl Send for WinitWindows {} #[cfg(target_arch = "wasm32")] unsafe impl Sync for WinitWindows {}
pub fn create_window( &mut self, event_loop: &winit::event_loop::EventLoopWindowTarget<()>, window_id: WindowId, window_descriptor: &WindowDescriptor, ) -> Window { #[cfg(target_os = "windows")] let mut winit_window_builder = { use winit::platform::windows::WindowBuilderExtWindows; winit::window::WindowBuilder::new().with_drag_and_drop(false) }; #[cfg(not(target_os = "windows"))] let mut winit_window_builder = winit::window::WindowBuilder::new(); winit_window_builder = match window_descriptor.mode { WindowMode::BorderlessFullscreen => winit_window_builder.with_fullscreen(Some( winit::window::Fullscreen::Borderless(event_loop.primary_monitor()), )), WindowMode::Fullscreen { use_size } => winit_window_builder.with_fullscreen(Some( winit::window::Fullscreen::Exclusive(match use_size { true => get_fitting_videomode( &event_loop.primary_monitor().unwrap(), window_descriptor.width as u32, window_descriptor.height as u32, ), false => get_best_videomode(&event_loop.primary_monitor().unwrap()), }), )), _ => { let WindowDescriptor { width, height, position, scale_factor_override, .. 
} = window_descriptor; if let Some(position) = position { if let Some(sf) = scale_factor_override { winit_window_builder = winit_window_builder.with_position( winit::dpi::LogicalPosition::new( position[0] as f64, position[1] as f64, ) .to_physical::<f64>(*sf), ); } else { winit_window_builder = winit_window_builder.with_position(winit::dpi::LogicalPosition::new( position[0] as f64, position[1] as f64, )); } } if let Some(sf) = scale_factor_override { winit_window_builder.with_inner_size( winit::dpi::LogicalSize::new(*width, *height).to_physical::<f64>(*sf), ) } else { winit_window_builder .with_inner_size(winit::dpi::LogicalSize::new(*width, *height)) } } .with_resizable(window_descriptor.resizable) .with_decorations(window_descriptor.decorations), }; let constraints = window_descriptor.resize_constraints.check_constraints(); let min_inner_size = LogicalSize { width: constraints.min_width, height: constraints.min_height, }; let max_inner_size = LogicalSize { width: constraints.max_width, height: constraints.max_height, }; let winit_window_builder = if constraints.max_width.is_finite() && constraints.max_height.is_finite() { winit_window_builder .with_min_inner_size(min_inner_size) .with_max_inner_size(max_inner_size) } else { winit_window_builder.with_min_inner_size(min_inner_size) }; #[allow(unused_mut)] let mut winit_window_builder = winit_window_builder.with_title(&window_descriptor.title); #[cfg(target_arch = "wasm32")] { use wasm_bindgen::JsCast; use winit::platform::web::WindowBuilderExtWebSys; if let Some(selector) = &window_descriptor.canvas { let window = web_sys::window().unwrap(); let document = window.document().unwrap(); let canvas = document .query_selector(&selector) .expect("Cannot query for canvas element."); if let Some(canvas) = canvas { let canvas = canvas.dyn_into::<web_sys::HtmlCanvasElement>().ok(); winit_window_builder = winit_window_builder.with_canvas(canvas); } else { panic!("Cannot find element: {}.", selector); } } } let winit_window = 
winit_window_builder.build(event_loop).unwrap(); match winit_window.set_cursor_grab(window_descriptor.cursor_locked) { Ok(_) => {} Err(winit::error::ExternalError::NotSupported(_)) => {} Err(err) => Err(err).unwrap(), } winit_window.set_cursor_visible(window_descriptor.cursor_visible); self.window_id_to_winit.insert(window_id, winit_window.id()); self.winit_to_window_id.insert(winit_window.id(), window_id); #[cfg(target_arch = "wasm32")] { use winit::platform::web::WindowExtWebSys; if window_descriptor.canvas.is_none() { let canvas = winit_window.canvas(); let window = web_sys::window().unwrap(); let document = window.document().unwrap(); let body = document.body().unwrap(); body.append_child(&canvas) .expect("Append canvas to HTML body."); } } let position = winit_window .outer_position() .ok() .map(|position| IVec2::new(position.x, position.y)); let inner_size = winit_window.inner_size(); let scale_factor = winit_window.scale_factor(); let raw_window_handle = winit_window.raw_window_handle(); self.windows.insert(winit_window.id(), winit_window); Window::new( window_id, window_descriptor, inner_size.width, inner_size.height, scale_factor, position, raw_window_handle, ) }
function_block-full_function
[ { "content": "/// An ordered &str->ReflectValue mapping where &str is a \"field\".\n\n/// This corresponds to rust struct types.\n\npub trait Struct: Reflect {\n\n fn field(&self, name: &str) -> Option<&dyn Reflect>;\n\n fn field_mut(&mut self, name: &str) -> Option<&mut dyn Reflect>;\n\n fn field_at(&self, index: usize) -> Option<&dyn Reflect>;\n\n fn field_at_mut(&mut self, index: usize) -> Option<&mut dyn Reflect>;\n\n fn name_at(&self, index: usize) -> Option<&str>;\n\n fn field_len(&self) -> usize;\n\n fn iter_fields(&self) -> FieldIter;\n\n fn clone_dynamic(&self) -> DynamicStruct;\n\n}\n\n\n\npub struct FieldIter<'a> {\n\n pub(crate) struct_val: &'a dyn Struct,\n\n pub(crate) index: usize,\n\n}\n\n\n\nimpl<'a> FieldIter<'a> {\n\n pub fn new(value: &'a dyn Struct) -> Self {\n\n FieldIter {\n\n struct_val: value,\n", "file_path": "crates/bevy_reflect/src/struct_trait.rs", "rank": 0, "score": 180430.6572524046 }, { "content": "pub fn prepare_windows(\n\n // By accessing a NonSend resource, we tell the scheduler to put this system on the main thread,\n\n // which is necessary for some OS s\n\n _marker: NonSend<NonSendMarker>,\n\n mut windows: ResMut<ExtractedWindows>,\n\n mut window_surfaces: ResMut<WindowSurfaces>,\n\n render_device: Res<RenderDevice>,\n\n render_instance: Res<RenderInstance>,\n\n) {\n\n let window_surfaces = window_surfaces.deref_mut();\n\n for window in windows.windows.values_mut() {\n\n let surface = window_surfaces\n\n .surfaces\n\n .entry(window.id)\n\n .or_insert_with(|| unsafe {\n\n // NOTE: On some OSes this MUST be called from the main thread.\n\n render_instance.create_surface(&window.handle.get_handle())\n\n });\n\n\n\n let swap_chain_descriptor = wgpu::SurfaceConfiguration {\n", "file_path": "pipelined/bevy_render2/src/view/window.rs", "rank": 1, "score": 146373.58803397993 }, { "content": "#[inline]\n\npub fn struct_partial_eq<S: Struct>(a: &S, b: &dyn Reflect) -> Option<bool> {\n\n let struct_value = if let 
ReflectRef::Struct(struct_value) = b.reflect_ref() {\n\n struct_value\n\n } else {\n\n return Some(false);\n\n };\n\n\n\n if a.field_len() != struct_value.field_len() {\n\n return Some(false);\n\n }\n\n\n\n for (i, value) in struct_value.iter_fields().enumerate() {\n\n let name = struct_value.name_at(i).unwrap();\n\n if let Some(field_value) = a.field(name) {\n\n if let Some(false) | None = field_value.reflect_partial_eq(value) {\n\n return Some(false);\n\n }\n\n } else {\n\n return Some(false);\n\n }\n\n }\n\n\n\n Some(true)\n\n}\n", "file_path": "crates/bevy_reflect/src/struct_trait.rs", "rank": 2, "score": 145872.35649477158 }, { "content": "pub fn exit_on_window_close_system(\n\n mut app_exit_events: EventWriter<AppExit>,\n\n mut window_close_requested_events: EventReader<WindowCloseRequested>,\n\n) {\n\n if window_close_requested_events.iter().next().is_some() {\n\n app_exit_events.send(AppExit);\n\n }\n\n}\n", "file_path": "crates/bevy_window/src/system.rs", "rank": 3, "score": 144144.85227681743 }, { "content": "#[derive(Default)]\n\nstruct SizedCommand<T: Default + Send + Sync + 'static>(T);\n\n\n\nimpl<T: Default + Send + Sync + 'static> Command for SizedCommand<T> {\n\n fn write(self, world: &mut World) {\n\n black_box(self);\n\n black_box(world);\n\n }\n\n}\n\n\n", "file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 4, "score": 142872.17287975448 }, { "content": "pub trait GetTupleStructField {\n\n fn get_field<T: Reflect>(&self, index: usize) -> Option<&T>;\n\n fn get_field_mut<T: Reflect>(&mut self, index: usize) -> Option<&mut T>;\n\n}\n\n\n\nimpl<S: TupleStruct> GetTupleStructField for S {\n\n fn get_field<T: Reflect>(&self, index: usize) -> Option<&T> {\n\n self.field(index)\n\n .and_then(|value| value.downcast_ref::<T>())\n\n }\n\n\n\n fn get_field_mut<T: Reflect>(&mut self, index: usize) -> Option<&mut T> {\n\n self.field_mut(index)\n\n .and_then(|value| value.downcast_mut::<T>())\n\n }\n\n}\n\n\n\nimpl GetTupleStructField for dyn 
TupleStruct {\n\n fn get_field<T: Reflect>(&self, index: usize) -> Option<&T> {\n\n self.field(index)\n", "file_path": "crates/bevy_reflect/src/tuple_struct.rs", "rank": 5, "score": 141821.44285367755 }, { "content": "/// A rust \"tuple struct\" reflection\n\npub trait TupleStruct: Reflect {\n\n fn field(&self, index: usize) -> Option<&dyn Reflect>;\n\n fn field_mut(&mut self, index: usize) -> Option<&mut dyn Reflect>;\n\n fn field_len(&self) -> usize;\n\n fn iter_fields(&self) -> TupleStructFieldIter;\n\n fn clone_dynamic(&self) -> DynamicTupleStruct;\n\n}\n\n\n\npub struct TupleStructFieldIter<'a> {\n\n pub(crate) tuple_struct: &'a dyn TupleStruct,\n\n pub(crate) index: usize,\n\n}\n\n\n\nimpl<'a> TupleStructFieldIter<'a> {\n\n pub fn new(value: &'a dyn TupleStruct) -> Self {\n\n TupleStructFieldIter {\n\n tuple_struct: value,\n\n index: 0,\n\n }\n\n }\n", "file_path": "crates/bevy_reflect/src/tuple_struct.rs", "rank": 6, "score": 141141.05851408513 }, { "content": "pub trait BevyDefault {\n\n fn bevy_default() -> Self;\n\n}\n\n\n\nimpl BevyDefault for wgpu::TextureFormat {\n\n fn bevy_default() -> Self {\n\n if cfg!(target_os = \"android\") || cfg!(target_arch = \"wasm32\") {\n\n // Bgra8UnormSrgb texture missing on some Android devices\n\n wgpu::TextureFormat::Rgba8UnormSrgb\n\n } else {\n\n wgpu::TextureFormat::Bgra8UnormSrgb\n\n }\n\n }\n\n}\n", "file_path": "pipelined/bevy_render2/src/texture/mod.rs", "rank": 7, "score": 132694.4545757156 }, { "content": "pub trait GetField {\n\n fn get_field<T: Reflect>(&self, name: &str) -> Option<&T>;\n\n fn get_field_mut<T: Reflect>(&mut self, name: &str) -> Option<&mut T>;\n\n}\n\n\n\nimpl<S: Struct> GetField for S {\n\n fn get_field<T: Reflect>(&self, name: &str) -> Option<&T> {\n\n self.field(name).and_then(|value| value.downcast_ref::<T>())\n\n }\n\n\n\n fn get_field_mut<T: Reflect>(&mut self, name: &str) -> Option<&mut T> {\n\n self.field_mut(name)\n\n .and_then(|value| value.downcast_mut::<T>())\n\n 
}\n\n}\n\n\n\nimpl GetField for dyn Struct {\n\n fn get_field<T: Reflect>(&self, name: &str) -> Option<&T> {\n\n self.field(name).and_then(|value| value.downcast_ref::<T>())\n\n }\n", "file_path": "crates/bevy_reflect/src/struct_trait.rs", "rank": 8, "score": 132449.92564781505 }, { "content": "pub fn get_wgpu_render_system(world: &mut World) -> impl FnMut(&mut World) {\n\n let options = world\n\n .get_resource::<WgpuOptions>()\n\n .cloned()\n\n .unwrap_or_default();\n\n let mut wgpu_renderer = future::block_on(WgpuRenderer::new(options));\n\n\n\n let resource_context = WgpuRenderResourceContext::new(wgpu_renderer.device.clone());\n\n world.insert_resource::<Box<dyn RenderResourceContext>>(Box::new(resource_context));\n\n world.insert_resource(SharedBuffers::new(4096));\n\n move |world| {\n\n wgpu_renderer.update(world);\n\n }\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct WgpuOptions {\n\n pub device_label: Option<Cow<'static, str>>,\n\n pub backend: WgpuBackend,\n\n pub power_pref: WgpuPowerOptions,\n", "file_path": "crates/bevy_wgpu/src/lib.rs", "rank": 10, "score": 127683.82016050625 }, { "content": "#[inline]\n\npub fn tuple_struct_partial_eq<S: TupleStruct>(a: &S, b: &dyn Reflect) -> Option<bool> {\n\n let tuple_struct = if let ReflectRef::TupleStruct(tuple_struct) = b.reflect_ref() {\n\n tuple_struct\n\n } else {\n\n return Some(false);\n\n };\n\n\n\n if a.field_len() != tuple_struct.field_len() {\n\n return Some(false);\n\n }\n\n\n\n for (i, value) in tuple_struct.iter_fields().enumerate() {\n\n if let Some(field_value) = a.field(i) {\n\n if let Some(false) | None = field_value.reflect_partial_eq(value) {\n\n return Some(false);\n\n }\n\n } else {\n\n return Some(false);\n\n }\n\n }\n\n\n\n Some(true)\n\n}\n", "file_path": "crates/bevy_reflect/src/tuple_struct.rs", "rank": 11, "score": 119284.06338639566 }, { "content": "#[proc_macro]\n\npub fn impl_query_set(_input: TokenStream) -> TokenStream {\n\n let mut tokens = TokenStream::new();\n\n let 
max_queries = 4;\n\n let queries = get_idents(|i| format!(\"Q{}\", i), max_queries);\n\n let filters = get_idents(|i| format!(\"F{}\", i), max_queries);\n\n let mut query_fn_muts = Vec::new();\n\n for i in 0..max_queries {\n\n let query = &queries[i];\n\n let filter = &filters[i];\n\n let fn_name = Ident::new(&format!(\"q{}\", i), Span::call_site());\n\n let index = Index::from(i);\n\n query_fn_muts.push(quote! {\n\n pub fn #fn_name(&mut self) -> Query<'_, '_, #query, #filter> {\n\n // SAFE: systems run without conflicts with other systems.\n\n // Conflicting queries in QuerySet are not accessible at the same time\n\n // QuerySets are guaranteed to not conflict with other SystemParams\n\n unsafe {\n\n Query::new(self.world, &self.query_states.#index, self.last_change_tick, self.change_tick)\n\n }\n\n }\n", "file_path": "crates/bevy_ecs/macros/src/lib.rs", "rank": 12, "score": 112445.12297771133 }, { "content": "fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) {\n\n let mut group =\n\n criterion.benchmark_group(format!(\"sized_commands_{}_bytes\", std::mem::size_of::<T>()));\n\n group.warm_up_time(std::time::Duration::from_millis(500));\n\n group.measurement_time(std::time::Duration::from_secs(4));\n\n\n\n for command_count in (1..5).map(|i| i * 2 * 1000) {\n\n group.bench_function(format!(\"{}_commands\", command_count), |bencher| {\n\n let mut world = World::default();\n\n let mut command_queue = CommandQueue::default();\n\n\n\n bencher.iter(|| {\n\n let mut commands = Commands::new(&mut command_queue, &world);\n\n for _ in 0..command_count {\n\n commands.add(T::default());\n\n }\n\n drop(commands);\n\n command_queue.apply(&mut world);\n\n });\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 13, "score": 112240.53727578968 }, { "content": "pub fn test_round_trip_struct<T: Debug + PartialEq + AsStd140 + AsStd430 + GlslStruct>(value: T) {\n\n let shader_std140 = 
glsl_shader_for_struct::<T>(\"std140\");\n\n let shader_std430 = glsl_shader_for_struct::<T>(\"std430\");\n\n\n\n let context = Context::new();\n\n context.test_round_trip_std140(&shader_std140, &value);\n\n context.test_round_trip_std430(&shader_std430, &value);\n\n}\n\n\n", "file_path": "crates/crevice/crevice-tests/src/gpu.rs", "rank": 14, "score": 109570.8872991087 }, { "content": "#[proc_macro]\n\npub fn impl_reflect_value(input: TokenStream) -> TokenStream {\n\n let reflect_value_def = parse_macro_input!(input as ReflectDef);\n\n\n\n let bevy_reflect_path = BevyManifest::default().get_path(\"bevy_reflect\");\n\n let ty = &reflect_value_def.type_name;\n\n let reflect_attrs = reflect_value_def.attrs.unwrap_or_default();\n\n let registration_data = &reflect_attrs.data;\n\n let get_type_registration_impl = impl_get_type_registration(\n\n ty,\n\n &bevy_reflect_path,\n\n registration_data,\n\n &reflect_value_def.generics,\n\n );\n\n impl_value(\n\n ty,\n\n &reflect_value_def.generics,\n\n get_type_registration_impl,\n\n &bevy_reflect_path,\n\n &reflect_attrs,\n\n )\n\n}\n\n\n", "file_path": "crates/bevy_reflect/bevy_reflect_derive/src/lib.rs", "rank": 15, "score": 108919.1920675898 }, { "content": "#[proc_macro_derive(GlslStruct)]\n\npub fn derive_glsl_struct(input: CompilerTokenStream) -> CompilerTokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let expanded = glsl::emit(input);\n\n\n\n CompilerTokenStream::from(expanded)\n\n}\n", "file_path": "crates/crevice/crevice-derive/src/lib.rs", "rank": 16, "score": 108741.27541482811 }, { "content": "fn collision_system(window: Res<WindowDescriptor>, mut bird_query: Query<(&mut Bird, &Transform)>) {\n\n let half_width = window.width as f32 * 0.5;\n\n let half_height = window.height as f32 * 0.5;\n\n\n\n for (mut bird, transform) in bird_query.iter_mut() {\n\n let x_vel = bird.velocity.x;\n\n let y_vel = bird.velocity.y;\n\n let x_pos = transform.translation.x;\n\n let y_pos = 
transform.translation.y;\n\n\n\n if (x_vel > 0. && x_pos + HALF_BIRD_SIZE > half_width)\n\n || (x_vel <= 0. && x_pos - HALF_BIRD_SIZE < -(half_width))\n\n {\n\n bird.velocity.x = -x_vel;\n\n }\n\n if y_vel < 0. && y_pos - HALF_BIRD_SIZE < -half_height {\n\n bird.velocity.y = -y_vel;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/tools/bevymark.rs", "rank": 17, "score": 105010.62720163795 }, { "content": "fn collision_system(window: Res<WindowDescriptor>, mut bird_query: Query<(&mut Bird, &Transform)>) {\n\n let half_width = window.width as f32 * 0.5;\n\n let half_height = window.height as f32 * 0.5;\n\n\n\n for (mut bird, transform) in bird_query.iter_mut() {\n\n let x_vel = bird.velocity.x;\n\n let y_vel = bird.velocity.y;\n\n let x_pos = transform.translation.x;\n\n let y_pos = transform.translation.y;\n\n\n\n if (x_vel > 0. && x_pos + HALF_BIRD_SIZE > half_width)\n\n || (x_vel <= 0. && x_pos - HALF_BIRD_SIZE < -(half_width))\n\n {\n\n bird.velocity.x = -x_vel;\n\n }\n\n if y_vel < 0. 
&& y_pos - HALF_BIRD_SIZE < -half_height {\n\n bird.velocity.y = -y_vel;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/tools/bevymark_pipelined.rs", "rank": 18, "score": 104193.909999183 }, { "content": "/// Create an instance of the platform default `AssetIo`\n\n///\n\n/// This is useful when providing a custom `AssetIo` instance that needs to\n\n/// delegate to the default `AssetIo` for the platform.\n\npub fn create_platform_default_asset_io(app: &mut App) -> Box<dyn AssetIo> {\n\n let settings = app\n\n .world\n\n .get_resource_or_insert_with(AssetServerSettings::default);\n\n\n\n #[cfg(all(not(target_arch = \"wasm32\"), not(target_os = \"android\")))]\n\n let source = FileAssetIo::new(&settings.asset_folder);\n\n #[cfg(target_arch = \"wasm32\")]\n\n let source = WasmAssetIo::new(&settings.asset_folder);\n\n #[cfg(target_os = \"android\")]\n\n let source = AndroidAssetIo::new(&settings.asset_folder);\n\n\n\n Box::new(source)\n\n}\n\n\n\nimpl Plugin for AssetPlugin {\n\n fn build(&self, app: &mut App) {\n\n if app.world.get_resource::<AssetServer>().is_none() {\n\n let task_pool = app\n\n .world\n", "file_path": "crates/bevy_asset/src/lib.rs", "rank": 19, "score": 102233.33359603107 }, { "content": "struct StructVisitor<'a> {\n\n registry: &'a TypeRegistry,\n\n}\n\n\n\nimpl<'a, 'de> Visitor<'de> for StructVisitor<'a> {\n\n type Value = DynamicStruct;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"struct value\")\n\n }\n\n\n\n fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>\n\n where\n\n V: MapAccess<'de>,\n\n {\n\n let mut dynamic_struct = DynamicStruct::default();\n\n while let Some(key) = map.next_key::<String>()? 
{\n\n let value = map.next_value_seed(ReflectDeserializer {\n\n registry: self.registry,\n\n })?;\n\n dynamic_struct.insert_boxed(&key, value);\n\n }\n\n\n\n Ok(dynamic_struct)\n\n }\n\n}\n\n\n", "file_path": "crates/bevy_reflect/src/serde/de.rs", "rank": 20, "score": 101161.19162977285 }, { "content": "struct StructDeserializer<'a> {\n\n registry: &'a TypeRegistry,\n\n}\n\n\n\nimpl<'a, 'de> DeserializeSeed<'de> for StructDeserializer<'a> {\n\n type Value = DynamicStruct;\n\n\n\n fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n deserializer.deserialize_map(StructVisitor {\n\n registry: self.registry,\n\n })\n\n }\n\n}\n\n\n", "file_path": "crates/bevy_reflect/src/serde/de.rs", "rank": 21, "score": 101161.19162977285 }, { "content": "struct TupleStructDeserializer<'a> {\n\n registry: &'a TypeRegistry,\n\n}\n\n\n\nimpl<'a, 'de> DeserializeSeed<'de> for TupleStructDeserializer<'a> {\n\n type Value = DynamicTupleStruct;\n\n\n\n fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n deserializer.deserialize_seq(TupleStructVisitor {\n\n registry: self.registry,\n\n })\n\n }\n\n}\n\n\n", "file_path": "crates/bevy_reflect/src/serde/de.rs", "rank": 22, "score": 100104.73896124214 }, { "content": "struct TupleStructVisitor<'a> {\n\n registry: &'a TypeRegistry,\n\n}\n\n\n\nimpl<'a, 'de> Visitor<'de> for TupleStructVisitor<'a> {\n\n type Value = DynamicTupleStruct;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"tuple struct value\")\n\n }\n\n\n\n fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>\n\n where\n\n V: SeqAccess<'de>,\n\n {\n\n let mut tuple_struct = DynamicTupleStruct::default();\n\n while let Some(value) = seq.next_element_seed(ReflectDeserializer {\n\n registry: self.registry,\n\n })? 
{\n\n tuple_struct.insert_boxed(value);\n\n }\n\n Ok(tuple_struct)\n\n }\n\n}\n\n\n", "file_path": "crates/bevy_reflect/src/serde/de.rs", "rank": 23, "score": 100104.73896124214 }, { "content": "#[derive(Component)]\n\nstruct Rotator;\n\n\n", "file_path": "examples/3d/parenting.rs", "rank": 25, "score": 98529.95723637153 }, { "content": "#[derive(Component)]\n\nstruct Contributor {\n\n hue: f32,\n\n}\n\n\n", "file_path": "examples/2d/contributors.rs", "rank": 26, "score": 98529.95723637153 }, { "content": "#[derive(Component)]\n\nstruct Velocity {\n\n translation: Vec3,\n\n rotation: f32,\n\n}\n\n\n\nconst GRAVITY: f32 = -9.821 * 100.0;\n\nconst SPRITE_SIZE: f32 = 75.0;\n\n\n\nconst SATURATION_DESELECTED: f32 = 0.3;\n\nconst LIGHTNESS_DESELECTED: f32 = 0.2;\n\nconst SATURATION_SELECTED: f32 = 0.9;\n\nconst LIGHTNESS_SELECTED: f32 = 0.7;\n\nconst ALPHA: f32 = 0.92;\n\n\n\nconst SHOWCASE_TIMER_SECS: f32 = 3.0;\n\n\n", "file_path": "examples/2d/contributors.rs", "rank": 27, "score": 98529.95723637153 }, { "content": "struct LargeStruct([u64; 64]);\n\n\n\nimpl Default for LargeStruct {\n\n fn default() -> Self {\n\n Self([0; 64])\n\n }\n\n}\n\n\n", "file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 28, "score": 98317.39107833937 }, { "content": "#[derive(Component, Default)]\n\nstruct Enemy {\n\n hit_points: u32,\n\n}\n\n\n", "file_path": "tests/how_to_test_systems.rs", "rank": 29, "score": 96837.21120579034 }, { "content": "#[derive(Component, Reflect, Default)]\n\n#[reflect(Component)] // this tells the reflect derive to also reflect component behaviors\n\nstruct ComponentA {\n\n pub x: f32,\n\n pub y: f32,\n\n}\n\n\n\n// Some components have fields that cannot (or should not) be written to scene files. These can be\n\n// ignored with the #[reflect(ignore)] attribute. This is also generally where the `FromWorld`\n\n// trait comes into play. 
`FromWorld` gives you access to your App's current ECS `Resources`\n\n// when you construct your component.\n", "file_path": "examples/scene/scene.rs", "rank": 30, "score": 96836.49359857835 }, { "content": "#[derive(Component)]\n\nstruct SelectTimer;\n\n\n", "file_path": "examples/2d/contributors.rs", "rank": 31, "score": 96831.0707258203 }, { "content": "struct MyEvent {\n\n pub message: String,\n\n}\n\n\n", "file_path": "examples/ecs/event.rs", "rank": 32, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Movable;\n\n\n", "file_path": "examples/3d/3d_scene_pipelined.rs", "rank": 33, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Ball {\n\n velocity: Vec3,\n\n}\n\n\n", "file_path": "examples/game/breakout.rs", "rank": 34, "score": 96831.0707258203 }, { "content": "struct Scoreboard {\n\n score: usize,\n\n}\n\n\n", "file_path": "examples/game/breakout.rs", "rank": 35, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Rotator;\n\n\n", "file_path": "examples/3d/z_sort_debug.rs", "rank": 36, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct AnimateRotation;\n", "file_path": "examples/2d/text2d.rs", "rank": 37, "score": 96831.0707258203 }, { "content": "struct ContributorSelection {\n\n order: Vec<(String, Entity)>,\n\n idx: usize,\n\n}\n\n\n", "file_path": "examples/2d/contributors.rs", "rank": 38, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Bird {\n\n velocity: Vec3,\n\n}\n\n\n", "file_path": "examples/tools/bevymark.rs", "rank": 39, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct ContributorDisplay;\n\n\n", "file_path": "examples/2d/contributors.rs", "rank": 40, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct AnimateScale;\n\n\n", "file_path": "examples/2d/text2d.rs", "rank": 41, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Rotates;\n\n\n", "file_path": 
"examples/3d/load_gltf.rs", "rank": 42, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct AnimateTranslation;\n", "file_path": "examples/2d/text2d.rs", "rank": 43, "score": 96831.0707258203 }, { "content": "#[derive(Component)]\n\nstruct Paddle {\n\n speed: f32,\n\n}\n\n\n", "file_path": "examples/game/breakout.rs", "rank": 44, "score": 96831.0707258203 }, { "content": "#[derive(Default)]\n\nstruct State {\n\n handle: Handle<CustomAsset>,\n\n printed: bool,\n\n}\n\n\n", "file_path": "examples/asset/custom_asset.rs", "rank": 45, "score": 95239.67084761849 }, { "content": "#[derive(Default)]\n\nstruct CounterState {\n\n count: u32,\n\n}\n", "file_path": "examples/app/headless.rs", "rank": 46, "score": 95239.67084761849 }, { "content": "#[derive(Default)]\n\nstruct State {\n\n counter: usize,\n\n}\n\n\n\n// NOTE: this doesn't do anything relevant to our game, it is just here for illustrative purposes\n", "file_path": "examples/ecs/ecs_guide.rs", "rank": 47, "score": 95239.67084761849 }, { "content": "#[derive(RenderResources, ShaderDefs, Default, TypeUuid)]\n\n#[uuid = \"620f651b-adbe-464b-b740-ba0e547282ba\"]\n\nstruct MyMaterial {\n\n pub color: Color,\n\n #[render_resources(ignore)]\n\n #[shader_def]\n\n pub always_blue: bool,\n\n}\n\n\n\nconst VERTEX_SHADER: &str = r#\"\n\n#version 450\n\nlayout(location = 0) in vec3 Vertex_Position;\n\nlayout(set = 0, binding = 0) uniform CameraViewProj {\n\n mat4 ViewProj;\n\n};\n\nlayout(set = 1, binding = 0) uniform Transform {\n\n mat4 Model;\n\n};\n\nvoid main() {\n\n gl_Position = ViewProj * Model * vec4(Vertex_Position, 1.0);\n\n}\n\n\"#;\n", "file_path": "examples/shader/shader_defs.rs", "rank": 48, "score": 95238.87089392074 }, { "content": "struct ButtonMaterials {\n\n normal: Handle<ColorMaterial>,\n\n hovered: Handle<ColorMaterial>,\n\n pressed: Handle<ColorMaterial>,\n\n}\n\n\n\nimpl FromWorld for ButtonMaterials {\n\n fn from_world(world: &mut World) -> Self {\n\n let mut materials = 
world.get_resource_mut::<Assets<ColorMaterial>>().unwrap();\n\n ButtonMaterials {\n\n normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()),\n\n hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()),\n\n pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()),\n\n }\n\n }\n\n}\n", "file_path": "examples/ecs/state.rs", "rank": 49, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct Bird {\n\n velocity: Vec3,\n\n}\n\n\n\n// struct BirdMaterial(Handle<ColorMaterial>);\n\n\n\n// impl FromWorld for BirdMaterial {\n\n// fn from_world(world: &mut World) -> Self {\n\n// let world = world.cell();\n\n// let mut color_materials = world.get_resource_mut::<Assets<ColorMaterial>>().unwrap();\n\n// let asset_server = world.get_resource_mut::<AssetServer>().unwrap();\n\n// BirdMaterial(color_materials.add(asset_server.load(\"branding/icon.png\").into()))\n\n// }\n\n// }\n\n\n", "file_path": "examples/tools/bevymark_pipelined.rs", "rank": 50, "score": 95233.44802116271 }, { "content": "struct ButtonMaterials {\n\n normal: Handle<ColorMaterial>,\n\n hovered: Handle<ColorMaterial>,\n\n pressed: Handle<ColorMaterial>,\n\n}\n\n\n\nimpl FromWorld for ButtonMaterials {\n\n fn from_world(world: &mut World) -> Self {\n\n let mut materials = world.get_resource_mut::<Assets<ColorMaterial>>().unwrap();\n\n ButtonMaterials {\n\n normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()),\n\n hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()),\n\n pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/ui/button.rs", "rank": 51, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct A;\n", "file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 52, "score": 95233.44802116271 }, { "content": "#[derive(Reflect)]\n\n#[reflect(DoThing)]\n\nstruct MyType {\n\n value: String,\n\n}\n\n\n\nimpl DoThing for MyType {\n\n fn do_thing(&self) -> String {\n\n format!(\"{} World!\", 
self.value)\n\n }\n\n}\n\n\n", "file_path": "examples/reflection/trait_reflection.rs", "rank": 53, "score": 95233.44802116271 }, { "content": "#[derive(Component, Reflect)]\n\n#[reflect(Component)]\n\nstruct ComponentB {\n\n pub value: String,\n\n #[reflect(ignore)]\n\n pub _time_since_startup: Duration,\n\n}\n\n\n\nimpl FromWorld for ComponentB {\n\n fn from_world(world: &mut World) -> Self {\n\n let time = world.get_resource::<Time>().unwrap();\n\n ComponentB {\n\n _time_since_startup: time.time_since_startup(),\n\n value: \"Default Value\".to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/scene/scene.rs", "rank": 54, "score": 95233.44802116271 }, { "content": "struct State {\n\n handle: Handle<RustSourceCode>,\n\n printed: bool,\n\n}\n\n\n", "file_path": "examples/wasm/assets_wasm.rs", "rank": 55, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct Player {\n\n name: String,\n\n}\n\n\n\n// Each player also has a score. This component holds on to that score\n", "file_path": "examples/ecs/ecs_guide.rs", "rank": 56, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct FpsText;\n\n\n\n// A unit struct to help identify the color-changing Text component\n", "file_path": "examples/ui/text.rs", "rank": 57, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct ColorText;\n\n\n", "file_path": "examples/ui/text.rs", "rank": 58, "score": 95233.44802116271 }, { "content": "#[derive(Clone, Hash, Debug, PartialEq, Eq, SystemLabel)]\n\nstruct Physics;\n\n\n", "file_path": "examples/ecs/system_sets.rs", "rank": 59, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct Score {\n\n value: usize,\n\n}\n\n\n\n// RESOURCES: \"Global\" state accessible by systems. 
These are also just normal Rust data types!\n\n//\n\n\n\n// This resource holds information about the game:\n", "file_path": "examples/ecs/ecs_guide.rs", "rank": 60, "score": 95233.44802116271 }, { "content": "#[derive(Component)]\n\nstruct MyComponent;\n\n\n", "file_path": "examples/ecs/removal_detection.rs", "rank": 61, "score": 95233.44802116271 }, { "content": "struct MenuData {\n\n button_entity: Entity,\n\n}\n\n\n", "file_path": "examples/ecs/state.rs", "rank": 62, "score": 95233.44802116271 }, { "content": "struct BevyCounter {\n\n pub count: u128,\n\n}\n\n\n", "file_path": "examples/tools/bevymark.rs", "rank": 63, "score": 95233.44802116271 }, { "content": "#[derive(Default)]\n\nstruct GamepadLobby {\n\n gamepads: HashSet<Gamepad>,\n\n}\n\n\n", "file_path": "examples/input/gamepad_input.rs", "rank": 64, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct RpgSpriteHandles {\n\n handles: Vec<HandleUntyped>,\n\n}\n\n\n", "file_path": "examples/2d/texture_atlas.rs", "rank": 65, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct GameState {\n\n current_round: usize,\n\n total_players: usize,\n\n winning_player: Option<String>,\n\n}\n\n\n", "file_path": "examples/ecs/ecs_guide.rs", "rank": 66, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct Player {\n\n entity: Option<Entity>,\n\n i: usize,\n\n j: usize,\n\n}\n\n\n", "file_path": "examples/game/alien_cake_addict.rs", "rank": 67, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct CounterState {\n\n count: u32,\n\n}\n\n\n", "file_path": "examples/wasm/winit_wasm.rs", "rank": 68, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct Bonus {\n\n entity: Option<Entity>,\n\n i: usize,\n\n j: usize,\n\n handle: Handle<Scene>,\n\n}\n\n\n", "file_path": "examples/game/alien_cake_addict.rs", "rank": 69, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct CounterState {\n\n count: u32,\n\n}\n", 
"file_path": "examples/wasm/headless_wasm.rs", "rank": 70, "score": 93734.52007935637 }, { "content": "#[derive(Default)]\n\nstruct Game {\n\n board: Vec<Vec<Cell>>,\n\n player: Player,\n\n bonus: Bonus,\n\n score: i32,\n\n cake_eaten: u32,\n\n camera_should_focus: Vec3,\n\n camera_is_focus: Vec3,\n\n}\n\n\n\nconst BOARD_SIZE_I: usize = 14;\n\nconst BOARD_SIZE_J: usize = 21;\n\n\n\nconst RESET_FOCUS: [f32; 3] = [\n\n BOARD_SIZE_I as f32 / 2.0,\n\n 0.0,\n\n BOARD_SIZE_J as f32 / 2.0 - 0.5,\n\n];\n\n\n", "file_path": "examples/game/alien_cake_addict.rs", "rank": 71, "score": 93734.52007935637 }, { "content": "#[derive(Bundle, Default)]\n\nstruct BodyBundle {\n\n #[bundle]\n\n pbr: PbrBundle,\n\n mass: Mass,\n\n last_pos: LastPos,\n\n acceleration: Acceleration,\n\n}\n\n\n", "file_path": "examples/ecs/iter_combinations.rs", "rank": 72, "score": 93734.4377328706 }, { "content": "#[derive(RenderResources, Default, TypeUuid)]\n\n#[uuid = \"1e08866c-0b8a-437e-8bce-37733b25127e\"]\n\nstruct MyMaterial {\n\n pub color: Color,\n\n}\n\n\n\nconst VERTEX_SHADER: &str = r#\"\n\n#version 450\n\nlayout(location = 0) in vec3 Vertex_Position;\n\nlayout(set = 0, binding = 0) uniform CameraViewProj {\n\n mat4 ViewProj;\n\n};\n\nlayout(set = 1, binding = 0) uniform Transform {\n\n mat4 Model;\n\n};\n\nvoid main() {\n\n gl_Position = ViewProj * Model * vec4(Vertex_Position, 1.0);\n\n}\n\n\"#;\n\n\n\nconst FRAGMENT_SHADER: &str = r#\"\n\n#version 450\n\nlayout(location = 0) out vec4 o_Target;\n\nlayout(set = 2, binding = 0) uniform MyMaterial_color {\n\n vec4 color;\n\n};\n\nvoid main() {\n\n o_Target = color;\n\n}\n\n\"#;\n\n\n", "file_path": "examples/shader/shader_custom_material.rs", "rank": 73, "score": 93733.84990786682 }, { "content": "#[derive(Component, RenderResources, Default, TypeUuid)]\n\n#[uuid = \"93fb26fc-6c05-489b-9029-601edf703b6b\"]\n\nstruct MyArrayTexture {\n\n pub texture: Handle<Texture>,\n\n}\n\n\n\nconst VERTEX_SHADER: &str = r#\"\n\n#version 
450\n\n\n\nlayout(location = 0) in vec3 Vertex_Position;\n\nlayout(location = 0) out vec4 v_Position;\n\n\n\nlayout(set = 0, binding = 0) uniform CameraViewProj {\n\n mat4 ViewProj;\n\n};\n\nlayout(set = 1, binding = 0) uniform Transform {\n\n mat4 Model;\n\n};\n\n\n\nvoid main() {\n\n v_Position = ViewProj * Model * vec4(Vertex_Position, 1.0);\n\n gl_Position = v_Position;\n", "file_path": "examples/shader/array_texture.rs", "rank": 74, "score": 93733.84990786682 }, { "content": "#[derive(RenderResources, Default, TypeUuid)]\n\n#[uuid = \"3bf9e364-f29d-4d6c-92cf-93298466c620\"]\n\nstruct MyMaterial {\n\n pub color: Color,\n\n}\n\n\n", "file_path": "examples/shader/hot_shader_reloading.rs", "rank": 75, "score": 93733.78424944557 }, { "content": "#[derive(Component, RenderResources, Default, TypeUuid)]\n\n#[uuid = \"463e4b8a-d555-4fc2-ba9f-4c880063ba92\"]\n\nstruct TimeUniform {\n\n value: f32,\n\n}\n\n\n\nconst VERTEX_SHADER: &str = r#\"\n\n#version 450\n\n\n\nlayout(location = 0) in vec3 Vertex_Position;\n\nlayout(location = 1) in vec2 Vertex_Uv;\n\nlayout(location = 0) out vec2 v_Uv;\n\n\n\nlayout(set = 0, binding = 0) uniform CameraViewProj {\n\n mat4 ViewProj;\n\n};\n\n\n\nlayout(set = 1, binding = 0) uniform Transform {\n\n mat4 Model;\n\n};\n\n\n\nvoid main() {\n", "file_path": "examples/shader/animate_shader.rs", "rank": 76, "score": 93733.72012565863 }, { "content": "struct BevyCounter {\n\n pub count: u128,\n\n pub color: Color,\n\n}\n\n\n", "file_path": "examples/tools/bevymark_pipelined.rs", "rank": 77, "score": 93728.29725290058 }, { "content": "#[derive(Clone, Hash, Debug, PartialEq, Eq, SystemLabel)]\n\nstruct PostPhysics;\n\n\n\n/// Resource used to stop our example.\n", "file_path": "examples/ecs/system_sets.rs", "rank": 78, "score": 93728.29725290058 }, { "content": "struct Cell {\n\n height: f32,\n\n}\n\n\n", "file_path": "examples/game/alien_cake_addict.rs", "rank": 79, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct 
EntityInMyScene;\n\n\n", "file_path": "examples/3d/update_gltf_scene.rs", "rank": 80, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct CameraController {\n\n pub enabled: bool,\n\n pub sensitivity: f32,\n\n pub key_forward: KeyCode,\n\n pub key_back: KeyCode,\n\n pub key_left: KeyCode,\n\n pub key_right: KeyCode,\n\n pub key_up: KeyCode,\n\n pub key_down: KeyCode,\n\n pub key_run: KeyCode,\n\n pub walk_speed: f32,\n\n pub run_speed: f32,\n\n pub friction: f32,\n\n pub pitch: f32,\n\n pub yaw: f32,\n\n pub velocity: Vec3,\n\n}\n\n\n\nimpl Default for CameraController {\n\n fn default() -> Self {\n", "file_path": "examples/3d/shadow_biases_pipelined.rs", "rank": 81, "score": 93728.29725290058 }, { "content": "struct State {\n\n atlas_count: u32,\n\n handle: Handle<Font>,\n\n timer: Timer,\n\n}\n\n\n\nimpl Default for State {\n\n fn default() -> Self {\n\n Self {\n\n atlas_count: 0,\n\n handle: Handle::default(),\n\n timer: Timer::from_seconds(0.05, true),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/ui/font_atlas_debug.rs", "rank": 82, "score": 93728.29725290058 }, { "content": "// This is our event that we will send and receive in systems\n\nstruct MyEvent {\n\n pub message: String,\n\n pub random_value: f32,\n\n}\n\n\n", "file_path": "crates/bevy_ecs/examples/events.rs", "rank": 83, "score": 93728.29725290058 }, { "content": "#[derive(Debug)]\n\nstruct Counter {\n\n pub value: i32,\n\n}\n\n\n\n// System label to enforce a run order of our systems\n", "file_path": "crates/bevy_ecs/examples/resources.rs", "rank": 84, "score": 93728.29725290058 }, { "content": "#[derive(AsStd140)]\n\nstruct PointLight {\n\n position: mint::Vector3<f32>,\n\n color: mint::Vector3<f32>,\n\n brightness: f32,\n\n}\n\n\n\nlet lights = vec![\n\n PointLight {\n\n position: [0.0, 1.0, 0.0].into(),\n\n color: [1.0, 0.0, 0.0].into(),\n\n brightness: 0.6,\n\n },\n\n PointLight {\n\n position: [0.0, 4.0, 3.0].into(),\n\n color: [1.0, 1.0, 1.0].into(),\n\n brightness: 
1.0,\n\n },\n\n];\n\n\n\n# fn map_gpu_buffer_for_write() -> &'static mut [u8] {\n", "file_path": "crates/crevice/src/lib.rs", "rank": 85, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct FirstPassCube;\n", "file_path": "examples/3d/render_to_texture.rs", "rank": 86, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct MainPassCube;\n\n\n", "file_path": "examples/3d/render_to_texture.rs", "rank": 87, "score": 93728.29725290058 }, { "content": "struct CountTimer {\n\n timer: Timer,\n\n}\n\n\n\nimpl Default for CountTimer {\n\n fn default() -> Self {\n\n Self {\n\n timer: Timer::from_seconds(1.0, true),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/tools/bevymark_pipelined.rs", "rank": 88, "score": 93728.29725290058 }, { "content": "#[derive(AsStd430)]\n\nstruct Frob {\n\n size: mint::Vector3<f32>,\n\n frobiness: f32,\n\n}\n\n\n\n// Many APIs require that buffers contain at least enough space for all\n\n// fixed-size bindiongs to a buffer as well as one element of any arrays, if\n\n// there are any.\n\nlet mut sizer = std430::Sizer::new();\n\nsizer.add::<u32>();\n\nsizer.add::<Frob>();\n\n\n\n# fn create_buffer_with_size(size: usize) {}\n\nlet buffer = create_buffer_with_size(sizer.len());\n\n# assert_eq!(sizer.len(), 32);\n\n```\n\n*/\n\npub struct Sizer {\n\n offset: usize,\n\n}\n", "file_path": "crates/crevice/src/std430/sizer.rs", "rank": 89, "score": 93728.29725290058 }, { "content": "struct PrintMessageState {\n\n message: String,\n\n timer: Timer,\n\n}\n\n\n", "file_path": "examples/app/plugin.rs", "rank": 90, "score": 93728.29725290058 }, { "content": "struct EventTriggerState {\n\n event_timer: Timer,\n\n}\n\n\n\nimpl Default for EventTriggerState {\n\n fn default() -> Self {\n\n EventTriggerState {\n\n event_timer: Timer::from_seconds(1.0, true),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/ecs/event.rs", "rank": 91, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct B;\n", 
"file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 92, "score": 93728.29725290058 }, { "content": "#[derive(Component)]\n\nstruct C;\n\n\n", "file_path": "benches/benches/bevy_ecs/commands.rs", "rank": 93, "score": 93728.29725290058 }, { "content": "#[derive(AsStd140)]\n\nstruct Frob {\n\n size: mint::Vector3<f32>,\n\n frobiness: f32,\n\n}\n\n\n\n// Many APIs require that buffers contain at least enough space for all\n\n// fixed-size bindiongs to a buffer as well as one element of any arrays, if\n\n// there are any.\n\nlet mut sizer = std140::Sizer::new();\n\nsizer.add::<u32>();\n\nsizer.add::<Frob>();\n\n\n\n# fn create_buffer_with_size(size: usize) {}\n\nlet buffer = create_buffer_with_size(sizer.len());\n\n# assert_eq!(sizer.len(), 32);\n\n```\n\n*/\n\npub struct Sizer {\n\n offset: usize,\n\n}\n", "file_path": "crates/crevice/src/std140/sizer.rs", "rank": 94, "score": 93728.29725290058 }, { "content": "// This resource provides rules for our \"game\".\n\nstruct GameRules {\n\n winning_score: usize,\n\n max_rounds: usize,\n\n max_players: usize,\n\n}\n\n\n\n// SYSTEMS: Logic that runs on entities, components, and resources. 
These generally run once each\n\n// time the app updates.\n\n//\n\n\n", "file_path": "examples/ecs/ecs_guide.rs", "rank": 95, "score": 93728.29725290058 }, { "content": "#[derive(AsStd140)]\n\nstruct MainUniform {\n\n orientation: mint::ColumnMatrix3<f32>,\n\n position: mint::Vector3<f32>,\n\n scale: f32,\n\n}\n\n\n\nlet value = MainUniform {\n\n orientation: [\n\n [1.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0],\n\n [0.0, 0.0, 1.0],\n\n ].into(),\n\n position: [1.0, 2.0, 3.0].into(),\n\n scale: 4.0,\n\n};\n\n\n\nlet value_std140 = value.as_std140();\n\n\n\n# fn upload_data_to_gpu(_value: &[u8]) {}\n\nupload_data_to_gpu(value_std140.as_bytes());\n\n```\n\n\n\n### Sequential Types\n\n\n\nMore complicated data can be uploaded using the std140\n\n[`Writer`][std140::Writer] type.\n\n\n\n```glsl\n", "file_path": "crates/crevice/src/lib.rs", "rank": 96, "score": 93728.29725290058 }, { "content": "use super::{Window, WindowId};\n\nuse bevy_utils::HashMap;\n\n\n\n#[derive(Debug, Default)]\n\npub struct Windows {\n\n windows: HashMap<WindowId, Window>,\n\n}\n\n\n\nimpl Windows {\n\n pub fn add(&mut self, window: Window) {\n\n self.windows.insert(window.id(), window);\n\n }\n\n\n\n pub fn get(&self, id: WindowId) -> Option<&Window> {\n\n self.windows.get(&id)\n\n }\n\n\n\n pub fn get_mut(&mut self, id: WindowId) -> Option<&mut Window> {\n\n self.windows.get_mut(&id)\n\n }\n", "file_path": "crates/bevy_window/src/windows.rs", "rank": 97, "score": 25.916611643912148 }, { "content": "use bevy_app::{prelude::*, Events};\n\n\n\npub struct WindowPlugin {\n\n pub add_primary_window: bool,\n\n pub exit_on_close: bool,\n\n}\n\n\n\nimpl Default for WindowPlugin {\n\n fn default() -> Self {\n\n WindowPlugin {\n\n add_primary_window: true,\n\n exit_on_close: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Plugin for WindowPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.add_event::<WindowResized>()\n\n .add_event::<CreateWindow>()\n", "file_path": "crates/bevy_window/src/lib.rs", "rank": 98, "score": 
25.8267987672942 } ]
Rust
src/cargo/sources/registry/remote.rs
Aelnor/cargo
b1684e28490a111b4956b40377abb2b961bfd4b3
use crate::core::{GitReference, PackageId, SourceId}; use crate::sources::git; use crate::sources::registry::MaybeLock; use crate::sources::registry::{ RegistryConfig, RegistryData, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE, VERSION_TEMPLATE, }; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{Config, Filesystem}; use anyhow::Context as _; use cargo_util::{paths, Sha256}; use lazycell::LazyCell; use log::{debug, trace}; use std::cell::{Cell, Ref, RefCell}; use std::fmt::Write as FmtWrite; use std::fs::{self, File, OpenOptions}; use std::io::prelude::*; use std::io::SeekFrom; use std::mem; use std::path::Path; use std::str; fn make_dep_prefix(name: &str) -> String { match name.len() { 1 => String::from("1"), 2 => String::from("2"), 3 => format!("3/{}", &name[..1]), _ => format!("{}/{}", &name[0..2], &name[2..4]), } } pub struct RemoteRegistry<'cfg> { index_path: Filesystem, cache_path: Filesystem, source_id: SourceId, index_git_ref: GitReference, config: &'cfg Config, tree: RefCell<Option<git2::Tree<'static>>>, repo: LazyCell<git2::Repository>, head: Cell<Option<git2::Oid>>, current_sha: Cell<Option<InternedString>>, } impl<'cfg> RemoteRegistry<'cfg> { pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { RemoteRegistry { index_path: config.registry_index_path().join(name), cache_path: config.registry_cache_path().join(name), source_id, config, index_git_ref: GitReference::DefaultBranch, tree: RefCell::new(None), repo: LazyCell::new(), head: Cell::new(None), current_sha: Cell::new(None), } } fn repo(&self) -> CargoResult<&git2::Repository> { self.repo.try_borrow_with(|| { let path = self.config.assert_package_cache_locked(&self.index_path); if let Ok(repo) = git2::Repository::open(&path) { trace!("opened a repo without a lock"); return Ok(repo); } trace!("acquiring registry index lock"); match git2::Repository::open(&path) { Ok(repo) => Ok(repo), Err(_) => { 
drop(paths::remove_dir_all(&path)); paths::create_dir_all(&path)?; let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); Ok(git2::Repository::init_opts(&path, &opts) .with_context(|| "failed to initialize index git repository")?) } } }) } fn head(&self) -> CargoResult<git2::Oid> { if self.head.get().is_none() { let repo = self.repo()?; let oid = self.index_git_ref.resolve(repo)?; self.head.set(Some(oid)); } Ok(self.head.get().unwrap()) } fn tree(&self) -> CargoResult<Ref<'_, git2::Tree<'_>>> { { let tree = self.tree.borrow(); if tree.is_some() { return Ok(Ref::map(tree, |s| s.as_ref().unwrap())); } } let repo = self.repo()?; let commit = repo.find_commit(self.head()?)?; let tree = commit.tree()?; let tree = unsafe { mem::transmute::<git2::Tree<'_>, git2::Tree<'static>>(tree) }; *self.tree.borrow_mut() = Some(tree); Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) } fn filename(&self, pkg: PackageId) -> String { format!("{}-{}.crate", pkg.name(), pkg.version()) } } const LAST_UPDATED_FILE: &str = ".last-updated"; impl<'cfg> RegistryData for RemoteRegistry<'cfg> { fn prepare(&self) -> CargoResult<()> { self.repo()?; Ok(()) } fn index_path(&self) -> &Filesystem { &self.index_path } fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { self.config.assert_package_cache_locked(path) } fn current_version(&self) -> Option<InternedString> { if let Some(sha) = self.current_sha.get() { return Some(sha); } let sha = InternedString::new(&self.head().ok()?.to_string()); self.current_sha.set(Some(sha)); Some(sha) } fn load( &self, _root: &Path, path: &Path, data: &mut dyn FnMut(&[u8]) -> CargoResult<()>, ) -> CargoResult<()> { let repo = self.repo()?; let tree = self.tree()?; let entry = tree.get_path(path)?; let object = entry.to_object(repo)?; let blob = match object.as_blob() { Some(blob) => blob, None => anyhow::bail!("path `{}` is not a blob in the git repo", path.display()), }; data(blob.content()) } fn config(&mut self) 
-> CargoResult<Option<RegistryConfig>> { debug!("loading config"); self.prepare()?; self.config.assert_package_cache_locked(&self.index_path); let mut config = None; self.load(Path::new(""), Path::new("config.json"), &mut |json| { config = Some(serde_json::from_slice(json)?); Ok(()) })?; trace!("config loaded"); Ok(config) } fn update_index(&mut self) -> CargoResult<()> { if self.config.offline() { return Ok(()); } if self.config.cli_unstable().no_index_update { return Ok(()); } if self.config.updated_sources().contains(&self.source_id) { return Ok(()); } debug!("updating the index"); self.config.http()?; self.prepare()?; self.head.set(None); *self.tree.borrow_mut() = None; self.current_sha.set(None); let path = self.config.assert_package_cache_locked(&self.index_path); self.config .shell() .status("Updating", self.source_id.display_index())?; let url = self.source_id.url(); let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) .with_context(|| format!("failed to fetch `{}`", url))?; self.config.updated_sources().insert(self.source_id); paths::create(&path.join(LAST_UPDATED_FILE))?; Ok(()) } fn download(&mut self, pkg: PackageId, _checksum: &str) -> CargoResult<MaybeLock> { let filename = self.filename(pkg); let path = self.cache_path.join(&filename); let path = self.config.assert_package_cache_locked(&path); if let Ok(dst) = File::open(&path) { let meta = dst.metadata()?; if meta.len() > 0 { return Ok(MaybeLock::Ready(dst)); } } let config = self.config()?.unwrap(); let mut url = config.dl; if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) && !url.contains(PREFIX_TEMPLATE) && !url.contains(LOWER_PREFIX_TEMPLATE) { write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); } let prefix = make_dep_prefix(&*pkg.name()); let url = url .replace(CRATE_TEMPLATE, &*pkg.name()) .replace(VERSION_TEMPLATE, &pkg.version().to_string()) .replace(PREFIX_TEMPLATE, &prefix) 
.replace(LOWER_PREFIX_TEMPLATE, &prefix.to_lowercase()); Ok(MaybeLock::Download { url, descriptor: pkg.to_string(), }) } fn finish_download( &mut self, pkg: PackageId, checksum: &str, data: &[u8], ) -> CargoResult<File> { let actual = Sha256::new().update(data).finish_hex(); if actual != checksum { anyhow::bail!("failed to verify the checksum of `{}`", pkg) } let filename = self.filename(pkg); self.cache_path.create_dir()?; let path = self.cache_path.join(&filename); let path = self.config.assert_package_cache_locked(&path); let mut dst = OpenOptions::new() .create(true) .read(true) .write(true) .open(&path) .with_context(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); } dst.write_all(data)?; dst.seek(SeekFrom::Start(0))?; Ok(dst) } fn is_crate_downloaded(&self, pkg: PackageId) -> bool { let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let path = Path::new(&filename); let path = self.cache_path.join(path); let path = self.config.assert_package_cache_locked(&path); if let Ok(meta) = fs::metadata(path) { return meta.len() > 0; } false } } impl<'cfg> Drop for RemoteRegistry<'cfg> { fn drop(&mut self) { self.tree.borrow_mut().take(); } } #[cfg(test)] mod tests { use super::make_dep_prefix; #[test] fn dep_prefix() { assert_eq!(make_dep_prefix("a"), "1"); assert_eq!(make_dep_prefix("ab"), "2"); assert_eq!(make_dep_prefix("abc"), "3/a"); assert_eq!(make_dep_prefix("Abc"), "3/A"); assert_eq!(make_dep_prefix("AbCd"), "Ab/Cd"); assert_eq!(make_dep_prefix("aBcDe"), "aB/cD"); } }
use crate::core::{GitReference, PackageId, SourceId}; use crate::sources::git; use crate::sources::registry::MaybeLock; use crate::sources::registry::{ RegistryConfig, RegistryData, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, PREFIX_TEMPLATE, VERSION_TEMPLATE, }; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::{Config, Filesystem}; use anyhow::Context as _; use cargo_util::{paths, Sha256}; use lazycell::LazyCell; use log::{debug, trace}; use std::cell::{Cell, Ref, RefCell}; use std::fmt::Write as FmtWrite; use std::fs::{self, File, OpenOptions}; use std::io::prelude::*; use std::io::SeekFrom; use std::mem; use std::path::Path; use std::str; fn make_dep_prefix(name: &str) -> String { match name.len() { 1 => String::from("1"), 2 => String::from("2"), 3 => format!("3/{}", &name[..1]), _ => format!("{}/{}", &name[0..2], &name[2..4]), } } pub struct RemoteRegistry<'cfg> { index_path: Filesystem, cache_path: Filesystem, source_id: SourceId, index_git_ref: GitReference, config: &'cfg Config, tree: RefCell<Option<git2::Tree<'static>>>, repo: LazyCell<git2::Repository>, head: Cell<Option<git2::Oid>>, current_sha: Cell<Option<InternedString>>, } impl<'cfg> RemoteRegistry<'cfg> { pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { RemoteRegistry { index_path: config.registry_index_path().join(name), cache_path: config.registry_cache_path().join(name), source_id, config, index_git_ref: GitReference::DefaultBranch, tree: RefCell::new(None), repo: LazyCell::new(), head: Cell::new(None), current_sha: Cell::new(None), } } fn repo(&self) -> CargoResult<&git2::Repository> { self.repo.try_borrow_with(|| { let path = self.config.assert_package_cache_locked(&self.index_path); if let Ok(repo) = git2::Repository::open(&path) { trace!("opened a repo without a lock"); return Ok(repo); } trace!("acquiring registry index lock"); match git2::Repository::open(&path) { Ok(repo) => Ok(repo), Err(_) => { 
drop(paths::remove_dir_all(&path)); paths::create_dir_all(&path)?; let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); Ok(git2::Repository::init_opts(&path, &opts) .with_context(|| "failed to initialize index git repository")?) } } }) } fn head(&self) -> CargoResult<git2::Oid> { if self.head.get().is_none() { let repo = self.repo()?; let oid = self.index_git_ref.resolve(repo)?; self.head.set(Some(oid)); } Ok(self.head.get().unwrap()) } fn tree(&self) -> CargoResult<Ref<'_, git2::Tree<'_>>> { { let tree = self.tree.borrow(); if tree.is_some() { return Ok(Ref::map(tree, |s| s.as_ref().unwrap())); } } let repo = self.repo()?; let commit = repo.find_commit(self.head()?)?; let tree = commit.tree()?; let tree = unsafe { mem::transmute::<git2::Tree<'_>, git2::Tree<'static>>(tree) }; *self.tree.borrow_mut() = Some(tree); Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) } fn filename(&self, pkg: PackageId) -> String { format!("{}-{}.crate", pkg.name(), pkg.version()) } } const LAST_UPDATED_FILE: &str = ".last-updated"; impl<'cfg> RegistryData for RemoteRegistry<'cfg> { fn prepare(&self) -> CargoResult<()> { self.repo()?; Ok(()) } fn index_path(&self) -> &Filesystem { &self.index_path } fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { self.config.assert_package_cache_locked(path) } fn current_version(&self) -> Option<InternedString> { if let Some(sha) = self.current_sha.get() { return Some(sha); } let sha = InternedString::new(&self.head().ok()?.to_string()); self.current_sha.set(Some(sha)); Some(sha) } fn load( &self, _root: &Path, path: &Path, data: &mut dyn FnMut(&[u8]) -> CargoResult<()>, ) -> CargoResult<()> { let repo = self.repo()?; let tree = self.tree()?; let entry = tree.get_path(path)?; let object = entry.to_object(repo)?; let blob = match object.as_blob() { Some(blob) => blob, None => anyhow::bail!("path `{}` is not a blob in the git repo", path.display()), }; data(blob.content()) } fn config(&mut self) 
-> CargoResult<Option<RegistryConfig>> { debug!("
th::new(""), Path::new("config.json"), &mut |json| { config = Some(serde_json::from_slice(json)?); Ok(()) })?; trace!("config loaded"); Ok(config) } fn update_index(&mut self) -> CargoResult<()> { if self.config.offline() { return Ok(()); } if self.config.cli_unstable().no_index_update { return Ok(()); } if self.config.updated_sources().contains(&self.source_id) { return Ok(()); } debug!("updating the index"); self.config.http()?; self.prepare()?; self.head.set(None); *self.tree.borrow_mut() = None; self.current_sha.set(None); let path = self.config.assert_package_cache_locked(&self.index_path); self.config .shell() .status("Updating", self.source_id.display_index())?; let url = self.source_id.url(); let repo = self.repo.borrow_mut().unwrap(); git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) .with_context(|| format!("failed to fetch `{}`", url))?; self.config.updated_sources().insert(self.source_id); paths::create(&path.join(LAST_UPDATED_FILE))?; Ok(()) } fn download(&mut self, pkg: PackageId, _checksum: &str) -> CargoResult<MaybeLock> { let filename = self.filename(pkg); let path = self.cache_path.join(&filename); let path = self.config.assert_package_cache_locked(&path); if let Ok(dst) = File::open(&path) { let meta = dst.metadata()?; if meta.len() > 0 { return Ok(MaybeLock::Ready(dst)); } } let config = self.config()?.unwrap(); let mut url = config.dl; if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) && !url.contains(PREFIX_TEMPLATE) && !url.contains(LOWER_PREFIX_TEMPLATE) { write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); } let prefix = make_dep_prefix(&*pkg.name()); let url = url .replace(CRATE_TEMPLATE, &*pkg.name()) .replace(VERSION_TEMPLATE, &pkg.version().to_string()) .replace(PREFIX_TEMPLATE, &prefix) .replace(LOWER_PREFIX_TEMPLATE, &prefix.to_lowercase()); Ok(MaybeLock::Download { url, descriptor: pkg.to_string(), }) } fn finish_download( &mut self, pkg: PackageId, checksum: &str, data: &[u8], 
) -> CargoResult<File> { let actual = Sha256::new().update(data).finish_hex(); if actual != checksum { anyhow::bail!("failed to verify the checksum of `{}`", pkg) } let filename = self.filename(pkg); self.cache_path.create_dir()?; let path = self.cache_path.join(&filename); let path = self.config.assert_package_cache_locked(&path); let mut dst = OpenOptions::new() .create(true) .read(true) .write(true) .open(&path) .with_context(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); } dst.write_all(data)?; dst.seek(SeekFrom::Start(0))?; Ok(dst) } fn is_crate_downloaded(&self, pkg: PackageId) -> bool { let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let path = Path::new(&filename); let path = self.cache_path.join(path); let path = self.config.assert_package_cache_locked(&path); if let Ok(meta) = fs::metadata(path) { return meta.len() > 0; } false } } impl<'cfg> Drop for RemoteRegistry<'cfg> { fn drop(&mut self) { self.tree.borrow_mut().take(); } } #[cfg(test)] mod tests { use super::make_dep_prefix; #[test] fn dep_prefix() { assert_eq!(make_dep_prefix("a"), "1"); assert_eq!(make_dep_prefix("ab"), "2"); assert_eq!(make_dep_prefix("abc"), "3/a"); assert_eq!(make_dep_prefix("Abc"), "3/A"); assert_eq!(make_dep_prefix("AbCd"), "Ab/Cd"); assert_eq!(make_dep_prefix("aBcDe"), "aB/cD"); } }
loading config"); self.prepare()?; self.config.assert_package_cache_locked(&self.index_path); let mut config = None; self.load(Pa
function_block-random_span
[ { "content": "/// Commit changes to the git repository.\n\npub fn commit(repo: &git2::Repository) -> git2::Oid {\n\n let tree_id = t!(t!(repo.index()).write_tree());\n\n let sig = t!(repo.signature());\n\n let mut parents = Vec::new();\n\n if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {\n\n parents.push(t!(repo.find_commit(parent)))\n\n }\n\n let parents = parents.iter().collect::<Vec<_>>();\n\n t!(repo.commit(\n\n Some(\"HEAD\"),\n\n &sig,\n\n &sig,\n\n \"test\",\n\n &t!(repo.find_tree(tree_id)),\n\n &parents\n\n ))\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 0, "score": 466268.73930245463 }, { "content": "/// Create a new tag in the git repository.\n\npub fn tag(repo: &git2::Repository, name: &str) {\n\n let head = repo.head().unwrap().target().unwrap();\n\n t!(repo.tag(\n\n name,\n\n &t!(repo.find_object(head, None)),\n\n &t!(repo.signature()),\n\n \"make a new tag\",\n\n false\n\n ));\n\n}\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 1, "score": 465847.14636914927 }, { "content": "/// Attempts to parse a string into a [`toml::Value`]. This is not specific to any\n\n/// particular kind of TOML file.\n\n///\n\n/// The purpose of this wrapper is to detect invalid TOML which was previously\n\n/// accepted and display a warning to the user in that case. 
The `file` and `config`\n\n/// parameters are only used by this fallback path.\n\npub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult<toml::Value> {\n\n let first_error = match toml.parse() {\n\n Ok(ret) => return Ok(ret),\n\n Err(e) => e,\n\n };\n\n\n\n let mut second_parser = toml::de::Deserializer::new(toml);\n\n second_parser.set_require_newline_after_table(false);\n\n if let Ok(ret) = toml::Value::deserialize(&mut second_parser) {\n\n let msg = format!(\n\n \"\\\n\nTOML file found which contains invalid syntax and will soon not parse\n\nat `{}`.\n\n\n\nThe TOML spec requires newlines after table definitions (e.g., `[a] b = 1` is\n\ninvalid), but this file has a table header which does not have a newline after\n\nit. A newline needs to be added and this warning will soon become a hard error\n\nin the future.\",\n\n file.display()\n\n );\n", "file_path": "src/cargo/util/toml/mod.rs", "rank": 2, "score": 442011.3415344986 }, { "content": "/// Get the filename for a library.\n\n///\n\n/// `kind` should be one of: \"lib\", \"rlib\", \"staticlib\", \"dylib\", \"proc-macro\"\n\n///\n\n/// For example, dynamic library named \"foo\" would return:\n\n/// - macOS: \"libfoo.dylib\"\n\n/// - Windows: \"foo.dll\"\n\n/// - Unix: \"libfoo.so\"\n\npub fn get_lib_filename(name: &str, kind: &str) -> String {\n\n let prefix = get_lib_prefix(kind);\n\n let extension = get_lib_extension(kind);\n\n format!(\"{}{}.{}\", prefix, name, extension)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 3, "score": 436175.8406985217 }, { "content": "pub fn generate_alt_dl_url(name: &str) -> String {\n\n let base = Url::from_file_path(generate_path(name)).ok().unwrap();\n\n format!(\"{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate\", base)\n\n}\n\n\n\n/// A builder for initializing registries.\n\npub struct RegistryBuilder {\n\n /// If `true`, adds source replacement for crates.io to a registry on the filesystem.\n\n replace_crates_io: bool,\n\n 
/// If `true`, configures a registry named \"alternative\".\n\n alternative: bool,\n\n /// If set, sets the API url for the \"alternative\" registry.\n\n /// This defaults to a directory on the filesystem.\n\n alt_api_url: Option<String>,\n\n /// If `true`, configures `.cargo/credentials` with some tokens.\n\n add_tokens: bool,\n\n}\n\n\n\nimpl RegistryBuilder {\n\n pub fn new() -> RegistryBuilder {\n", "file_path": "crates/cargo-test-support/src/registry.rs", "rank": 4, "score": 434082.68856818357 }, { "content": "pub fn write_config_at(path: impl AsRef<Path>, contents: &str) {\n\n let path = paths::root().join(path.as_ref());\n\n fs::create_dir_all(path.parent().unwrap()).unwrap();\n\n fs::write(path, contents).unwrap();\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 5, "score": 431957.7383370557 }, { "content": "fn reset(repo: &git2::Repository, obj: &git2::Object<'_>, config: &Config) -> CargoResult<()> {\n\n let mut pb = Progress::new(\"Checkout\", config);\n\n let mut opts = git2::build::CheckoutBuilder::new();\n\n opts.progress(|_, cur, max| {\n\n drop(pb.tick(cur, max, \"\"));\n\n });\n\n debug!(\"doing reset\");\n\n repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?;\n\n debug!(\"reset done\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 6, "score": 424217.692720781 }, { "content": "pub fn values(args: &ArgMatches<'_>, name: &str) -> Vec<String> {\n\n args._values_of(name)\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 7, "score": 423002.7764980395 }, { "content": "pub fn generate_path(name: &str) -> PathBuf {\n\n paths::root().join(name)\n\n}\n", "file_path": "crates/cargo-test-support/src/registry.rs", "rank": 8, "score": 420956.85241882654 }, { "content": "fn validate_feature_name(config: &Config, pkg_id: PackageId, name: &str) -> CargoResult<()> {\n\n let mut chars = name.chars();\n\n const FUTURE: &str = \"This was previously accepted but is being phased out; \\\n\n it 
will become a hard error in a future release.\\n\\\n\n For more information, see issue #8813 <https://github.com/rust-lang/cargo/issues/8813>, \\\n\n and please leave a comment if this will be a problem for your project.\";\n\n if let Some(ch) = chars.next() {\n\n if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) {\n\n config.shell().warn(&format!(\n\n \"invalid character `{}` in feature `{}` in package {}, \\\n\n the first character must be a Unicode XID start character or digit \\\n\n (most letters or `_` or `0` to `9`)\\n\\\n\n {}\",\n\n ch, name, pkg_id, FUTURE\n\n ))?;\n\n }\n\n }\n\n for ch in chars {\n\n if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') {\n\n config.shell().warn(&format!(\n", "file_path": "src/cargo/core/summary.rs", "rank": 9, "score": 416856.2654364895 }, { "content": "pub fn exe(name: &str) -> String {\n\n format!(\"{}{}\", name, EXE_SUFFIX)\n\n}\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 10, "score": 403932.9343054807 }, { "content": "pub fn registry_logout(config: &Config, reg: Option<String>) -> CargoResult<()> {\n\n let (registry, reg_cfg, _) = registry(config, None, None, reg.clone(), false, false)?;\n\n let reg_name = reg.as_deref().unwrap_or(\"crates.io\");\n\n if reg_cfg.credential_process.is_none() && reg_cfg.token.is_none() {\n\n config.shell().status(\n\n \"Logout\",\n\n format!(\"not currently logged in to `{}`\", reg_name),\n\n )?;\n\n return Ok(());\n\n }\n\n auth::logout(\n\n config,\n\n reg_cfg.credential_process.as_ref(),\n\n reg.as_deref(),\n\n registry.host(),\n\n )?;\n\n config.shell().status(\n\n \"Logout\",\n\n format!(\n\n \"token for `{}` has been removed from local storage\",\n", "file_path": "src/cargo/ops/registry.rs", "rank": 11, "score": 401822.40611036256 }, { "content": "pub fn pkg_id(name: &str) -> PackageId {\n\n PackageId::new(name, \"1.0.0\", registry_loc()).unwrap()\n\n}\n\n\n", "file_path": 
"crates/resolver-tests/src/lib.rs", "rank": 12, "score": 398615.76432216004 }, { "content": "/// Processes the handlebars template at the given file.\n\npub fn expand(file: &Path, formatter: FormatterRef) -> Result<String, Error> {\n\n let mut handlebars = Handlebars::new();\n\n handlebars.set_strict_mode(true);\n\n handlebars.register_helper(\"lower\", Box::new(lower));\n\n handlebars.register_helper(\"options\", Box::new(OptionsHelper { formatter }));\n\n handlebars.register_helper(\"option\", Box::new(OptionHelper { formatter }));\n\n handlebars.register_helper(\"man\", Box::new(ManLinkHelper { formatter }));\n\n handlebars.register_decorator(\"set\", Box::new(set_decorator));\n\n handlebars.register_template_file(\"template\", file)?;\n\n let includes = file.parent().unwrap().join(\"includes\");\n\n handlebars.register_templates_directory(\".md\", includes)?;\n\n let mut data: HashMap<String, String> = HashMap::new();\n\n let man_name = file\n\n .file_stem()\n\n .expect(\"expected filename\")\n\n .to_str()\n\n .expect(\"utf8 filename\")\n\n .to_string();\n\n data.insert(\"man_name\".to_string(), man_name);\n\n let expanded = handlebars.render(\"template\", &data)?;\n\n Ok(expanded)\n\n}\n\n\n", "file_path": "crates/mdman/src/hbs.rs", "rank": 13, "score": 397017.76999035146 }, { "content": "#[track_caller]\n\npub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {\n\n assert!(check_has_installed_exe(path, name));\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 14, "score": 396969.5026940612 }, { "content": "#[track_caller]\n\npub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {\n\n assert!(!check_has_installed_exe(path, name));\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 15, "score": 396969.5026940612 }, { "content": "pub fn basic_manifest(name: &str, version: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n name = \"{}\"\n\n version = 
\"{}\"\n\n authors = []\n\n \"#,\n\n name, version\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 16, "score": 395937.2718145946 }, { "content": "pub fn basic_bin_manifest(name: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"[email protected]\"]\n\n\n\n [[bin]]\n\n\n\n name = \"{}\"\n\n \"#,\n\n name, name\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 17, "score": 393265.90462380624 }, { "content": "pub fn basic_lib_manifest(name: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"[email protected]\"]\n\n\n\n [lib]\n\n\n\n name = \"{}\"\n\n \"#,\n\n name, name\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 18, "score": 393265.90462380624 }, { "content": "/// Create a new git repository with a project.\n\n/// Returns both the Project and the git Repository.\n\npub fn new_repo<F>(name: &str, callback: F) -> (Project, git2::Repository)\n\nwhere\n\n F: FnOnce(ProjectBuilder) -> ProjectBuilder,\n\n{\n\n let mut git_project = project().at(name);\n\n git_project = callback(git_project);\n\n let git_project = git_project.build();\n\n\n\n let repo = init(&git_project.root());\n\n add(&repo);\n\n commit(&repo);\n\n (git_project, repo)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 19, "score": 391967.25473088614 }, { "content": "/// Determines the root directory where installation is done.\n\npub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {\n\n let config_root = config.get_path(\"install.root\")?;\n\n Ok(flag\n\n .map(PathBuf::from)\n\n .or_else(|| env::var_os(\"CARGO_INSTALL_ROOT\").map(PathBuf::from))\n\n .or_else(move || config_root.map(|v| v.val))\n\n .map(Filesystem::new)\n\n .unwrap_or_else(|| config.home().clone()))\n\n}\n\n\n", "file_path": 
"src/cargo/ops/common_for_install_and_uninstall.rs", "rank": 20, "score": 391719.32663953176 }, { "content": "/// Returns the path to the `file` in `pwd`, if it exists.\n\npub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult<PathBuf> {\n\n let manifest = pwd.join(file);\n\n\n\n if manifest.exists() {\n\n Ok(manifest)\n\n } else {\n\n anyhow::bail!(\"Could not find `{}` in `{}`\", file, pwd.display())\n\n }\n\n}\n", "file_path": "src/cargo/util/important_paths.rs", "rank": 21, "score": 391384.80983938853 }, { "content": "pub fn basic_bin_manifest_with_readme(name: &str, readme_filename: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"[email protected]\"]\n\n readme = {}\n\n\n\n [[bin]]\n\n\n\n name = \"{}\"\n\n \"#,\n\n name, readme_filename, name\n\n )\n\n}\n\n\n", "file_path": "tests/testsuite/read_manifest.rs", "rank": 22, "score": 391263.224785566 }, { "content": "pub fn values_os(args: &ArgMatches<'_>, name: &str) -> Vec<OsString> {\n\n args._values_of_os(name)\n\n}\n\n\n\n#[derive(PartialEq, PartialOrd, Eq, Ord)]\n\npub enum CommandInfo {\n\n BuiltIn { name: String, about: Option<String> },\n\n External { name: String, path: PathBuf },\n\n}\n\n\n\nimpl CommandInfo {\n\n pub fn name(&self) -> &str {\n\n match self {\n\n CommandInfo::BuiltIn { name, .. } => name,\n\n CommandInfo::External { name, .. 
} => name,\n\n }\n\n }\n\n}\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 23, "score": 385772.3538801404 }, { "content": "/// Create a `RepoBuilder` to build a new git repository.\n\n///\n\n/// Call `build()` to finalize and create the repository.\n\npub fn repo(p: &Path) -> RepoBuilder {\n\n RepoBuilder::init(p)\n\n}\n\n\n\nimpl RepoBuilder {\n\n pub fn init(p: &Path) -> RepoBuilder {\n\n t!(fs::create_dir_all(p.parent().unwrap()));\n\n let repo = init(p);\n\n RepoBuilder {\n\n repo,\n\n files: Vec::new(),\n\n }\n\n }\n\n\n\n /// Add a file to the repository.\n\n pub fn file(self, path: &str, contents: &str) -> RepoBuilder {\n\n let mut me = self.nocommit_file(path, contents);\n\n me.files.push(PathBuf::from(path));\n\n me\n\n }\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 24, "score": 384121.9806104031 }, { "content": "/// Determines the `PathSource` from a `SourceId`.\n\npub fn path_source(source_id: SourceId, config: &Config) -> CargoResult<PathSource<'_>> {\n\n let path = source_id\n\n .url()\n\n .to_file_path()\n\n .map_err(|()| format_err!(\"path sources must have a valid path\"))?;\n\n Ok(PathSource::new(&path, source_id, config))\n\n}\n\n\n", "file_path": "src/cargo/ops/common_for_install_and_uninstall.rs", "rank": 25, "score": 383362.2410805024 }, { "content": "/// Prepare the authentication callbacks for cloning a git repository.\n\n///\n\n/// The main purpose of this function is to construct the \"authentication\n\n/// callback\" which is used to clone a repository. This callback will attempt to\n\n/// find the right authentication on the system (without user input) and will\n\n/// guide libgit2 in doing so.\n\n///\n\n/// The callback is provided `allowed` types of credentials, and we try to do as\n\n/// much as possible based on that:\n\n///\n\n/// * Prioritize SSH keys from the local ssh agent as they're likely the most\n\n/// reliable. 
The username here is prioritized from the credential\n\n/// callback, then from whatever is configured in git itself, and finally\n\n/// we fall back to the generic user of `git`.\n\n///\n\n/// * If a username/password is allowed, then we fallback to git2-rs's\n\n/// implementation of the credential helper. This is what is configured\n\n/// with `credential.helper` in git, and is the interface for the macOS\n\n/// keychain, for example.\n\n///\n\n/// * After the above two have failed, we just kinda grapple attempting to\n\n/// return *something*.\n\n///\n\n/// If any form of authentication fails, libgit2 will repeatedly ask us for\n\n/// credentials until we give it a reason to not do so. To ensure we don't\n\n/// just sit here looping forever we keep track of authentications we've\n\n/// attempted and we don't try the same ones again.\n\nfn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>\n\nwhere\n\n F: FnMut(&mut git2::Credentials<'_>) -> CargoResult<T>,\n\n{\n\n let mut cred_helper = git2::CredentialHelper::new(url);\n\n cred_helper.config(cfg);\n\n\n\n let mut ssh_username_requested = false;\n\n let mut cred_helper_bad = None;\n\n let mut ssh_agent_attempts = Vec::new();\n\n let mut any_attempts = false;\n\n let mut tried_sshkey = false;\n\n let mut url_attempt = None;\n\n\n\n let orig_url = url;\n\n let mut res = f(&mut |url, username, allowed| {\n\n any_attempts = true;\n\n if url != orig_url {\n\n url_attempt = Some(url.to_string());\n\n }\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 27, "score": 373255.5966957692 }, { "content": "pub fn pkg_loc(name: &str, loc: &str) -> Summary {\n\n let link = if name.ends_with(\"-sys\") {\n\n Some(name)\n\n } else {\n\n None\n\n };\n\n Summary::new(\n\n &Config::default().unwrap(),\n\n pkg_id_loc(name, loc),\n\n Vec::new(),\n\n &BTreeMap::new(),\n\n link,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 28, "score": 
371447.81117548887 }, { "content": "pub fn main_file(println: &str, deps: &[&str]) -> String {\n\n let mut buf = String::new();\n\n\n\n for dep in deps.iter() {\n\n buf.push_str(&format!(\"extern crate {};\\n\", dep));\n\n }\n\n\n\n buf.push_str(\"fn main() { println!(\");\n\n buf.push_str(println);\n\n buf.push_str(\"); }\\n\");\n\n\n\n buf\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 29, "score": 366532.9680935143 }, { "content": "pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {\n\n let name = match opts.krate {\n\n Some(ref name) => name.clone(),\n\n None => {\n\n let manifest_path = find_root_manifest_for_wd(config.cwd())?;\n\n let ws = Workspace::new(&manifest_path, config)?;\n\n ws.current()?.package_id().name().to_string()\n\n }\n\n };\n\n\n\n let (mut registry, _, _) = registry(\n\n config,\n\n opts.token.clone(),\n\n opts.index.clone(),\n\n opts.registry.clone(),\n\n true,\n\n true,\n\n )?;\n\n\n\n if let Some(ref v) = opts.to_add {\n", "file_path": "src/cargo/ops/registry.rs", "rank": 30, "score": 366204.91157465975 }, { "content": "pub fn write_config(config: &str) {\n\n write_config_at(paths::root().join(\".cargo/config\"), config);\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 31, "score": 365757.5465528979 }, { "content": "/// Configure a libcurl http handle with the defaults options for Cargo\n\npub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<HttpTimeout> {\n\n let http = config.http_config()?;\n\n if let Some(proxy) = http_proxy(config)? 
{\n\n handle.proxy(&proxy)?;\n\n }\n\n if let Some(cainfo) = &http.cainfo {\n\n let cainfo = cainfo.resolve_path(config);\n\n handle.cainfo(&cainfo)?;\n\n }\n\n if let Some(check) = http.check_revoke {\n\n handle.ssl_options(SslOpt::new().no_revoke(!check))?;\n\n }\n\n\n\n if let Some(user_agent) = &http.user_agent {\n\n handle.useragent(user_agent)?;\n\n } else {\n\n handle.useragent(&version().to_string())?;\n\n }\n\n\n\n fn to_ssl_version(s: &str) -> CargoResult<SslVersion> {\n", "file_path": "src/cargo/ops/registry.rs", "rank": 32, "score": 365538.4602637704 }, { "content": "/// Check the base requirements for a package name.\n\n///\n\n/// This can be used for other things than package names, to enforce some\n\n/// level of sanity. Note that package names have other restrictions\n\n/// elsewhere. `cargo new` has a few restrictions, such as checking for\n\n/// reserved names. crates.io has even more restrictions.\n\npub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> {\n\n let mut chars = name.chars();\n\n if let Some(ch) = chars.next() {\n\n if ch.is_digit(10) {\n\n // A specific error for a potentially common case.\n\n bail!(\n\n \"the name `{}` cannot be used as a {}, \\\n\n the name cannot start with a digit{}\",\n\n name,\n\n what,\n\n help\n\n );\n\n }\n\n if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') {\n\n bail!(\n\n \"invalid character `{}` in {}: `{}`, \\\n\n the first character must be a Unicode XID start character \\\n\n (most letters or `_`){}\",\n\n ch,\n\n what,\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 33, "score": 364975.51865222165 }, { "content": "/// Read the output from Config.\n\npub fn read_output(config: Config) -> String {\n\n drop(config); // Paranoid about flushing the file.\n\n let path = paths::root().join(\"shell.out\");\n\n fs::read_to_string(path).unwrap()\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 34, "score": 364849.4206731558 }, { "content": 
"pub fn generate_url(name: &str) -> Url {\n\n Url::from_file_path(generate_path(name)).ok().unwrap()\n\n}\n", "file_path": "crates/cargo-test-support/src/registry.rs", "rank": 35, "score": 364367.05688908324 }, { "content": "fn pkg(name: &str, vers: &str) {\n\n Package::new(name, vers)\n\n .file(\"src/main.rs\", \"fn main() {{}}\")\n\n .publish();\n\n}\n\n\n", "file_path": "tests/testsuite/concurrent.rs", "rank": 36, "score": 363649.9007876786 }, { "content": "fn pkg(name: &str, vers: &str) {\n\n Package::new(name, vers)\n\n .file(\"src/lib.rs\", \"\")\n\n .file(\n\n \"src/main.rs\",\n\n &format!(\"extern crate {}; fn main() {{}}\", name),\n\n )\n\n .publish();\n\n}\n\n\n", "file_path": "tests/testsuite/install.rs", "rank": 37, "score": 363649.9007876786 }, { "content": "#[track_caller]\n\npub fn assert_match(expected: &str, actual: &str) {\n\n if !normalized_lines_match(expected, actual, None) {\n\n panic!(\n\n \"Did not find expected:\\n{}\\nActual:\\n{}\\n\",\n\n expected, actual\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 38, "score": 363434.4168441724 }, { "content": "/// Opens an existing file.\n\npub fn open<P: AsRef<Path>>(path: P) -> Result<File> {\n\n let path = path.as_ref();\n\n File::open(path).with_context(|| format!(\"failed to open file `{}`\", path.display()))\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 39, "score": 360896.22717960516 }, { "content": "/// Creates a new file.\n\npub fn create<P: AsRef<Path>>(path: P) -> Result<File> {\n\n let path = path.as_ref();\n\n File::create(path).with_context(|| format!(\"failed to create file `{}`\", path.display()))\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 40, "score": 360896.22717960516 }, { "content": "/// Cargo has a bunch of long-lived git repositories in its global cache and\n\n/// some, like the index, are updated very frequently. 
Right now each update\n\n/// creates a new \"pack file\" inside the git database, and over time this can\n\n/// cause bad performance and bad current behavior in libgit2.\n\n///\n\n/// One pathological use case today is where libgit2 opens hundreds of file\n\n/// descriptors, getting us dangerously close to blowing out the OS limits of\n\n/// how many fds we can have open. This is detailed in #4403.\n\n///\n\n/// To try to combat this problem we attempt a `git gc` here. Note, though, that\n\n/// we may not even have `git` installed on the system! As a result we\n\n/// opportunistically try a `git gc` when the pack directory looks too big, and\n\n/// failing that we just blow away the repository and start over.\n\nfn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {\n\n // Here we arbitrarily declare that if you have more than 100 files in your\n\n // `pack` folder that we need to do a gc.\n\n let entries = match repo.path().join(\"objects/pack\").read_dir() {\n\n Ok(e) => e.count(),\n\n Err(_) => {\n\n debug!(\"skipping gc as pack dir appears gone\");\n\n return Ok(());\n\n }\n\n };\n\n let max = env::var(\"__CARGO_PACKFILE_LIMIT\")\n\n .ok()\n\n .and_then(|s| s.parse::<usize>().ok())\n\n .unwrap_or(100);\n\n if entries < max {\n\n debug!(\"skipping gc as there's only {} pack files\", entries);\n\n return Ok(());\n\n }\n\n\n\n // First up, try a literal `git gc` by shelling out to git. This is pretty\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 41, "score": 360665.74627324013 }, { "content": "fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {\n\n if !callback(path)? 
{\n\n trace!(\"not processing {}\", path.display());\n\n return Ok(());\n\n }\n\n\n\n // Ignore any permission denied errors because temporary directories\n\n // can often have some weird permissions on them.\n\n let dirs = match fs::read_dir(path) {\n\n Ok(dirs) => dirs,\n\n Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),\n\n Err(e) => {\n\n let cx = format!(\"failed to read directory `{}`\", path.display());\n\n let e = anyhow::Error::from(e);\n\n return Err(e.context(cx));\n\n }\n\n };\n\n for dir in dirs {\n\n let dir = dir?;\n\n if dir.file_type()?.is_dir() {\n\n walk(&dir.path(), callback)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_read_manifest.rs", "rank": 42, "score": 359746.585971355 }, { "content": "// Helper for publishing a package.\n\nfn pkg(name: &str, vers: &str) {\n\n pkg_maybe_yanked(name, vers, false)\n\n}\n\n\n", "file_path": "tests/testsuite/install_upgrade.rs", "rank": 43, "score": 358879.3185514625 }, { "content": "pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {\n\n Arg::with_name(name).long(name).help(help)\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 44, "score": 356988.1328746247 }, { "content": "fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {\n\n // Here we want to drop the current repository object pointed to by `repo`,\n\n // so we initialize temporary repository in a sub-folder, blow away the\n\n // existing git folder, and then recreate the git repo. Finally we blow away\n\n // the `tmp` folder we allocated.\n\n let path = repo.path().to_path_buf();\n\n debug!(\"reinitializing git repo at {:?}\", path);\n\n let tmp = path.join(\"tmp\");\n\n let bare = !repo.path().ends_with(\".git\");\n\n *repo = init(&tmp, false)?;\n\n for entry in path.read_dir()? 
{\n\n let entry = entry?;\n\n if entry.file_name().to_str() == Some(\"tmp\") {\n\n continue;\n\n }\n\n let path = entry.path();\n\n drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));\n\n }\n\n *repo = init(&path, bare)?;\n\n paths::remove_dir_all(&tmp)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 45, "score": 355841.18797034817 }, { "content": "/// Compares a line with an expected pattern.\n\n/// - Use `[..]` as a wildcard to match 0 or more characters on the same line\n\n/// (similar to `.*` in a regex). It is non-greedy.\n\n/// - Use `[EXE]` to optionally add `.exe` on Windows (empty string on other\n\n/// platforms).\n\n/// - There is a wide range of macros (such as `[COMPILING]` or `[WARNING]`)\n\n/// to match cargo's \"status\" output and allows you to ignore the alignment.\n\n/// See `substitute_macros` for a complete list of macros.\n\n/// - `[ROOT]` the path to the test directory's root\n\n/// - `[CWD]` is the working directory of the process that was run.\n\npub fn lines_match(expected: &str, mut actual: &str) -> bool {\n\n let expected = substitute_macros(expected);\n\n for (i, part) in expected.split(\"[..]\").enumerate() {\n\n match actual.find(part) {\n\n Some(j) => {\n\n if i == 0 && j != 0 {\n\n return false;\n\n }\n\n actual = &actual[j + part.len()..];\n\n }\n\n None => return false,\n\n }\n\n }\n\n actual.is_empty() || expected.ends_with(\"[..]\")\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 46, "score": 355016.87867601466 }, { "content": "/// Initialize a new repository at the given path.\n\npub fn init(path: &Path) -> git2::Repository {\n\n default_search_path();\n\n let repo = t!(git2::Repository::init(path));\n\n default_repo_cfg(&repo);\n\n repo\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 47, "score": 352720.92594698956 }, { "content": "pub fn lines_match_unordered(expected: &str, actual: &str) -> Result<(), String> {\n\n let 
mut a = actual.lines().collect::<Vec<_>>();\n\n // match more-constrained lines first, although in theory we'll\n\n // need some sort of recursive match here. This handles the case\n\n // that you expect \"a\\n[..]b\" and two lines are printed out,\n\n // \"ab\\n\"a\", where technically we do match unordered but a naive\n\n // search fails to find this. This simple sort at least gets the\n\n // test suite to pass for now, but we may need to get more fancy\n\n // if tests start failing again.\n\n a.sort_by_key(|s| s.len());\n\n let mut failures = Vec::new();\n\n\n\n for e_line in expected.lines() {\n\n match a.iter().position(|a_line| lines_match(e_line, a_line)) {\n\n Some(index) => {\n\n a.remove(index);\n\n }\n\n None => failures.push(e_line),\n\n }\n\n }\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 48, "score": 350463.45708383305 }, { "content": "/// Joins paths into a string suitable for the `PATH` environment variable.\n\n///\n\n/// This is equivalent to [`std::env::join_paths`], but includes a more\n\n/// detailed error message. 
The given `env` argument is the name of the\n\n/// environment variable this is will be used for, which is included in the\n\n/// error message.\n\npub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> Result<OsString> {\n\n env::join_paths(paths.iter())\n\n .with_context(|| {\n\n let paths = paths.iter().map(Path::new).collect::<Vec<_>>();\n\n format!(\"failed to join path array: {:?}\", paths)\n\n })\n\n .with_context(|| {\n\n format!(\n\n \"failed to join search paths together\\n\\\n\n Does ${} have an unterminated quote character?\",\n\n env\n\n )\n\n })\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 49, "score": 350045.1777057924 }, { "content": "fn find_files(path: &Path, dst: &mut Vec<PathBuf>) {\n\n for e in path.read_dir().unwrap() {\n\n let e = e.unwrap();\n\n let path = e.path();\n\n if e.file_type().unwrap().is_dir() {\n\n find_files(&path, dst);\n\n } else {\n\n dst.push(path);\n\n }\n\n }\n\n}\n", "file_path": "tests/testsuite/corrupt_git.rs", "rank": 50, "score": 349550.8894341342 }, { "content": "pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> {\n\n if opts.show_origin {\n\n if !matches!(opts.format, ConfigFormat::Toml) {\n\n bail!(\n\n \"the `{}` format does not support --show-origin, try the `toml` format instead\",\n\n opts.format\n\n );\n\n }\n\n }\n\n let key = match opts.key {\n\n Some(key) => ConfigKey::from_str(key),\n\n None => ConfigKey::new(),\n\n };\n\n if opts.merged {\n\n let cv = config\n\n .get_cv_with_env(&key)?\n\n .ok_or_else(|| format_err!(\"config value `{}` is not set\", key))?;\n\n match opts.format {\n\n ConfigFormat::Toml => print_toml(config, opts, &key, &cv),\n\n ConfigFormat::Json => print_json(config, &key, &cv, true),\n", "file_path": "src/cargo/ops/cargo_config.rs", "rank": 51, "score": 348497.17953253665 }, { "content": "fn registry(registry_name: &str) -> String {\n\n format!(\"cargo-registry:{}\", registry_name)\n\n}\n\n\n\nimpl Credential for MacKeychain {\n\n fn 
name(&self) -> &'static str {\n\n env!(\"CARGO_PKG_NAME\")\n\n }\n\n\n\n fn get(&self, registry_name: &str, _api_url: &str) -> Result<String, Error> {\n\n let keychain = SecKeychain::default().unwrap();\n\n let service_name = registry(registry_name);\n\n let (pass, _item) = keychain.find_generic_password(&service_name, ACCOUNT)?;\n\n String::from_utf8(pass.as_ref().to_vec())\n\n .map_err(|_| \"failed to convert token to UTF8\".into())\n\n }\n\n\n\n fn store(&self, registry_name: &str, _api_url: &str, token: &str) -> Result<(), Error> {\n\n let keychain = SecKeychain::default().unwrap();\n\n let service_name = registry(registry_name);\n", "file_path": "crates/credential/cargo-credential-macos-keychain/src/main.rs", "rank": 52, "score": 348221.05959612195 }, { "content": "/// Reads a file to a string.\n\n///\n\n/// Equivalent to [`std::fs::read_to_string`] with better error messages.\n\npub fn read(path: &Path) -> Result<String> {\n\n match String::from_utf8(read_bytes(path)?) {\n\n Ok(s) => Ok(s),\n\n Err(_) => anyhow::bail!(\"path at `{}` was not valid utf-8\", path.display()),\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 53, "score": 347386.79760223813 }, { "content": "/// Returns `true` if the name contains any glob pattern wildcards.\n\npub fn is_glob_pattern<T: AsRef<str>>(name: T) -> bool {\n\n name.as_ref().contains(&['*', '?', '[', ']'][..])\n\n}\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 54, "score": 346115.0493334363 }, { "content": "/// Helper to get the executable names from a filter.\n\npub fn exe_names(pkg: &Package, filter: &ops::CompileFilter) -> BTreeSet<String> {\n\n let to_exe = |name| format!(\"{}{}\", name, env::consts::EXE_SUFFIX);\n\n match filter {\n\n CompileFilter::Default { .. 
} => pkg\n\n .targets()\n\n .iter()\n\n .filter(|t| t.is_bin())\n\n .map(|t| to_exe(t.name()))\n\n .collect(),\n\n CompileFilter::Only {\n\n all_targets: true, ..\n\n } => pkg\n\n .targets()\n\n .iter()\n\n .filter(|target| target.is_executable())\n\n .map(|target| to_exe(target.name()))\n\n .collect(),\n\n CompileFilter::Only {\n\n ref bins,\n\n ref examples,\n", "file_path": "src/cargo/ops/common_for_install_and_uninstall.rs", "rank": 55, "score": 344632.4877742614 }, { "content": "fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {\n\n let path = path.as_ref();\n\n let relpath = match basedir {\n\n None => path,\n\n Some(base) => match path.strip_prefix(base) {\n\n Ok(relpath) => relpath,\n\n _ => path,\n\n },\n\n };\n\n relpath\n\n .to_str()\n\n .ok_or_else(|| internal(format!(\"path `{:?}` not utf-8\", relpath)))\n\n .map(|f| f.replace(\" \", \"\\\\ \"))\n\n}\n\n\n", "file_path": "src/cargo/core/compiler/output_depinfo.rs", "rank": 56, "score": 344551.96050264 }, { "content": "/// A Rust keyword.\n\npub fn is_keyword(name: &str) -> bool {\n\n // See https://doc.rust-lang.org/reference/keywords.html\n\n [\n\n \"Self\", \"abstract\", \"as\", \"async\", \"await\", \"become\", \"box\", \"break\", \"const\", \"continue\",\n\n \"crate\", \"do\", \"dyn\", \"else\", \"enum\", \"extern\", \"false\", \"final\", \"fn\", \"for\", \"if\",\n\n \"impl\", \"in\", \"let\", \"loop\", \"macro\", \"match\", \"mod\", \"move\", \"mut\", \"override\", \"priv\",\n\n \"pub\", \"ref\", \"return\", \"self\", \"static\", \"struct\", \"super\", \"trait\", \"true\", \"try\",\n\n \"type\", \"typeof\", \"unsafe\", \"unsized\", \"use\", \"virtual\", \"where\", \"while\", \"yield\",\n\n ]\n\n .contains(&name)\n\n}\n\n\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 57, "score": 339898.33535858354 }, { "content": "/// Returns `true` if the name contains non-ASCII characters.\n\npub fn is_non_ascii_name(name: &str) -> bool {\n\n 
name.chars().any(|ch| ch > '\\x7f')\n\n}\n\n\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 58, "score": 338570.4744947892 }, { "content": "/// An artifact with this name will conflict with one of Cargo's build directories.\n\npub fn is_conflicting_artifact_name(name: &str) -> bool {\n\n [\"deps\", \"examples\", \"build\", \"incremental\"].contains(&name)\n\n}\n\n\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 59, "score": 338563.9424931668 }, { "content": "fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> {\n\n if let Some(ref name) = opts.name {\n\n return Ok(name);\n\n }\n\n\n\n let file_name = path.file_name().ok_or_else(|| {\n\n anyhow::format_err!(\n\n \"cannot auto-detect package name from path {:?} ; use --name to override\",\n\n path.as_os_str()\n\n )\n\n })?;\n\n\n\n file_name.to_str().ok_or_else(|| {\n\n anyhow::format_err!(\n\n \"cannot create package with a non-unicode name: {:?}\",\n\n file_name\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 60, "score": 338246.7618998103 }, { "content": "fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> {\n\n let tracker = InstallTracker::load(config, root)?;\n\n let source_id = SourceId::for_path(config.cwd())?;\n\n let mut src = path_source(source_id, config)?;\n\n let pkg = select_pkg(\n\n &mut src,\n\n None,\n\n |path: &mut PathSource<'_>| path.read_packages(),\n\n config,\n\n )?;\n\n let pkgid = pkg.package_id();\n\n uninstall_pkgid(root, tracker, pkgid, bins, config)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_uninstall.rs", "rank": 61, "score": 337770.545567211 }, { "content": "/// Display a list of installed binaries.\n\npub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {\n\n let root = resolve_root(dst, config)?;\n\n let tracker = InstallTracker::load(config, &root)?;\n\n for (k, v) in tracker.all_installed_bins() {\n\n drop_println!(config, \"{}:\", 
k);\n\n for bin in v {\n\n drop_println!(config, \" {}\", bin);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_install.rs", "rank": 62, "score": 337160.1425967553 }, { "content": "/// Finds an explicit HTTP proxy if one is available.\n\n///\n\n/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified\n\n/// via environment variables are picked up by libcurl.\n\nfn http_proxy(config: &Config) -> CargoResult<Option<String>> {\n\n let http = config.http_config()?;\n\n if let Some(s) = &http.proxy {\n\n return Ok(Some(s.clone()));\n\n }\n\n if let Ok(cfg) = git2::Config::open_default() {\n\n if let Ok(s) = cfg.get_string(\"http.proxy\") {\n\n return Ok(Some(s));\n\n }\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "src/cargo/ops/registry.rs", "rank": 63, "score": 336261.0122606405 }, { "content": "/// Add all files in the working directory to the git index.\n\npub fn add(repo: &git2::Repository) {\n\n // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules\n\n // as well, and then fail because they're directories. 
As a stop-gap, we just\n\n // ignore all submodules.\n\n let mut s = t!(repo.submodules());\n\n for submodule in s.iter_mut() {\n\n t!(submodule.add_to_index(false));\n\n }\n\n let mut index = t!(repo.index());\n\n t!(index.add_all(\n\n [\"*\"].iter(),\n\n git2::IndexAddOption::DEFAULT,\n\n Some(\n\n &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {\n\n 1\n\n } else {\n\n 0\n\n })\n\n )\n\n ));\n\n t!(index.write());\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 64, "score": 335946.04217087786 }, { "content": "/// Creates a new HTTP handle with appropriate global configuration for cargo.\n\npub fn http_handle(config: &Config) -> CargoResult<Easy> {\n\n let (mut handle, timeout) = http_handle_and_timeout(config)?;\n\n timeout.configure(&mut handle)?;\n\n Ok(handle)\n\n}\n\n\n", "file_path": "src/cargo/ops/registry.rs", "rank": 65, "score": 335849.3719579147 }, { "content": "/// These names cannot be used on Windows, even with an extension.\n\npub fn is_windows_reserved(name: &str) -> bool {\n\n [\n\n \"con\", \"prn\", \"aux\", \"nul\", \"com1\", \"com2\", \"com3\", \"com4\", \"com5\", \"com6\", \"com7\", \"com8\",\n\n \"com9\", \"lpt1\", \"lpt2\", \"lpt3\", \"lpt4\", \"lpt5\", \"lpt6\", \"lpt7\", \"lpt8\", \"lpt9\",\n\n ]\n\n .contains(&name.to_ascii_lowercase().as_str())\n\n}\n\n\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 66, "score": 335544.1280934558 }, { "content": "/// Generate a toml String of Cargo.lock from a Resolve.\n\npub fn resolve_to_string(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult<String> {\n\n let (_orig, out, _ws_root) = resolve_to_string_orig(ws, resolve);\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/cargo/ops/lockfile.rs", "rank": 67, "score": 334224.7234102626 }, { "content": "fn pl_manifest(name: &str, version: &str, extra: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n name = \"{}\"\n\n version = \"{}\"\n\n authors = []\n\n license = \"MIT\"\n\n description = 
\"foo\"\n\n documentation = \"foo\"\n\n homepage = \"foo\"\n\n repository = \"foo\"\n\n\n\n {}\n\n \"#,\n\n name, version, extra\n\n )\n\n}\n\n\n", "file_path": "tests/testsuite/publish_lockfile.rs", "rank": 68, "score": 333173.691699386 }, { "content": "/// Changes the filesystem mtime (and atime if possible) for the given file.\n\n///\n\n/// This intentionally does not return an error, as this is sometimes not\n\n/// supported on network filesystems. For the current uses in Cargo, this is a\n\n/// \"best effort\" approach, and errors shouldn't be propagated.\n\npub fn set_file_time_no_err<P: AsRef<Path>>(path: P, time: FileTime) {\n\n let path = path.as_ref();\n\n match filetime::set_file_times(path, time, time) {\n\n Ok(()) => log::debug!(\"set file mtime {} to {}\", path.display(), time),\n\n Err(e) => log::warn!(\n\n \"could not set mtime of {} to {}: {:?}\",\n\n path.display(),\n\n time,\n\n e\n\n ),\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 69, "score": 333158.65954668145 }, { "content": "fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> {\n\n let main = package_root.join(\"src\").join(\"main.rs\");\n\n let mut result = Vec::new();\n\n if main.exists() {\n\n result.push((package_name.to_string(), main));\n\n }\n\n result.extend(infer_from_directory(&package_root.join(\"src\").join(\"bin\")));\n\n\n\n result\n\n}\n\n\n", "file_path": "src/cargo/util/toml/targets.rs", "rank": 70, "score": 332679.33058841754 }, { "content": "/// Create a new git repository with a project.\n\npub fn new<F>(name: &str, callback: F) -> Project\n\nwhere\n\n F: FnOnce(ProjectBuilder) -> ProjectBuilder,\n\n{\n\n new_repo(name, callback).0\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 71, "score": 330833.8303565325 }, { "content": "fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {\n\n path.as_ref().join(\"bin\").join(exe(name)).is_file()\n\n}\n\n\n", 
"file_path": "crates/cargo-test-support/src/install.rs", "rank": 72, "score": 330347.2179970598 }, { "content": "/// Variant of `lines_match` that applies normalization to the strings.\n\npub fn normalized_lines_match(expected: &str, actual: &str, cwd: Option<&Path>) -> bool {\n\n let expected = normalize_matcher(expected, cwd);\n\n let actual = normalize_matcher(actual, cwd);\n\n lines_match(&expected, &actual)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 73, "score": 330053.5866653946 }, { "content": "pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<FileLock>> {\n\n if ws.root().join(\"Cargo.lock\").exists() {\n\n // Make sure the Cargo.lock is up-to-date and valid.\n\n let _ = ops::resolve_ws(ws)?;\n\n // If Cargo.lock does not exist, it will be generated by `build_lock`\n\n // below, and will be validated during the verification step.\n\n }\n\n let pkg = ws.current()?;\n\n let config = ws.config();\n\n\n\n let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);\n\n src.update()?;\n\n\n\n if opts.check_metadata {\n\n check_metadata(pkg, config)?;\n\n }\n\n\n\n if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {\n\n config.shell().warn(\n\n \"both package.include and package.exclude are specified; \\\n", "file_path": "src/cargo/ops/cargo_package.rs", "rank": 74, "score": 329523.570997169 }, { "content": "pub fn optinal_opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {\n\n opt(name, help).min_values(0)\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 75, "score": 329334.7258169839 }, { "content": "/// Adds an empty file with executable flags (and platform-dependent suffix).\n\n//\n\n// TODO: move this to `Project` if other cases using this emerge.\n\nfn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind<'_>) -> Project {\n\n let path = proj\n\n .root()\n\n .join(dir)\n\n 
.join(&format!(\"{}{}\", name, env::consts::EXE_SUFFIX));\n\n path.parent().unwrap().mkdir_p();\n\n match *kind {\n\n FakeKind::Executable => {\n\n File::create(&path).unwrap();\n\n make_executable(&path);\n\n }\n\n FakeKind::Symlink { target } => {\n\n make_symlink(&path, target);\n\n }\n\n }\n\n return proj;\n\n\n\n #[cfg(unix)]\n\n fn make_executable(p: &Path) {\n\n use std::os::unix::prelude::*;\n", "file_path": "tests/testsuite/cargo_command.rs", "rank": 76, "score": 328900.0346517237 }, { "content": "fn pkg_id_loc(name: &str, loc: &str) -> PackageId {\n\n let remote = loc.into_url();\n\n let master = GitReference::Branch(\"master\".to_string());\n\n let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();\n\n\n\n PackageId::new(name, \"1.0.0\", source_id).unwrap()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 77, "score": 328884.5585104194 }, { "content": "pub fn needs_custom_http_transport(config: &Config) -> CargoResult<bool> {\n\n Ok(http_proxy_exists(config)?\n\n || *config.http_config()? 
!= Default::default()\n\n || env::var_os(\"HTTP_TIMEOUT\").is_some())\n\n}\n\n\n", "file_path": "src/cargo/ops/registry.rs", "rank": 78, "score": 327991.65259901586 }, { "content": "/// Checks the result of a crate publish.\n\npub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {\n\n let new_path = registry::api_path().join(\"api/v1/crates/new\");\n\n _validate_upload(\n\n &new_path,\n\n expected_json,\n\n expected_crate_name,\n\n expected_files,\n\n &[],\n\n );\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/publish.rs", "rank": 79, "score": 326439.15669851966 }, { "content": "pub fn indented_lines(text: &str) -> String {\n\n text.lines()\n\n .map(|line| {\n\n if line.is_empty() {\n\n String::from(\"\\n\")\n\n } else {\n\n format!(\" {}\\n\", line)\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "src/cargo/util/mod.rs", "rank": 80, "score": 325422.4067069168 }, { "content": "pub fn dep(name: &str) -> Dependency {\n\n dep_req(name, \"*\")\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 81, "score": 325417.9512675521 }, { "content": "fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult<String> {\n\n let mut src = File::open(src_path).with_context(|| format!(\"failed to open {:?}\", src_path))?;\n\n let mut dst_opts = OpenOptions::new();\n\n dst_opts.write(true).create(true).truncate(true);\n\n #[cfg(unix)]\n\n {\n\n use std::os::unix::fs::{MetadataExt, OpenOptionsExt};\n\n let src_metadata = src\n\n .metadata()\n\n .with_context(|| format!(\"failed to stat {:?}\", src_path))?;\n\n dst_opts.mode(src_metadata.mode());\n\n }\n\n let mut dst = dst_opts\n\n .open(dst_path)\n\n .with_context(|| format!(\"failed to create {:?}\", dst_path))?;\n\n // Not going to bother setting mode on pre-existing files, since there\n\n // shouldn't be any under normal conditions.\n\n let mut cksum = Sha256::new();\n\n loop {\n\n let n = src\n", "file_path": "src/cargo/ops/vendor.rs", 
"rank": 82, "score": 325406.31964101683 }, { "content": "pub fn dep_req(name: &str, req: &str) -> Dependency {\n\n Dependency::parse(name, Some(req), registry_loc()).unwrap()\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 83, "score": 324353.97355903615 }, { "content": "pub fn dep_loc(name: &str, location: &str) -> Dependency {\n\n let url = location.into_url().unwrap();\n\n let master = GitReference::Branch(\"master\".to_string());\n\n let source_id = SourceId::for_git(&url, master).unwrap();\n\n Dependency::parse(name, Some(\"1.0.0\"), source_id).unwrap()\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 84, "score": 324353.97355903615 }, { "content": "/// Equivalent to [`std::fs::remove_file`] with better error messages.\n\n///\n\n/// If the file is readonly, this will attempt to change the permissions to\n\n/// force the file to be deleted.\n\npub fn remove_file<P: AsRef<Path>>(p: P) -> Result<()> {\n\n _remove_file(p.as_ref())\n\n}\n\n\n", "file_path": "crates/cargo-util/src/paths.rs", "rank": 85, "score": 323272.6471920719 }, { "content": "// It can often be the case that files of a particular name on one platform\n\n// can't actually be created on another platform. 
For example files with colons\n\n// in the name are allowed on Unix but not on Windows.\n\n//\n\n// To help out in situations like this, issue about weird filenames when\n\n// packaging as a \"heads up\" that something may not work on other platforms.\n\nfn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {\n\n let name = match file.file_name() {\n\n Some(name) => name,\n\n None => return Ok(()),\n\n };\n\n let name = match name.to_str() {\n\n Some(name) => name,\n\n None => anyhow::bail!(\n\n \"path does not have a unicode filename which may not unpack \\\n\n on all platforms: {}\",\n\n file.display()\n\n ),\n\n };\n\n let bad_chars = ['/', '\\\\', '<', '>', ':', '\"', '|', '?', '*'];\n\n if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {\n\n anyhow::bail!(\n\n \"cannot package a filename with a special character `{}`: {}\",\n\n c,\n\n file.display()\n\n )\n", "file_path": "src/cargo/ops/cargo_package.rs", "rank": 86, "score": 321522.4915166323 }, { "content": "pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {\n\n let path = &opts.path;\n\n if path.exists() {\n\n anyhow::bail!(\n\n \"destination `{}` already exists\\n\\n\\\n\n Use `cargo init` to initialize the directory\",\n\n path.display()\n\n )\n\n }\n\n\n\n let name = get_name(path, opts)?;\n\n check_name(\n\n name,\n\n opts.name.is_none(),\n\n opts.kind.is_bin(),\n\n &mut config.shell(),\n\n )?;\n\n\n\n let mkopts = MkOptions {\n\n version_control: opts.version_control,\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 87, "score": 320397.2202687315 }, { "content": "pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {\n\n // This is here just as a random location to exercise the internal error handling.\n\n if std::env::var_os(\"__CARGO_TEST_INTERNAL_ERROR\").is_some() {\n\n return Err(crate::util::internal(\"internal error test\"));\n\n }\n\n\n\n let path = &opts.path;\n\n\n\n if path.join(\"Cargo.toml\").exists() {\n\n 
anyhow::bail!(\"`cargo init` cannot be run on existing Cargo packages\")\n\n }\n\n\n\n let name = get_name(path, opts)?;\n\n\n\n let mut src_paths_types = vec![];\n\n\n\n detect_source_paths_and_types(path, name, &mut src_paths_types)?;\n\n\n\n if src_paths_types.is_empty() {\n\n src_paths_types.push(plan_new_source_file(opts.kind.is_bin(), name.to_string()));\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 88, "score": 320397.2202687315 }, { "content": "/// Check the entire path for names reserved in Windows.\n\npub fn is_windows_reserved_path(path: &Path) -> bool {\n\n path.iter()\n\n .filter_map(|component| component.to_str())\n\n .any(|component| {\n\n let stem = component.split('.').next().unwrap();\n\n is_windows_reserved(stem)\n\n })\n\n}\n\n\n", "file_path": "src/cargo/util/restricted_names.rs", "rank": 89, "score": 318423.5290878687 }, { "content": "fn default_repo_cfg(repo: &git2::Repository) {\n\n let mut cfg = t!(repo.config());\n\n t!(cfg.set_str(\"user.email\", \"[email protected]\"));\n\n t!(cfg.set_str(\"user.name\", \"Foo Bar\"));\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 90, "score": 316623.52376309736 }, { "content": "pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> {\n\n if config.frozen() {\n\n bail!(\n\n \"attempting to make an HTTP request, but --frozen was \\\n\n specified\"\n\n )\n\n }\n\n if !config.network_allowed() {\n\n bail!(\"can't make HTTP request in the offline mode\")\n\n }\n\n\n\n // The timeout option for libcurl by default times out the entire transfer,\n\n // but we probably don't want this. 
Instead we only set timeouts for the\n\n // connect phase as well as a \"low speed\" timeout so if we don't receive\n\n // many bytes in a large-ish period of time then we time out.\n\n let mut handle = Easy::new();\n\n let timeout = configure_http_handle(config, &mut handle)?;\n\n Ok((handle, timeout))\n\n}\n\n\n", "file_path": "src/cargo/ops/registry.rs", "rank": 91, "score": 316424.169697955 }, { "content": "pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {\n\n names\n\n .iter()\n\n .map(|&(name, loc)| pkg_id_loc(name, loc))\n\n .collect()\n\n}\n\n\n\n/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.\n\n/// This replaces with a representation that uses constructors from this file.\n\n///\n\n/// If `registry_strategy` is improved to modify more fields\n\n/// then this needs to update to display the corresponding constructor.\n\npub struct PrettyPrintRegistry(pub Vec<Summary>);\n\n\n\nimpl fmt::Debug for PrettyPrintRegistry {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"vec![\")?;\n\n for s in &self.0 {\n\n if s.dependencies().is_empty() {\n\n write!(f, \"pkg!((\\\"{}\\\", \\\"{}\\\")),\", s.name(), s.version())?;\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 92, "score": 315407.4353218565 }, { "content": "fn do_op<F>(path: &Path, desc: &str, mut f: F)\n\nwhere\n\n F: FnMut(&Path) -> io::Result<()>,\n\n{\n\n match f(path) {\n\n Ok(()) => {}\n\n Err(ref e) if e.kind() == ErrorKind::PermissionDenied => {\n\n let mut p = t!(path.metadata()).permissions();\n\n p.set_readonly(false);\n\n t!(fs::set_permissions(path, p));\n\n\n\n // Unix also requires the parent to not be readonly for example when\n\n // removing files\n\n let parent = path.parent().unwrap();\n\n let mut p = t!(parent.metadata()).permissions();\n\n p.set_readonly(false);\n\n t!(fs::set_permissions(parent, p));\n\n\n\n f(path).unwrap_or_else(|e| {\n\n panic!(\"failed to {} {}: {}\", desc, 
path.display(), e);\n\n })\n\n }\n\n Err(e) => {\n\n panic!(\"failed to {} {}: {}\", desc, path.display(), e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 93, "score": 315025.5500373513 }, { "content": "fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {\n\n let m = fs::symlink_metadata(path);\n\n if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {\n\n config\n\n .shell()\n\n .verbose(|shell| shell.status(\"Removing\", path.display()))?;\n\n paths::remove_dir_all(path).with_context(|| \"could not remove build directory\")?;\n\n } else if m.is_ok() {\n\n config\n\n .shell()\n\n .verbose(|shell| shell.status(\"Removing\", path.display()))?;\n\n paths::remove_file(path).with_context(|| \"failed to remove build artifact\")?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/cargo/ops/cargo_clean.rs", "rank": 94, "score": 314026.2353146708 }, { "content": "/// Resolves dependencies for some packages of the workspace,\n\n/// taking into account `paths` overrides and activated features.\n\n///\n\n/// This function will also write the result of resolution as a new lock file\n\n/// (unless `Workspace::require_optional_deps` is false, such as `cargo\n\n/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo\n\n/// install` or `cargo package`).\n\n///\n\n/// `specs` may be empty, which indicates it should resolve all workspace\n\n/// members. 
In this case, `opts.all_features` must be `true`.\n\npub fn resolve_ws_with_opts<'cfg>(\n\n ws: &Workspace<'cfg>,\n\n target_data: &RustcTargetData<'cfg>,\n\n requested_targets: &[CompileKind],\n\n cli_features: &CliFeatures,\n\n specs: &[PackageIdSpec],\n\n has_dev_units: HasDevUnits,\n\n force_all_targets: ForceAllTargets,\n\n) -> CargoResult<WorkspaceResolve<'cfg>> {\n\n let mut registry = PackageRegistry::new(ws.config())?;\n\n let mut add_patches = true;\n\n let resolve = if ws.ignore_lock() {\n\n None\n\n } else if ws.require_optional_deps() {\n\n // First, resolve the root_package's *listed* dependencies, as well as\n\n // downloading and updating all remotes and such.\n\n let resolve = resolve_with_registry(ws, &mut registry)?;\n\n // No need to add patches again, `resolve_with_registry` has done it.\n\n add_patches = false;\n\n\n", "file_path": "src/cargo/ops/resolve.rs", "rank": 95, "score": 313206.447740422 }, { "content": "/// Returns the sysroot as queried from rustc.\n\npub fn sysroot() -> String {\n\n let output = Command::new(\"rustc\")\n\n .arg(\"--print=sysroot\")\n\n .output()\n\n .expect(\"rustc to run\");\n\n assert!(output.status.success());\n\n let sysroot = String::from_utf8(output.stdout).unwrap();\n\n sysroot.trim().to_string()\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 96, "score": 312851.3619632261 }, { "content": "/// Wrapper method for network call retry logic.\n\n///\n\n/// Retry counts provided by Config object `net.retry`. 
Config shell outputs\n\n/// a warning on per retry.\n\n///\n\n/// Closure must return a `CargoResult`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use crate::cargo::util::{CargoResult, Config};\n\n/// # let download_something = || return Ok(());\n\n/// # let config = Config::default().unwrap();\n\n/// use cargo::util::network;\n\n/// let cargo_result = network::with_retry(&config, || download_something());\n\n/// ```\n\npub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>\n\nwhere\n\n F: FnMut() -> CargoResult<T>,\n\n{\n\n let mut retry = Retry::new(config)?;\n\n loop {\n\n if let Some(ret) = retry.r#try(&mut callback)? {\n\n return Ok(ret);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cargo/util/network.rs", "rank": 97, "score": 312651.93897547375 }, { "content": "fn toml_bin(proj: &Project, name: &str) -> String {\n\n proj.bin(name).display().to_string().replace('\\\\', \"\\\\\\\\\")\n\n}\n\n\n", "file_path": "tests/testsuite/credential_process.rs", "rank": 98, "score": 312348.9924716895 }, { "content": "// Generates a project layout inside our fake home dir\n\npub fn project_in_home(name: &str) -> ProjectBuilder {\n\n ProjectBuilder::new(paths::home().join(name))\n\n}\n\n\n\n// === Helpers ===\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 99, "score": 312348.2587190134 } ]
Rust
game/src/sandbox/mod.rs
jvolker/abstreet
7d4cf4cd5b27bce37cf35d3d8aecf0e3d5148a2f
mod dashboards; pub mod gameplay; mod misc_tools; mod speed; mod uber_turns; use self::misc_tools::{RoutePreview, ShowTrafficSignal, TurnExplorer}; use crate::app::App; use crate::common::{tool_panel, CommonState, ContextualActions, Minimap}; use crate::debug::DebugMode; use crate::edit::{ apply_map_edits, can_edit_lane, save_edits_as, EditMode, LaneEditor, StopSignEditor, TrafficSignalEditor, }; use crate::game::{State, Transition, WizardState}; use crate::helpers::ID; use crate::layer::PickLayer; use crate::managed::{WrappedComposite, WrappedOutcome}; use crate::pregame::MainMenu; use crate::render::AgentColorScheme; use ezgui::{ hotkey, lctrl, Btn, Choice, Color, Composite, EventCtx, GeomBatch, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Text, TextExt, UpdateType, VerticalAlignment, Widget, Wizard, }; pub use gameplay::{spawn_agents_around, GameplayMode, TutorialPointer, TutorialState}; use geom::{Polygon, Time}; use map_model::MapEdits; use sim::{TripMode, VehicleType}; pub use speed::TimeWarpScreen; pub use speed::{SpeedControls, TimePanel}; pub struct SandboxMode { gameplay: Box<dyn gameplay::GameplayState>, pub gameplay_mode: GameplayMode, pub controls: SandboxControls, } pub struct SandboxControls { pub common: Option<CommonState>, route_preview: Option<RoutePreview>, tool_panel: Option<WrappedComposite>, time_panel: Option<TimePanel>, speed: Option<SpeedControls>, pub agent_meter: Option<AgentMeter>, minimap: Option<Minimap>, } impl SandboxMode { pub fn new(ctx: &mut EventCtx, app: &mut App, mode: GameplayMode) -> SandboxMode { app.primary.clear_sim(); let gameplay = mode.initialize(ctx, app); SandboxMode { controls: SandboxControls { common: if gameplay.has_common() { Some(CommonState::new()) } else { None }, route_preview: if gameplay.can_examine_objects() { Some(RoutePreview::new()) } else { None }, tool_panel: if gameplay.has_tool_panel() { Some(tool_panel(ctx, app)) } else { None }, time_panel: if gameplay.has_time_panel() { 
Some(TimePanel::new(ctx, app)) } else { None }, speed: if gameplay.has_speed() { Some(SpeedControls::new(ctx, app)) } else { None }, agent_meter: if gameplay.has_agent_meter() { Some(AgentMeter::new(ctx, app)) } else { None }, minimap: if gameplay.has_minimap() { Some(Minimap::new(ctx, app)) } else { None }, }, gameplay, gameplay_mode: mode, } } pub fn contextual_actions(&self) -> Actions { Actions { is_paused: self .controls .speed .as_ref() .map(|s| s.is_paused()) .unwrap_or(true), can_interact: self.gameplay.can_examine_objects(), gameplay: self.gameplay_mode.clone(), } } } impl State for SandboxMode { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if self.gameplay.can_move_canvas() { ctx.canvas_movement(); } if let Some(t) = self.gameplay.event(ctx, app, &mut self.controls) { return t; } if ctx.redo_mouseover() { app.recalculate_current_selection(ctx); } if app.opts.dev && ctx.input.new_was_pressed(&lctrl(Key::D).unwrap()) { return Transition::Push(Box::new(DebugMode::new(ctx, app))); } if let Some(ref mut m) = self.controls.minimap { if let Some(t) = m.event(ctx, app) { return t; } if let Some(t) = PickLayer::update(ctx, app, &m.composite) { return t; } } if let Some(ref mut s) = self.controls.speed { if let Some(t) = s.event(ctx, app, Some(&self.gameplay_mode)) { return t; } } if let Some(ref mut r) = self.controls.route_preview { if let Some(t) = r.event(ctx, app) { return t; } } let mut actions = self.contextual_actions(); if let Some(ref mut c) = self.controls.common { if let Some(t) = c.event(ctx, app, &mut actions) { return t; } } if let Some(ref mut tp) = self.controls.time_panel { tp.event(ctx, app); } if let Some(ref mut tp) = self.controls.tool_panel { match tp.event(ctx, app) { Some(WrappedOutcome::Transition(t)) => { return t; } Some(WrappedOutcome::Clicked(x)) => match x.as_ref() { "back" => { return maybe_exit_sandbox(); } _ => unreachable!(), }, None => {} } } if let Some(ref mut am) = self.controls.agent_meter { if let 
Some(t) = am.event(ctx, app) { return t; } } if self .controls .speed .as_ref() .map(|s| s.is_paused()) .unwrap_or(true) { Transition::Keep } else { ctx.request_update(UpdateType::Game); Transition::Keep } } fn draw(&self, g: &mut GfxCtx, app: &App) { if let Some(ref l) = app.layer { l.draw(g, app); } if let Some(ref c) = self.controls.common { c.draw(g, app); } else { CommonState::draw_osd(g, app); } if let Some(ref tp) = self.controls.tool_panel { tp.draw(g); } if let Some(ref s) = self.controls.speed { s.draw(g); } if let Some(ref tp) = self.controls.time_panel { tp.draw(g); } if let Some(ref am) = self.controls.agent_meter { am.draw(g); } if let Some(ref m) = self.controls.minimap { m.draw(g, app); } if let Some(ref r) = self.controls.route_preview { r.draw(g); } self.gameplay.draw(g, app); } fn on_destroy(&mut self, _: &mut EventCtx, app: &mut App) { app.layer = None; app.agent_cs = AgentColorScheme::new(&app.cs); self.gameplay.on_destroy(app); } } pub fn maybe_exit_sandbox() -> Transition { Transition::Push(WizardState::new(Box::new(exit_sandbox))) } fn exit_sandbox(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> { let mut wizard = wiz.wrap(ctx); let unsaved = app.primary.map.unsaved_edits(); let (resp, _) = wizard.choose("Are you ready to leave this mode?", || { let mut choices = Vec::new(); choices.push(Choice::new("keep playing", ())); if unsaved { choices.push(Choice::new("save edits first", ())); } choices.push(Choice::new("quit to main screen", ()).key(Key::Q)); choices })?; if resp == "keep playing" { return Some(Transition::Pop); } if resp == "save edits first" { save_edits_as(&mut wizard, app)?; } ctx.loading_screen("reset map and sim", |ctx, mut timer| { if !app.primary.map.get_edits().commands.is_empty() { apply_map_edits(ctx, app, MapEdits::new()); app.primary .map .recalculate_pathfinding_after_edits(&mut timer); } app.primary.clear_sim(); app.set_prebaked(None); }); 
ctx.canvas.save_camera_state(app.primary.map.get_name()); Some(Transition::Clear(vec![MainMenu::new(ctx, app)])) } pub struct AgentMeter { time: Time, pub composite: Composite, } impl AgentMeter { pub fn new(ctx: &mut EventCtx, app: &App) -> AgentMeter { use abstutil::prettyprint_usize; let (finished, unfinished, by_mode) = app.primary.sim.num_trips(); let rows = vec![ "Active trips".draw_text(ctx), Widget::custom_row(vec![ Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/pedestrian.svg") .margin_right(5), prettyprint_usize(by_mode[&TripMode::Walk]).draw_text(ctx), ]), Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/bike.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Bike]).draw_text(ctx), ]), Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/car.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Drive]).draw_text(ctx), ]), Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/bus.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Transit]).draw_text(ctx), ]), ]) .centered(), Widget::draw_batch( ctx, GeomBatch::from(vec![( Color::WHITE, Polygon::rectangle(0.2 * ctx.canvas.window_width / ctx.get_scale_factor(), 2.0), )]), ) .centered_horiz(), Widget::row(vec![ { let mut txt = Text::new(); let pct = if unfinished == 0 { 100.0 } else { 100.0 * (finished as f64) / ((finished + unfinished) as f64) }; txt.add(Line(format!( "Finished trips: {} ({}%)", prettyprint_usize(finished), pct as usize ))); txt.draw(ctx) }, Btn::svg_def("../data/system/assets/meters/trip_histogram.svg") .build(ctx, "more data", hotkey(Key::Q)) .align_right(), ]), ]; let composite = Composite::new(Widget::col(rows).bg(app.cs.panel_bg).padding(16)) .aligned(HorizontalAlignment::Right, VerticalAlignment::Top) .build(ctx); AgentMeter { time: app.primary.sim.time(), composite, } } pub fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> { if 
self.time != app.primary.sim.time() { *self = AgentMeter::new(ctx, app); return self.event(ctx, app); } match self.composite.event(ctx) { Some(Outcome::Clicked(x)) => match x.as_ref() { "more data" => { return Some(Transition::Push(dashboards::TripTable::new(ctx, app))); } _ => unreachable!(), }, None => {} } None } pub fn draw(&self, g: &mut GfxCtx) { self.composite.draw(g); } } pub struct Actions { is_paused: bool, can_interact: bool, gameplay: GameplayMode, } impl ContextualActions for Actions { fn actions(&self, app: &App, id: ID) -> Vec<(Key, String)> { let mut actions = Vec::new(); if self.can_interact { match id.clone() { ID::Intersection(i) => { if app.primary.map.get_i(i).is_traffic_signal() { actions.push((Key::F, "explore traffic signal details".to_string())); actions.push((Key::E, "edit traffic signal".to_string())); } if app.primary.map.get_i(i).is_stop_sign() && self.gameplay.can_edit_stop_signs() { actions.push((Key::E, "edit stop sign".to_string())); } if app.opts.dev { actions.push((Key::U, "explore uber-turns".to_string())); } } ID::Lane(l) => { if !app.primary.map.get_turns_from_lane(l).is_empty() { actions.push((Key::Z, "explore turns from this lane".to_string())); } if can_edit_lane(&self.gameplay, l, app) { actions.push((Key::E, "edit lane".to_string())); } } ID::Car(c) => { if c.1 == VehicleType::Bus { actions.push((Key::R, "show route".to_string())); } } _ => {} } } actions.extend(self.gameplay.actions(app, id)); actions } fn execute( &mut self, ctx: &mut EventCtx, app: &mut App, id: ID, action: String, close_panel: &mut bool, ) -> Transition { match (id, action.as_ref()) { (ID::Intersection(i), "explore traffic signal details") => { Transition::Push(ShowTrafficSignal::new(ctx, app, i)) } (ID::Intersection(i), "edit traffic signal") => Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(TrafficSignalEditor::new(ctx, app, i, self.gameplay.clone())), ), (ID::Intersection(i), "edit stop sign") => 
Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(StopSignEditor::new(ctx, app, i, self.gameplay.clone())), ), (ID::Intersection(i), "explore uber-turns") => { Transition::Push(uber_turns::UberTurnPicker::new(ctx, app, i)) } (ID::Lane(l), "explore turns from this lane") => { Transition::Push(TurnExplorer::new(ctx, app, l)) } (ID::Lane(l), "edit lane") => Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(LaneEditor::new(ctx, app, l, self.gameplay.clone())), ), (ID::Car(c), "show route") => { *close_panel = false; app.layer = Some(Box::new(crate::layer::bus::ShowBusRoute::new( ctx, app, app.primary.sim.bus_route_id(c).unwrap(), ))); Transition::Keep } (_, "follow (run the simulation)") => { *close_panel = false; Transition::KeepWithData(Box::new(|state, ctx, app| { let mode = state.downcast_mut::<SandboxMode>().unwrap(); let speed = mode.controls.speed.as_mut().unwrap(); assert!(speed.is_paused()); speed.resume_realtime(ctx, app); })) } (_, "unfollow (pause the simulation)") => { *close_panel = false; Transition::KeepWithData(Box::new(|state, ctx, app| { let mode = state.downcast_mut::<SandboxMode>().unwrap(); let speed = mode.controls.speed.as_mut().unwrap(); assert!(!speed.is_paused()); speed.pause(ctx, app); })) } (id, action) => self .gameplay .execute(ctx, app, id, action.to_string(), close_panel), } } fn is_paused(&self) -> bool { self.is_paused } }
mod dashboards; pub mod gameplay; mod misc_tools; mod speed; mod uber_turns; use self::misc_tools::{RoutePreview, ShowTrafficSignal, TurnExplorer}; use crate::app::App; use crate::common::{tool_panel, CommonState, ContextualActions, Minimap}; use crate::debug::DebugMode; use crate::edit::{ apply_map_edits, can_edit_lane, save_edits_as, EditMode, LaneEditor, StopSignEditor, TrafficSignalEditor, }; use crate::game::{State, Transition, WizardState}; use crate::helpers::ID; use crate::layer::PickLayer; use crate::managed::{WrappedComposite, WrappedOutcome}; use crate::pregame::MainMenu; use crate::render::AgentColorScheme; use ezgui::{ hotkey, lctrl, Btn, Choice, Color, Composite, EventCtx, GeomBatch, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Text, TextExt, UpdateType, VerticalAlignment, Widget, Wizard, }; pub use gameplay::{spawn_agents_around, GameplayMode, TutorialPointer, TutorialState}; use geom::{Polygon, Time}; use map_model::MapEdits; use sim::{TripMode, VehicleType}; pub use speed::TimeWarpScreen; pub use speed::{SpeedControls, TimePanel}; pub struct SandboxMode { gameplay: Box<dyn gameplay::GameplayState>, pub gameplay_mode: GameplayMode, pub controls: SandboxControls, } pub struct SandboxControls { pub common: Option<CommonState>, route_preview: Option<RoutePreview>, tool_panel: Option<WrappedComposite>, time_panel: Option<TimePanel>, speed: Option<SpeedControls>, pub agent_meter: Option<AgentMeter>, minimap: Option<Minimap>, } impl SandboxMode { pub fn new(ctx: &mut EventCtx, app: &mut App, mode: GameplayMode) -> SandboxMode { app.primary.clear_sim(); let gameplay = mode.initialize(ctx, app); SandboxMode { controls: SandboxControls { common: if gameplay.has_common() { Some(CommonState::new()) } else { None }, route_preview: if gameplay.can_examine_objects() { Some(RoutePreview::new()) } else { None }, tool_panel: if gameplay.has_tool_panel() { Some(tool_panel(ctx, app)) } else { None }, time_panel: if gameplay.has_time_panel() { 
Some(TimePanel::new(ctx, app)) } else { None }, speed: if gameplay.has_speed() { Some(SpeedControls::new(ctx, app)) } else { None }, agent_meter: if gameplay.has_agent_meter() { Some(AgentMeter::new(ctx, app)) } else { None }, minimap: if gameplay.has_minimap() { Some(Minimap::new(ctx, app)) } else { None }, }, gameplay, gameplay_mode: mode, } } pub fn contextual_actions(&self) -> Actions { Actions { is_paused: self .controls .speed .as_ref() .map(|s| s.is_paused()) .
} impl State for SandboxMode { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if self.gameplay.can_move_canvas() { ctx.canvas_movement(); } if let Some(t) = self.gameplay.event(ctx, app, &mut self.controls) { return t; } if ctx.redo_mouseover() { app.recalculate_current_selection(ctx); } if app.opts.dev && ctx.input.new_was_pressed(&lctrl(Key::D).unwrap()) { return Transition::Push(Box::new(DebugMode::new(ctx, app))); } if let Some(ref mut m) = self.controls.minimap { if let Some(t) = m.event(ctx, app) { return t; } if let Some(t) = PickLayer::update(ctx, app, &m.composite) { return t; } } if let Some(ref mut s) = self.controls.speed { if let Some(t) = s.event(ctx, app, Some(&self.gameplay_mode)) { return t; } } if let Some(ref mut r) = self.controls.route_preview { if let Some(t) = r.event(ctx, app) { return t; } } let mut actions = self.contextual_actions(); if let Some(ref mut c) = self.controls.common { if let Some(t) = c.event(ctx, app, &mut actions) { return t; } } if let Some(ref mut tp) = self.controls.time_panel { tp.event(ctx, app); } if let Some(ref mut tp) = self.controls.tool_panel { match tp.event(ctx, app) { Some(WrappedOutcome::Transition(t)) => { return t; } Some(WrappedOutcome::Clicked(x)) => match x.as_ref() { "back" => { return maybe_exit_sandbox(); } _ => unreachable!(), }, None => {} } } if let Some(ref mut am) = self.controls.agent_meter { if let Some(t) = am.event(ctx, app) { return t; } } if self .controls .speed .as_ref() .map(|s| s.is_paused()) .unwrap_or(true) { Transition::Keep } else { ctx.request_update(UpdateType::Game); Transition::Keep } } fn draw(&self, g: &mut GfxCtx, app: &App) { if let Some(ref l) = app.layer { l.draw(g, app); } if let Some(ref c) = self.controls.common { c.draw(g, app); } else { CommonState::draw_osd(g, app); } if let Some(ref tp) = self.controls.tool_panel { tp.draw(g); } if let Some(ref s) = self.controls.speed { s.draw(g); } if let Some(ref tp) = self.controls.time_panel { 
tp.draw(g); } if let Some(ref am) = self.controls.agent_meter { am.draw(g); } if let Some(ref m) = self.controls.minimap { m.draw(g, app); } if let Some(ref r) = self.controls.route_preview { r.draw(g); } self.gameplay.draw(g, app); } fn on_destroy(&mut self, _: &mut EventCtx, app: &mut App) { app.layer = None; app.agent_cs = AgentColorScheme::new(&app.cs); self.gameplay.on_destroy(app); } } pub fn maybe_exit_sandbox() -> Transition { Transition::Push(WizardState::new(Box::new(exit_sandbox))) } fn exit_sandbox(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> { let mut wizard = wiz.wrap(ctx); let unsaved = app.primary.map.unsaved_edits(); let (resp, _) = wizard.choose("Are you ready to leave this mode?", || { let mut choices = Vec::new(); choices.push(Choice::new("keep playing", ())); if unsaved { choices.push(Choice::new("save edits first", ())); } choices.push(Choice::new("quit to main screen", ()).key(Key::Q)); choices })?; if resp == "keep playing" { return Some(Transition::Pop); } if resp == "save edits first" { save_edits_as(&mut wizard, app)?; } ctx.loading_screen("reset map and sim", |ctx, mut timer| { if !app.primary.map.get_edits().commands.is_empty() { apply_map_edits(ctx, app, MapEdits::new()); app.primary .map .recalculate_pathfinding_after_edits(&mut timer); } app.primary.clear_sim(); app.set_prebaked(None); }); ctx.canvas.save_camera_state(app.primary.map.get_name()); Some(Transition::Clear(vec![MainMenu::new(ctx, app)])) } pub struct AgentMeter { time: Time, pub composite: Composite, } impl AgentMeter { pub fn new(ctx: &mut EventCtx, app: &App) -> AgentMeter { use abstutil::prettyprint_usize; let (finished, unfinished, by_mode) = app.primary.sim.num_trips(); let rows = vec![ "Active trips".draw_text(ctx), Widget::custom_row(vec![ Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/pedestrian.svg") .margin_right(5), prettyprint_usize(by_mode[&TripMode::Walk]).draw_text(ctx), ]), 
Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/bike.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Bike]).draw_text(ctx), ]), Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/car.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Drive]).draw_text(ctx), ]), Widget::custom_row(vec![ Widget::draw_svg(ctx, "../data/system/assets/meters/bus.svg").margin_right(5), prettyprint_usize(by_mode[&TripMode::Transit]).draw_text(ctx), ]), ]) .centered(), Widget::draw_batch( ctx, GeomBatch::from(vec![( Color::WHITE, Polygon::rectangle(0.2 * ctx.canvas.window_width / ctx.get_scale_factor(), 2.0), )]), ) .centered_horiz(), Widget::row(vec![ { let mut txt = Text::new(); let pct = if unfinished == 0 { 100.0 } else { 100.0 * (finished as f64) / ((finished + unfinished) as f64) }; txt.add(Line(format!( "Finished trips: {} ({}%)", prettyprint_usize(finished), pct as usize ))); txt.draw(ctx) }, Btn::svg_def("../data/system/assets/meters/trip_histogram.svg") .build(ctx, "more data", hotkey(Key::Q)) .align_right(), ]), ]; let composite = Composite::new(Widget::col(rows).bg(app.cs.panel_bg).padding(16)) .aligned(HorizontalAlignment::Right, VerticalAlignment::Top) .build(ctx); AgentMeter { time: app.primary.sim.time(), composite, } } pub fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> { if self.time != app.primary.sim.time() { *self = AgentMeter::new(ctx, app); return self.event(ctx, app); } match self.composite.event(ctx) { Some(Outcome::Clicked(x)) => match x.as_ref() { "more data" => { return Some(Transition::Push(dashboards::TripTable::new(ctx, app))); } _ => unreachable!(), }, None => {} } None } pub fn draw(&self, g: &mut GfxCtx) { self.composite.draw(g); } } pub struct Actions { is_paused: bool, can_interact: bool, gameplay: GameplayMode, } impl ContextualActions for Actions { fn actions(&self, app: &App, id: ID) -> Vec<(Key, String)> { let mut actions = Vec::new(); if 
self.can_interact { match id.clone() { ID::Intersection(i) => { if app.primary.map.get_i(i).is_traffic_signal() { actions.push((Key::F, "explore traffic signal details".to_string())); actions.push((Key::E, "edit traffic signal".to_string())); } if app.primary.map.get_i(i).is_stop_sign() && self.gameplay.can_edit_stop_signs() { actions.push((Key::E, "edit stop sign".to_string())); } if app.opts.dev { actions.push((Key::U, "explore uber-turns".to_string())); } } ID::Lane(l) => { if !app.primary.map.get_turns_from_lane(l).is_empty() { actions.push((Key::Z, "explore turns from this lane".to_string())); } if can_edit_lane(&self.gameplay, l, app) { actions.push((Key::E, "edit lane".to_string())); } } ID::Car(c) => { if c.1 == VehicleType::Bus { actions.push((Key::R, "show route".to_string())); } } _ => {} } } actions.extend(self.gameplay.actions(app, id)); actions } fn execute( &mut self, ctx: &mut EventCtx, app: &mut App, id: ID, action: String, close_panel: &mut bool, ) -> Transition { match (id, action.as_ref()) { (ID::Intersection(i), "explore traffic signal details") => { Transition::Push(ShowTrafficSignal::new(ctx, app, i)) } (ID::Intersection(i), "edit traffic signal") => Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(TrafficSignalEditor::new(ctx, app, i, self.gameplay.clone())), ), (ID::Intersection(i), "edit stop sign") => Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(StopSignEditor::new(ctx, app, i, self.gameplay.clone())), ), (ID::Intersection(i), "explore uber-turns") => { Transition::Push(uber_turns::UberTurnPicker::new(ctx, app, i)) } (ID::Lane(l), "explore turns from this lane") => { Transition::Push(TurnExplorer::new(ctx, app, l)) } (ID::Lane(l), "edit lane") => Transition::PushTwice( Box::new(EditMode::new(ctx, app, self.gameplay.clone())), Box::new(LaneEditor::new(ctx, app, l, self.gameplay.clone())), ), (ID::Car(c), "show route") => { *close_panel = false; app.layer 
= Some(Box::new(crate::layer::bus::ShowBusRoute::new( ctx, app, app.primary.sim.bus_route_id(c).unwrap(), ))); Transition::Keep } (_, "follow (run the simulation)") => { *close_panel = false; Transition::KeepWithData(Box::new(|state, ctx, app| { let mode = state.downcast_mut::<SandboxMode>().unwrap(); let speed = mode.controls.speed.as_mut().unwrap(); assert!(speed.is_paused()); speed.resume_realtime(ctx, app); })) } (_, "unfollow (pause the simulation)") => { *close_panel = false; Transition::KeepWithData(Box::new(|state, ctx, app| { let mode = state.downcast_mut::<SandboxMode>().unwrap(); let speed = mode.controls.speed.as_mut().unwrap(); assert!(!speed.is_paused()); speed.pause(ctx, app); })) } (id, action) => self .gameplay .execute(ctx, app, id, action.to_string(), close_panel), } } fn is_paused(&self) -> bool { self.is_paused } }
unwrap_or(true), can_interact: self.gameplay.can_examine_objects(), gameplay: self.gameplay_mode.clone(), } }
function_block-function_prefix_line
[ { "content": "// TODO Well, there goes the nice consolidation of stuff in BtnBuilder. :\\\n\npub fn hotkey_btn<I: Into<String>>(ctx: &EventCtx, app: &App, label: I, key: Key) -> Widget {\n\n let label = label.into();\n\n let mut txt = Text::new();\n\n txt.append(Line(key.describe()).fg(ctx.style().hotkey_color));\n\n txt.append(Line(format!(\" - {}\", label)));\n\n Btn::text_bg(label, txt, app.cs.section_bg, app.cs.hovering).build_def(ctx, hotkey(key))\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 0, "score": 462441.4088042802 }, { "content": "pub fn execute(_: &mut EventCtx, app: &mut App, id: ID, action: String) -> Transition {\n\n let mut tut = app.session.tutorial.as_mut().unwrap();\n\n let response = match (id, action.as_ref()) {\n\n (ID::Car(c), \"draw WASH ME\") => {\n\n let is_parked = app\n\n .primary\n\n .sim\n\n .agent_to_trip(AgentID::Car(ESCORT))\n\n .is_none();\n\n if c == ESCORT {\n\n if is_parked {\n\n tut.prank_done = true;\n\n msg(\n\n \"Prank in progress\",\n\n vec![\"You quickly scribble on the window...\"],\n\n )\n\n } else {\n\n msg(\n\n \"Not yet!\",\n\n vec![\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 1, "score": 455706.00690969004 }, { "content": "fn make_topcenter(ctx: &mut EventCtx, app: &App, mode: &GameplayMode) -> Composite {\n\n Composite::new(\n\n Widget::col(vec![\n\n Line(\"Editing map\")\n\n .small_heading()\n\n .draw(ctx)\n\n .centered_horiz(),\n\n Widget::row(vec![\n\n if mode.can_edit_lanes() {\n\n Btn::text_fg(\"bulk edit\").build_def(ctx, hotkey(Key::B))\n\n } else {\n\n Btn::text_fg(\"bulk edit\").inactive(ctx)\n\n },\n\n PersistentSplit::new(\n\n ctx,\n\n \"finish editing\",\n\n app.opts.resume_after_edit,\n\n hotkey(Key::Escape),\n\n vec![\n\n Choice::new(\n", "file_path": "game/src/edit/mod.rs", "rank": 2, "score": 450903.4443488364 }, { "content": "pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: String) -> Transition {\n\n match (id, action.as_ref()) {\n\n 
(ID::Building(b), \"start a trip here\") => {\n\n Transition::Push(AgentSpawner::new(ctx, app, Some(b)))\n\n }\n\n (ID::Intersection(id), \"spawn agents here\") => {\n\n spawn_agents_around(id, app);\n\n Transition::Keep\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 3, "score": 450591.5555777253 }, { "content": "pub fn save_edits_as(wizard: &mut WrappedWizard, app: &mut App) -> Option<()> {\n\n let map = &mut app.primary.map;\n\n let (prompt, new_default_name) = if map.get_edits().edits_name == \"untitled edits\" {\n\n (\"Name these edits\", \"\".to_string())\n\n } else {\n\n (\n\n \"Name the new copy of these edits\",\n\n format!(\"copy of {}\", map.get_edits().edits_name),\n\n )\n\n };\n\n\n\n let name = loop {\n\n let candidate = wizard.input_something(\n\n prompt,\n\n Some(new_default_name.clone()),\n\n Box::new(|l| {\n\n let l = l.trim().to_string();\n\n if l.contains(\"/\") || l == \"untitled edits\" || l == \"\" {\n\n None\n\n } else {\n", "file_path": "game/src/edit/mod.rs", "rank": 4, "score": 440507.1390527574 }, { "content": "pub fn tool_panel(ctx: &mut EventCtx, app: &App) -> WrappedComposite {\n\n let row = vec![\n\n // TODO Maybe this is confusing -- it doesn't jump to the title screen necessarily.\n\n // Caller has to handle this one\n\n Btn::svg_def(\"../data/system/assets/tools/home.svg\").build(\n\n ctx,\n\n \"back\",\n\n hotkey(Key::Escape),\n\n ),\n\n Btn::svg_def(\"../data/system/assets/tools/settings.svg\").build(ctx, \"settings\", None),\n\n ];\n\n WrappedComposite::new(\n\n Composite::new(Widget::row(row).bg(app.cs.panel_bg).padding(16))\n\n .aligned(HorizontalAlignment::Left, VerticalAlignment::BottomAboveOSD)\n\n .build(ctx),\n\n )\n\n .cb(\n\n \"settings\",\n\n Box::new(|ctx, app| {\n\n Some(Transition::Push(Box::new(options::OptionsPanel::new(\n\n ctx, app,\n\n ))))\n\n }),\n\n )\n\n}\n", "file_path": "game/src/common/panels.rs", "rank": 5, "score": 439151.29667110473 }, { 
"content": "fn maybe_save_first(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let (resp, _) = wizard.choose(\"Wait, do you want to save your map edits first?\", || {\n\n vec![Choice::new(\"save\", ()), Choice::new(\"discard\", ())]\n\n })?;\n\n if resp == \"save\" {\n\n save_edits_as(&mut wizard, app)?;\n\n }\n\n ctx.loading_screen(\"reset map and sim\", |ctx, mut timer| {\n\n if !app.primary.map.get_edits().commands.is_empty() {\n\n apply_map_edits(ctx, app, MapEdits::new());\n\n app.primary\n\n .map\n\n .recalculate_pathfinding_after_edits(&mut timer);\n\n }\n\n });\n\n // TODO Don't make the player pick the FinalScore thing again :(\n\n Some(Transition::Pop)\n\n}\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 6, "score": 438897.37970003625 }, { "content": "pub fn change_speed_limit(ctx: &mut EventCtx, default: Speed) -> Widget {\n\n Widget::row(vec![\n\n \"Change speed limit:\".draw_text(ctx).centered_vert(),\n\n Widget::dropdown(\n\n ctx,\n\n \"speed limit\",\n\n default,\n\n vec![\n\n Choice::new(\"10 mph\", Speed::miles_per_hour(10.0)),\n\n Choice::new(\"15 mph\", Speed::miles_per_hour(15.0)),\n\n Choice::new(\"20 mph\", Speed::miles_per_hour(20.0)),\n\n Choice::new(\"25 mph\", Speed::miles_per_hour(25.0)),\n\n Choice::new(\"30 mph\", Speed::miles_per_hour(30.0)),\n\n Choice::new(\"35 mph\", Speed::miles_per_hour(35.0)),\n\n Choice::new(\"40 mph\", Speed::miles_per_hour(40.0)),\n\n Choice::new(\"45 mph\", Speed::miles_per_hour(45.0)),\n\n Choice::new(\"50 mph\", Speed::miles_per_hour(50.0)),\n\n Choice::new(\"55 mph\", Speed::miles_per_hour(55.0)),\n\n Choice::new(\"60 mph\", Speed::miles_per_hour(60.0)),\n\n Choice::new(\"65 mph\", Speed::miles_per_hour(65.0)),\n\n Choice::new(\"70 mph\", Speed::miles_per_hour(70.0)),\n\n // Don't need anything higher. 
Though now I kind of miss 3am drives on TX-71...\n\n ],\n\n ),\n\n ])\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 7, "score": 428325.829756359 }, { "content": "fn make_tool_panel(ctx: &mut EventCtx, app: &App) -> Widget {\n\n Widget::col(vec![\n\n (if ctx.canvas.cam_zoom >= app.opts.min_zoom_for_detail {\n\n Btn::svg_def(\"../data/system/assets/minimap/zoom_out_fully.svg\").build(\n\n ctx,\n\n \"zoom out fully\",\n\n None,\n\n )\n\n } else {\n\n Btn::svg_def(\"../data/system/assets/minimap/zoom_in_fully.svg\").build(\n\n ctx,\n\n \"zoom in fully\",\n\n None,\n\n )\n\n })\n\n .bg(app.cs.inner_panel),\n\n Btn::svg_def(\"../data/system/assets/tools/layers.svg\")\n\n .build(ctx, \"change layers\", hotkey(Key::L))\n\n .bg(app.cs.inner_panel),\n\n Btn::svg_def(\"../data/system/assets/tools/search.svg\")\n\n .build(ctx, \"search\", hotkey(Key::K))\n\n .bg(app.cs.inner_panel),\n\n ])\n\n}\n\n\n", "file_path": "game/src/common/minimap.rs", "rank": 8, "score": 420923.3556221168 }, { "content": "pub fn apply_map_edits(ctx: &mut EventCtx, app: &mut App, edits: MapEdits) {\n\n let mut timer = Timer::new(\"apply map edits\");\n\n\n\n let (roads_changed, turns_deleted, turns_added, mut modified_intersections) =\n\n app.primary.map.apply_edits(edits, &mut timer);\n\n\n\n for r in roads_changed {\n\n let road = app.primary.map.get_r(r);\n\n app.primary.draw_map.roads[r.0] =\n\n DrawRoad::new(road, &app.primary.map, &app.cs, ctx.prerender);\n\n\n\n // An edit to one lane potentially affects markings in all lanes in the same road, because\n\n // of one-way markings, driving lines, etc.\n\n for l in road.all_lanes() {\n\n let lane = app.primary.map.get_l(l);\n\n app.primary.draw_map.lanes[l.0] = DrawLane::new(\n\n lane,\n\n &app.primary.map,\n\n app.primary.current_flags.draw_lane_markings,\n\n &app.cs,\n", "file_path": "game/src/edit/mod.rs", "rank": 9, "score": 419481.558756434 }, { "content": "fn warp_to(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> 
Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let to = wizard.input_string(\"Warp to what?\")?;\n\n if let Some(t) = inner_warp(ctx, app, &to) {\n\n Some(t)\n\n } else {\n\n Some(Transition::Replace(msg(\n\n \"Bad warp ID\",\n\n vec![format!(\"{} isn't a valid ID\", to)],\n\n )))\n\n }\n\n}\n\n\n", "file_path": "game/src/common/warp.rs", "rank": 10, "score": 416594.05819018884 }, { "content": "fn make_horiz_viz_panel(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let mut row = Vec::new();\n\n for (label, color, enabled) in &app.agent_cs.rows {\n\n row.push(Checkbox::colored(ctx, label, *color, *enabled).margin_right(8));\n\n row.push(Line(label).draw(ctx).margin_right(24));\n\n }\n\n let last = row.pop().unwrap();\n\n row.push(last.margin_right(0));\n\n Widget::custom_row(row)\n\n}\n\n\n", "file_path": "game/src/common/minimap.rs", "rank": 11, "score": 415745.9200447254 }, { "content": "fn make_vert_viz_panel(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let mut col = Vec::new();\n\n\n\n for (label, color, enabled) in &app.agent_cs.rows {\n\n let mut row = Vec::new();\n\n row.push(Checkbox::colored(ctx, label, *color, *enabled).margin_right(8));\n\n row.push(Line(label).draw(ctx));\n\n col.push(Widget::custom_row(row));\n\n }\n\n\n\n Widget::col(col)\n\n}\n", "file_path": "game/src/common/minimap.rs", "rank": 12, "score": 415745.9200447254 }, { "content": "fn search_osm(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n let filter = wiz.wrap(ctx).input_string(\"Search for what?\")?;\n\n let mut num_matches = 0;\n\n let mut batch = GeomBatch::new();\n\n\n\n // TODO Case insensitive\n\n let map = &app.primary.map;\n\n let color = Color::RED;\n\n for r in map.all_roads() {\n\n if r.osm_tags\n\n .iter()\n\n .any(|(k, v)| format!(\"{} = {}\", k, v).contains(&filter))\n\n {\n\n num_matches += 1;\n\n batch.push(color, r.get_thick_polygon(map).unwrap());\n\n }\n\n }\n\n for a in map.all_areas() {\n\n if a.osm_tags\n\n 
.iter()\n", "file_path": "game/src/debug/mod.rs", "rank": 13, "score": 411922.8312420872 }, { "content": "fn choose_polygon(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n // TODO Sorry, Seattle only right now\n\n let name = wiz.wrap(ctx).choose_string(\"Edit which polygon?\", || {\n\n abstutil::list_all_objects(\"../data/input/seattle/polygons/\".to_string())\n\n })?;\n\n match LonLat::read_osmosis_polygon(format!(\"../data/input/seattle/polygons/{}.poly\", name)) {\n\n Ok(pts) => Some(Transition::Replace(polygon::PolygonEditor::new(\n\n ctx, app, name, pts,\n\n ))),\n\n Err(err) => {\n\n println!(\"Bad polygon {}: {}\", name, err);\n\n Some(Transition::Pop)\n\n }\n\n }\n\n}\n\n\n", "file_path": "game/src/devtools/mod.rs", "rank": 15, "score": 411922.8312420873 }, { "content": "fn choose_kml(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n // TODO Sorry, Seattle only right now\n\n let path = wiz.wrap(ctx).choose_string(\"View what KML dataset?\", || {\n\n abstutil::list_dir(std::path::Path::new(\"../data/input/seattle/\"))\n\n .into_iter()\n\n .filter(|x| x.ends_with(\".bin\") && !x.ends_with(\"popdat.bin\"))\n\n .collect()\n\n })?;\n\n Some(Transition::Replace(kml::ViewKML::new(ctx, app, path)))\n\n}\n", "file_path": "game/src/devtools/mod.rs", "rank": 16, "score": 411922.8312420873 }, { "content": "fn load_savestate(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n let ss = wiz.wrap(ctx).choose_string(\"Load which savestate?\", || {\n\n abstutil::list_all_objects(app.primary.sim.save_dir())\n\n })?;\n\n // TODO Oh no, we have to do path construction here :(\n\n let ss_path = format!(\"{}/{}.bin\", app.primary.sim.save_dir(), ss);\n\n\n\n ctx.loading_screen(\"load savestate\", |ctx, mut timer| {\n\n app.primary.sim = Sim::load_savestate(ss_path, &app.primary.map, &mut timer)\n\n .expect(\"Can't load savestate\");\n\n app.recalculate_current_selection(ctx);\n\n });\n\n 
Some(Transition::Pop)\n\n}\n\n\n", "file_path": "game/src/debug/mod.rs", "rank": 17, "score": 411922.8312420872 }, { "content": "fn load_scenario(wiz: &mut Wizard, ctx: &mut EventCtx, app: &mut App) -> Option<Transition> {\n\n let map_name = app.primary.map.get_name().to_string();\n\n let s = wiz.wrap(ctx).choose_string(\"Load which scenario?\", || {\n\n abstutil::list_all_objects(abstutil::path_all_scenarios(&map_name))\n\n })?;\n\n let scenario = abstutil::read_binary(\n\n abstutil::path_scenario(&map_name, &s),\n\n &mut Timer::throwaway(),\n\n );\n\n Some(Transition::Replace(Box::new(\n\n scenario::ScenarioManager::new(scenario, ctx, app),\n\n )))\n\n}\n\n\n", "file_path": "game/src/devtools/mod.rs", "rank": 18, "score": 411922.8312420873 }, { "content": "pub fn preview_trip(g: &mut GfxCtx, app: &App, composite: &Composite) {\n\n let inner_rect = composite.rect_of(\"preview\").clone();\n\n let map_bounds = app.primary.map.get_bounds().clone();\n\n let zoom = 0.15 * g.canvas.window_width / map_bounds.width().max(map_bounds.height());\n\n g.fork(\n\n Pt2D::new(map_bounds.min_x, map_bounds.min_y),\n\n ScreenPt::new(inner_rect.x1, inner_rect.y1),\n\n zoom,\n\n None,\n\n );\n\n g.enable_clipping(inner_rect);\n\n\n\n g.redraw(&app.primary.draw_map.boundary_polygon);\n\n g.redraw(&app.primary.draw_map.draw_all_areas);\n\n g.redraw(&app.primary.draw_map.draw_all_unzoomed_intersections);\n\n g.redraw(&app.primary.draw_map.draw_all_thick_roads);\n\n\n\n if let Some(x) = composite.currently_hovering() {\n\n if let Ok(idx) = x.parse::<usize>() {\n\n let trip = TripID(idx);\n\n preview_route(g, app, trip).draw(g);\n\n }\n\n }\n\n\n\n g.disable_clipping();\n\n g.unfork();\n\n}\n\n\n", "file_path": "game/src/sandbox/dashboards/trip_table.rs", "rank": 19, "score": 411488.45607899176 }, { "content": "fn inner_warp(ctx: &mut EventCtx, app: &mut App, line: &str) -> Option<Transition> {\n\n if line.is_empty() {\n\n return None;\n\n }\n\n // TODO Weird magic shortcut to go to last 
spot. What should this be?\n\n if line == \"j\" {\n\n if let Some((pt, zoom)) = app.primary.last_warped_from {\n\n return Some(Transition::Replace(Warping::new(\n\n ctx,\n\n pt,\n\n Some(zoom),\n\n None,\n\n &mut app.primary,\n\n )));\n\n }\n\n return None;\n\n }\n\n\n\n let id = match usize::from_str_radix(&line[1..line.len()], 10) {\n\n Ok(idx) => match line.chars().next().unwrap() {\n", "file_path": "game/src/common/warp.rs", "rank": 20, "score": 408191.53967347014 }, { "content": "pub fn color_for_mode(app: &App, m: TripMode) -> Color {\n\n match m {\n\n TripMode::Walk => app.cs.unzoomed_pedestrian,\n\n TripMode::Bike => app.cs.unzoomed_bike,\n\n TripMode::Transit => app.cs.unzoomed_bus,\n\n TripMode::Drive => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 21, "score": 388678.4677022757 }, { "content": "fn cutscene_task(mode: &GameplayMode) -> Box<dyn Fn(&mut EventCtx) -> Widget> {\n\n let goal = match mode {\n\n GameplayMode::OptimizeCommute(_, d) => *d,\n\n _ => unreachable!(),\n\n };\n\n\n\n Box::new(move |ctx| {\n\n Widget::custom_col(vec![\n\n Text::from_multiline(vec![\n\n Line(format!(\"Speed up the VIP's trips by a total of {}\", goal)).fg(Color::BLACK),\n\n Line(\"Ignore the damage done to everyone else.\").fg(Color::BLACK),\n\n ])\n\n .draw(ctx)\n\n .margin_below(30),\n\n Widget::row(vec![\n\n Widget::col(vec![\n\n Line(\"Time\").fg(Color::BLACK).draw(ctx),\n\n Widget::draw_svg_transform(\n\n ctx,\n\n \"../data/system/assets/tools/time.svg\",\n", "file_path": "game/src/sandbox/gameplay/commute.rs", "rank": 22, "score": 382397.74426023813 }, { "content": "pub fn can_edit_lane(mode: &GameplayMode, l: LaneID, app: &App) -> bool {\n\n mode.can_edit_lanes()\n\n && !app.primary.map.get_l(l).is_sidewalk()\n\n && app.primary.map.get_l(l).lane_type != LaneType::SharedLeftTurn\n\n && app.primary.map.get_l(l).lane_type != LaneType::LightRail\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 23, "score": 
381916.32998707355 }, { "content": "pub fn aborted(ctx: &mut EventCtx, app: &App, trip: TripID) -> Widget {\n\n let (start_time, trip_start, trip_end, _) = app.primary.sim.trip_info(trip);\n\n\n\n let mut col = vec![Text::from_multiline(vec![\n\n Line(\"A glitch in the simulation happened.\"),\n\n Line(\"This trip, however, did not.\"),\n\n ])\n\n .draw(ctx)];\n\n\n\n // TODO Warp buttons. make_table is showing its age.\n\n let (_, _, name1) = endpoint(&trip_start, &app.primary.map);\n\n let (_, _, name2) = endpoint(&trip_end, &app.primary.map);\n\n col.extend(make_table(\n\n ctx,\n\n vec![\n\n (\"Departure\", start_time.ampm_tostring()),\n\n (\"From\", name1),\n\n (\"To\", name2),\n\n ]\n\n .into_iter(),\n\n ));\n\n\n\n Widget::col(col)\n\n}\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 24, "score": 381549.041533019 }, { "content": "pub fn lctrl(key: Key) -> Option<MultiKey> {\n\n Some(MultiKey::LCtrl(key))\n\n}\n\n\n", "file_path": "ezgui/src/event.rs", "rank": 25, "score": 379222.9040690521 }, { "content": "pub fn hotkey(key: Key) -> Option<MultiKey> {\n\n Some(MultiKey::Normal(key))\n\n}\n\n\n", "file_path": "ezgui/src/event.rs", "rank": 26, "score": 379219.53483397607 }, { "content": "pub fn actions(app: &App, id: ID) -> Vec<(Key, String)> {\n\n match (app.session.tutorial.as_ref().unwrap().interaction(), id) {\n\n (Task::LowParking, ID::Lane(_)) => {\n\n vec![(Key::C, \"check the parking occupancy\".to_string())]\n\n }\n\n (Task::Escort, ID::Car(_)) => vec![(Key::C, \"draw WASH ME\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 27, "score": 378614.5597565257 }, { "content": "fn make_minimap_panel(ctx: &mut EventCtx, app: &App, zoom_lvl: usize) -> Composite {\n\n if ctx.canvas.cam_zoom < app.opts.min_zoom_for_detail {\n\n return Composite::new(Widget::row(vec![\n\n make_tool_panel(ctx, app).align_right(),\n\n make_vert_viz_panel(ctx, app)\n\n .bg(app.cs.panel_bg)\n\n 
.padding(16),\n\n ]))\n\n .aligned(\n\n HorizontalAlignment::Right,\n\n VerticalAlignment::BottomAboveOSD,\n\n )\n\n .build(ctx);\n\n }\n\n\n\n let zoom_col = {\n\n let mut col = vec![Btn::svg_def(\"../data/system/assets/speed/speed_up.svg\")\n\n .build(ctx, \"zoom in\", None)\n\n .margin_below(20)];\n\n for i in (0..=3).rev() {\n", "file_path": "game/src/common/minimap.rs", "rank": 28, "score": 378137.6423446029 }, { "content": "pub fn people(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::BldgPeople(id));\n\n\n\n let mut ppl: Vec<(Time, Widget)> = Vec::new();\n\n for p in app.primary.sim.bldg_to_people(id) {\n\n let person = app.primary.sim.get_person(p);\n\n\n\n let mut next_trip: Option<(Time, TripMode)> = None;\n\n for t in &person.trips {\n\n match app.primary.sim.trip_to_agent(*t) {\n\n TripResult::TripNotStarted => {\n\n let (start_time, _, _, mode) = app.primary.sim.trip_info(*t);\n\n next_trip = Some((start_time, mode));\n\n break;\n\n }\n\n TripResult::Ok(_) | TripResult::ModeChange => {\n\n // TODO What to do here? 
This is meant for building callers right now\n\n break;\n\n }\n\n TripResult::TripDone | TripResult::TripAborted => {}\n", "file_path": "game/src/info/building.rs", "rank": 29, "score": 369293.4592910609 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::BldgInfo(id));\n\n let b = app.primary.map.get_b(id);\n\n\n\n let mut kv = Vec::new();\n\n\n\n kv.push((\"Address\", b.address.clone()));\n\n if let Some(ref name) = b.name {\n\n kv.push((\"Name\", name.clone()));\n\n }\n\n if app.opts.dev {\n\n kv.push((\"OSM ID\", format!(\"{}\", b.osm_way_id)));\n\n }\n\n\n\n if let Some(ref p) = b.parking {\n\n let free = app.primary.sim.get_free_offstreet_spots(b.id).len();\n\n if let Some(ref n) = p.public_garage_name {\n\n kv.push((\n\n \"Parking\",\n\n format!(\n", "file_path": "game/src/info/building.rs", "rank": 30, "score": 369293.4592910609 }, { "content": "fn transition(ctx: &mut EventCtx, app: &mut App, tut: &mut TutorialState) -> Transition {\n\n tut.reset_state();\n\n let mode = GameplayMode::Tutorial(tut.current);\n\n Transition::Replace(Box::new(SandboxMode::new(ctx, app, mode)))\n\n}\n\n\n\nimpl TutorialState {\n\n // These're mutex to each state, but still important to reset. 
Otherwise if you go back to a\n\n // previous interaction stage, it'll just be automatically marked done.\n\n fn reset_state(&mut self) {\n\n self.inspected_bike_lane = false;\n\n self.inspected_building = false;\n\n self.inspected_stop_sign = false;\n\n self.inspected_border = false;\n\n self.was_paused = true;\n\n self.num_pauses = 0;\n\n self.score_delivered = false;\n\n self.following_car = false;\n\n self.car_parked = false;\n\n self.prank_done = false;\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 31, "score": 365921.1523510909 }, { "content": "pub fn stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusStopID) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n let sim = &app.primary.sim;\n\n\n\n rows.push(Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().draw(ctx),\n\n header_btns(ctx),\n\n ]));\n\n rows.push(format!(\"On {}\", app.primary.map.get_parent(id.sidewalk).get_name()).draw_text(ctx));\n\n\n\n let all_arrivals = &sim.get_analytics().bus_arrivals;\n\n for r in app.primary.map.get_routes_serving_stop(id) {\n\n let buses = app.primary.sim.status_of_buses(r.id);\n\n if buses.is_empty() {\n\n rows.push(format!(\"Route {}: no buses running\", r.name).draw_text(ctx));\n\n } else {\n\n rows.push(Btn::text_fg(format!(\"Route {}\", r.name)).build_def(ctx, None));\n\n details\n\n .hyperlinks\n", "file_path": "game/src/info/bus.rs", "rank": 32, "score": 365912.25233207864 }, { "content": "// TODO For now, this conflates a single bus with the whole route, but that's fine, since the sim\n\n// only spawns one per route anyway.\n\npub fn bus_status(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Vec<Widget> {\n\n let mut rows = bus_header(ctx, app, details, id, Tab::BusStatus(id));\n\n\n\n let kv = app.primary.sim.bus_properties(id, &app.primary.map);\n\n rows.extend(make_table(ctx, kv.into_iter()));\n\n\n\n let route = app.primary.sim.bus_route_id(id).unwrap();\n\n rows.push(passenger_delay(ctx, app, 
details, route));\n\n\n\n rows\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 33, "score": 365912.25233207864 }, { "content": "pub fn bus_delays(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Vec<Widget> {\n\n let mut rows = bus_header(ctx, app, details, id, Tab::BusDelays(id));\n\n let route = app.primary.sim.bus_route_id(id).unwrap();\n\n rows.push(delays_over_time(ctx, app, route));\n\n rows\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 34, "score": 365912.25233207864 }, { "content": "fn summary(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let mut num_same = 0;\n\n let mut num_faster = 0;\n\n let mut num_slower = 0;\n\n let mut sum_faster = Duration::ZERO;\n\n let mut sum_slower = Duration::ZERO;\n\n for (b, a, mode) in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .both_finished_trips(app.primary.sim.time(), app.prebaked())\n\n {\n\n if !filter.modes.contains(&mode) {\n\n continue;\n\n }\n\n let same = if let Some(pct) = filter.changes_pct {\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 35, "score": 364016.17432930396 }, { "content": "pub fn change_traffic_signal(signal: ControlTrafficSignal, ctx: &mut EventCtx, app: &mut App) {\n\n let mut edits = app.primary.map.get_edits().clone();\n\n // TODO Only record one command for the entire session. 
Otherwise, we can exit this editor and\n\n // undo a few times, potentially ending at an invalid state!\n\n let old = if let Some(prev) = edits.commands.last().and_then(|cmd| match cmd {\n\n EditCmd::ChangeIntersection {\n\n i,\n\n ref new,\n\n ref old,\n\n } => {\n\n if signal.id == *i {\n\n match new {\n\n EditIntersection::TrafficSignal(_) => Some(old.clone()),\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 36, "score": 363041.780719145 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: ParkingLotID) -> Vec<Widget> {\n\n let mut rows = header(ctx, details, id, Tab::ParkingLot(id));\n\n let pl = app.primary.map.get_pl(id);\n\n let capacity = pl.spots.len();\n\n\n\n rows.push(\n\n format!(\n\n \"{} / {} spots available\",\n\n prettyprint_usize(app.primary.sim.get_free_lot_spots(pl.id).len()),\n\n prettyprint_usize(capacity)\n\n )\n\n .draw_text(ctx),\n\n );\n\n\n\n let mut series = vec![Series {\n\n label: format!(\"After \\\"{}\\\"\", app.primary.map.get_edits().edits_name),\n\n color: app.cs.after_changes,\n\n pts: app.primary.sim.get_analytics().parking_lot_availability(\n\n app.primary.sim.time(),\n\n pl.id,\n", "file_path": "game/src/info/parking_lot.rs", "rank": 37, "score": 362644.1473552345 }, { "content": "fn contingency_table(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let total_width = 500.0;\n\n let total_height = 300.0;\n\n\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n let num_buckets = 10;\n\n let (_, endpts) = points\n\n .iter()\n\n .map(|(b, a)| a.max(b))\n\n .max()\n\n .unwrap()\n\n .make_intervals_for_max(num_buckets);\n\n\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 38, "score": 359892.0503214207 }, { "content": "fn scatter_plot(ctx: &mut EventCtx, 
app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n\n\n CompareTimes::new(\n\n ctx,\n\n format!(\n\n \"Trip time before \\\"{}\\\"\",\n\n app.primary.map.get_edits().edits_name\n\n ),\n\n format!(\n\n \"Trip time after \\\"{}\\\"\",\n\n app.primary.map.get_edits().edits_name\n\n ),\n\n points,\n\n )\n\n .outline(2.0, Color::WHITE)\n\n .padding(10)\n\n}\n\n\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 39, "score": 359892.0503214207 }, { "content": "pub fn actions(_: &App, id: ID) -> Vec<(Key, String)> {\n\n match id {\n\n ID::Building(_) => vec![(Key::Z, \"start a trip here\".to_string())],\n\n ID::Intersection(_) => vec![(Key::Z, \"spawn agents here\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 40, "score": 359639.4327606405 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n rows.push(Widget::row(vec![\n\n Line(id.to_string()).small_heading().draw(ctx),\n\n header_btns(ctx),\n\n ]));\n\n\n\n let a = app.primary.map.get_a(id);\n\n rows.extend(make_table(\n\n ctx,\n\n a.osm_tags\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string())),\n\n ));\n\n\n\n rows\n\n}\n", "file_path": "game/src/info/debug.rs", "rank": 41, "score": 356687.8558330941 }, { "content": "fn delays_over_time(ctx: &mut EventCtx, app: &App, id: BusRouteID) -> Widget {\n\n let route = app.primary.map.get_br(id);\n\n let mut delays_per_stop = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .bus_arrivals_over_time(app.primary.sim.time(), id);\n\n\n\n let mut series = Vec::new();\n\n for idx1 in 0..route.stops.len() {\n\n let idx2 = if idx1 == route.stops.len() - 1 {\n\n 0\n\n } else {\n\n idx1 + 1\n\n };\n\n series.push(Series {\n\n label: 
format!(\"Stop {}->{}\", idx1 + 1, idx2 + 1),\n\n color: app.cs.rotating_color_plot(idx1),\n\n pts: delays_per_stop\n\n .remove(&route.stops[idx2])\n\n .unwrap_or_else(Vec::new),\n\n });\n\n }\n\n Widget::col(vec![\n\n Line(\"Delays between stops\").small_heading().draw(ctx),\n\n LinePlot::new(ctx, series, PlotOptions::fixed()),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 42, "score": 355927.59744354675 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: IntersectionID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::IntersectionInfo(id));\n\n let i = app.primary.map.get_i(id);\n\n\n\n let mut txt = Text::from(Line(\"Connecting\"));\n\n let mut road_names = BTreeSet::new();\n\n for r in &i.roads {\n\n road_names.insert(app.primary.map.get_r(*r).get_name());\n\n }\n\n for r in road_names {\n\n // TODO The spacing is ignored, so use -\n\n txt.add(Line(format!(\"- {}\", r)));\n\n }\n\n if app.opts.dev {\n\n txt.add(Line(format!(\"OSM node ID: {}\", i.orig_id.osm_node_id)).secondary());\n\n }\n\n rows.push(txt.draw(ctx));\n\n\n\n rows\n\n}\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 43, "score": 353025.9108225951 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::LaneInfo(id));\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let mut kv = Vec::new();\n\n\n\n if !l.is_sidewalk() {\n\n kv.push((\"Type\", l.lane_type.describe().to_string()));\n\n }\n\n if r.zone.is_some() {\n\n // TODO Ideally the area name, and be more specific about access restrictions\n\n kv.push((\"Access\", \"Private\".to_string()));\n\n }\n\n\n\n if l.is_parking() {\n\n kv.push((\n\n \"Parking\",\n\n format!(\n", "file_path": "game/src/info/lane.rs", "rank": 44, "score": 353025.9108225952 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, 
details: &mut Details, id: LaneID) -> Vec<Widget> {\n\n let mut rows = header(ctx, app, details, id, Tab::LaneDebug(id));\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let mut kv = Vec::new();\n\n\n\n kv.push((\"Parent\".to_string(), r.id.to_string()));\n\n\n\n if l.is_driving() {\n\n kv.push((\n\n \"Parking blackhole redirect\".to_string(),\n\n format!(\"{:?}\", l.parking_blackhole),\n\n ));\n\n }\n\n\n\n if let Some(types) = l\n\n .get_turn_restrictions(r)\n\n .map(|types| types.collect::<Vec<_>>())\n", "file_path": "game/src/info/lane.rs", "rank": 45, "score": 353025.9108225952 }, { "content": "fn make_changelist(ctx: &mut EventCtx, app: &App) -> Composite {\n\n // TODO Support redo. Bit harder here to reset the redo_stack when the edits\n\n // change, because nested other places modify it too.\n\n let edits = app.primary.map.get_edits();\n\n let mut col = vec![\n\n Widget::row(vec![\n\n Btn::text_fg(format!(\"{} ↓\", &edits.edits_name)).build(\n\n ctx,\n\n \"load edits\",\n\n lctrl(Key::L),\n\n ),\n\n (if edits.commands.is_empty() {\n\n Widget::draw_svg_transform(\n\n ctx,\n\n \"../data/system/assets/tools/save.svg\",\n\n RewriteColor::ChangeAll(Color::WHITE.alpha(0.5)),\n\n )\n\n } else {\n\n Btn::svg_def(\"../data/system/assets/tools/save.svg\").build(\n\n ctx,\n", "file_path": "game/src/edit/mod.rs", "rank": 46, "score": 352746.31594864646 }, { "content": "#[allow(non_snake_case)]\n\npub fn Line<S: Into<String>>(text: S) -> TextSpan {\n\n TextSpan {\n\n text: text.into(),\n\n fg_color: DEFAULT_FG_COLOR,\n\n size: DEFAULT_FONT_SIZE,\n\n font: DEFAULT_FONT,\n\n underlined: false,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Text {\n\n // The bg_color will cover the entire block, but some lines can have extra highlighting.\n\n lines: Vec<(Option<Color>, Vec<TextSpan>)>,\n\n // TODO Stop using this as much as possible.\n\n bg_color: Option<Color>,\n\n}\n\n\n\nimpl Text {\n\n pub fn new() -> Text 
{\n", "file_path": "ezgui/src/text.rs", "rank": 47, "score": 352708.8582585716 }, { "content": "pub fn spawn_agents_around(i: IntersectionID, app: &mut App) {\n\n let map = &app.primary.map;\n\n let sim = &mut app.primary.sim;\n\n let mut rng = app.primary.current_flags.sim_flags.make_rng();\n\n let mut spawner = sim.make_spawner();\n\n\n\n if map.all_buildings().is_empty() {\n\n println!(\"No buildings, can't pick destinations\");\n\n return;\n\n }\n\n\n\n let mut timer = Timer::new(format!(\n\n \"spawning agents around {} (rng seed {:?})\",\n\n i, app.primary.current_flags.sim_flags.rng_seed\n\n ));\n\n\n\n let now = sim.time();\n\n for l in &map.get_i(i).incoming_lanes {\n\n let lane = map.get_l(*l);\n\n if lane.is_driving() || lane.is_biking() {\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 48, "score": 346982.9512034424 }, { "content": "fn make_top_center(ctx: &mut EventCtx, app: &App) -> Composite {\n\n let rows = vec![\n\n Widget::row(vec![\n\n Line(\"Sandbox\").small_heading().draw(ctx),\n\n Widget::draw_batch(\n\n ctx,\n\n GeomBatch::from(vec![(Color::WHITE, Polygon::rectangle(2.0, 50.0))]),\n\n ),\n\n \"Map:\".draw_text(ctx),\n\n Btn::text_fg(format!(\"{} ↓\", nice_map_name(app.primary.map.get_name()))).build(\n\n ctx,\n\n \"change map\",\n\n lctrl(Key::L),\n\n ),\n\n \"Traffic:\".draw_text(ctx),\n\n Btn::text_fg(\"none ↓\").build(ctx, \"change traffic\", hotkey(Key::S)),\n\n Btn::svg_def(\"../data/system/assets/tools/edit_map.svg\").build(\n\n ctx,\n\n \"edit map\",\n\n lctrl(Key::E),\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 49, "score": 344083.3489374056 }, { "content": "pub fn make_crosswalk(batch: &mut GeomBatch, turn: &Turn, map: &Map, cs: &ColorScheme) {\n\n if make_rainbow_crosswalk(batch, turn, map) {\n\n return;\n\n }\n\n\n\n let width = map.get_l(turn.id.src).width;\n\n // Start at least width out to not hit sidewalk corners. Also account for the thickness of the\n\n // crosswalk line itself. 
Center the lines inside these two boundaries.\n\n let boundary = width;\n\n let tile_every = width * 0.6;\n\n let line = {\n\n // The middle line in the crosswalk geometry is the main crossing line.\n\n let pts = turn.geom.points();\n\n Line::new(pts[1], pts[2])\n\n };\n\n\n\n let available_length = line.length() - (boundary * 2.0);\n\n if available_length > Distance::ZERO {\n\n let num_markings = (available_length / tile_every).floor() as usize;\n\n let mut dist_along =\n", "file_path": "game/src/render/intersection.rs", "rank": 50, "score": 339681.0515371788 }, { "content": "pub fn challenges_picker(ctx: &mut EventCtx, app: &mut App) -> Box<dyn State> {\n\n Tab::NothingChosen.make(ctx, app)\n\n}\n\n\n", "file_path": "game/src/challenges.rs", "rank": 51, "score": 334176.7521298529 }, { "content": "fn make(ctx: &mut EventCtx, app: &App, opts: &Options) -> Composite {\n\n // Only make one pass through prebaked data\n\n let trip_times_before = if app.has_prebaked().is_some() {\n\n let mut times = HashMap::new();\n\n for (_, id, maybe_mode, dt) in &app.prebaked().finished_trips {\n\n if maybe_mode.is_some() {\n\n times.insert(*id, *dt);\n\n }\n\n }\n\n Some(times)\n\n } else {\n\n None\n\n };\n\n\n\n // Gather raw data\n\n let mut data = Vec::new();\n\n let sim = &app.primary.sim;\n\n let mut aborted = 0;\n\n for (_, id, maybe_mode, duration_after) in &sim.get_analytics().finished_trips {\n\n let mode = if let Some(m) = maybe_mode {\n", "file_path": "game/src/sandbox/dashboards/trip_table.rs", "rank": 52, "score": 330433.09336744674 }, { "content": "fn make(ctx: &mut EventCtx, app: &App, opts: &Options) -> Composite {\n\n // Gather raw data\n\n let mut data = Vec::new();\n\n for (id, phases) in app.primary.sim.get_analytics().get_all_trip_phases() {\n\n let (_, start, end, _) = app.primary.sim.trip_info(id);\n\n if !opts.off_map_starts {\n\n if let TripEndpoint::Border(_, _) = start {\n\n continue;\n\n }\n\n }\n\n if !opts.off_map_ends {\n\n if let 
TripEndpoint::Border(_, _) = end {\n\n continue;\n\n }\n\n }\n\n\n\n let mut total_duration = Duration::ZERO;\n\n let mut driving_duration = Duration::ZERO;\n\n let mut parking_duration = Duration::ZERO;\n\n let mut walking_duration = Duration::ZERO;\n", "file_path": "game/src/sandbox/dashboards/parking_overhead.rs", "rank": 53, "score": 330433.09336744674 }, { "content": "pub fn hotkeys(keys: Vec<Key>) -> Option<MultiKey> {\n\n Some(MultiKey::Any(keys))\n\n}\n", "file_path": "ezgui/src/event.rs", "rank": 54, "score": 325506.27748654084 }, { "content": "fn make_elevation(ctx: &EventCtx, color: Color, walking: bool, path: &Path, map: &Map) -> Widget {\n\n let mut pts: Vec<(Distance, Distance)> = Vec::new();\n\n let mut dist = Distance::ZERO;\n\n for step in path.get_steps() {\n\n if let PathStep::Turn(t) = step {\n\n pts.push((dist, map.get_i(t.parent).elevation));\n\n }\n\n dist += step.as_traversable().length(map);\n\n }\n\n // TODO Plot needs to support Distance as both X and Y axis. :P\n\n // TODO Show roughly where we are in the trip; use distance covered by current path for this\n\n LinePlot::new(\n\n ctx,\n\n vec![Series {\n\n label: if walking {\n\n \"Elevation for walking\"\n\n } else {\n\n \"Elevation for biking\"\n\n }\n\n .to_string(),\n", "file_path": "game/src/info/trip.rs", "rank": 55, "score": 324450.6916012812 }, { "content": "fn challenge_header(ctx: &mut EventCtx, title: &str) -> Widget {\n\n Widget::row(vec![\n\n Line(title).small_heading().draw(ctx).centered_vert(),\n\n Btn::svg_def(\"../data/system/assets/tools/info.svg\")\n\n .build(ctx, \"instructions\", None)\n\n .centered_vert(),\n\n Widget::draw_batch(\n\n ctx,\n\n GeomBatch::from(vec![(Color::WHITE, Polygon::rectangle(2.0, 50.0))]),\n\n ),\n\n Btn::svg_def(\"../data/system/assets/tools/edit_map.svg\")\n\n .build(ctx, \"edit map\", lctrl(Key::E))\n\n .centered_vert(),\n\n ])\n\n .padding(5)\n\n}\n\n\n\npub struct FinalScore {\n\n composite: Composite,\n\n retry: GameplayMode,\n", 
"file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 56, "score": 323976.1014453159 }, { "content": "fn make_load_edits(app: &App, mode: GameplayMode) -> Box<dyn State> {\n\n let current_edits_name = app.primary.map.get_edits().edits_name.clone();\n\n\n\n WizardState::new(Box::new(move |wiz, ctx, app| {\n\n let (_, new_edits) = wiz.wrap(ctx).choose(\"Load which edits?\", || {\n\n let mut list = Choice::from(\n\n abstutil::load_all_objects(abstutil::path_all_edits(app.primary.map.get_name()))\n\n .into_iter()\n\n .chain(abstutil::load_all_objects::<PermanentMapEdits>(\n\n \"../data/system/proposals\".to_string(),\n\n ))\n\n .filter_map(|(path, perma)| {\n\n PermanentMapEdits::from_permanent(perma, &app.primary.map)\n\n .map(|edits| (path, edits))\n\n .ok()\n\n })\n\n .filter(|(_, edits)| {\n\n mode.allows(edits) && edits.edits_name != current_edits_name\n\n })\n\n .collect(),\n\n );\n\n list.push(Choice::new(\"start over with blank edits\", MapEdits::new()));\n\n list\n\n })?;\n\n apply_map_edits(ctx, app, new_edits);\n\n Some(Transition::Pop)\n\n }))\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 57, "score": 322618.5798339637 }, { "content": "pub fn make_top_panel(ctx: &mut EventCtx, app: &App, can_undo: bool, can_redo: bool) -> Composite {\n\n let row = vec![\n\n Btn::text_fg(\"Finish\").build_def(ctx, hotkey(Key::Escape)),\n\n Btn::text_fg(\"Preview\").build_def(ctx, lctrl(Key::P)),\n\n (if can_undo {\n\n Btn::svg_def(\"../data/system/assets/tools/undo.svg\").build(ctx, \"undo\", lctrl(Key::Z))\n\n } else {\n\n Widget::draw_svg_transform(\n\n ctx,\n\n \"../data/system/assets/tools/undo.svg\",\n\n RewriteColor::ChangeAll(Color::WHITE.alpha(0.5)),\n\n )\n\n })\n\n .centered_vert(),\n\n (if can_redo {\n\n Btn::svg_def(\"../data/system/assets/tools/redo.svg\").build(\n\n ctx,\n\n \"redo\",\n\n // TODO ctrl+shift+Z!\n\n lctrl(Key::Y),\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 58, "score": 321843.9120822147 }, { "content": "pub fn 
color_for_trip_phase(app: &App, tpt: TripPhaseType) -> Color {\n\n match tpt {\n\n TripPhaseType::Driving => app.cs.unzoomed_car,\n\n TripPhaseType::Walking => app.cs.unzoomed_pedestrian,\n\n TripPhaseType::Biking => app.cs.bike_lane,\n\n TripPhaseType::Parking => app.cs.parking_trip,\n\n TripPhaseType::WaitingForBus(_, _) => app.cs.bus_layer,\n\n TripPhaseType::RidingBus(_, _, _) => app.cs.bus_lane,\n\n TripPhaseType::Aborted | TripPhaseType::Finished => unreachable!(),\n\n TripPhaseType::DelayedStart => Color::YELLOW,\n\n TripPhaseType::Remote => Color::PINK,\n\n }\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 59, "score": 321305.62840239645 }, { "content": "// TODO This needs to update turn restrictions too\n\npub fn clip_map(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"clipping map to boundary\");\n\n\n\n // So we can use retain_btreemap without borrowing issues\n\n let boundary_polygon = map.boundary_polygon.clone();\n\n let boundary_ring = Ring::new(boundary_polygon.points().clone());\n\n\n\n // This is kind of indirect and slow, but first pass -- just remove roads that start or end\n\n // outside the boundary polygon.\n\n retain_btreemap(&mut map.roads, |_, r| {\n\n let first_in = boundary_polygon.contains_pt(r.center_points[0]);\n\n let last_in = boundary_polygon.contains_pt(*r.center_points.last().unwrap());\n\n first_in || last_in || r.is_light_rail()\n\n });\n\n\n\n // First pass: Clip roads beginning out of bounds\n\n let road_ids: Vec<OriginalRoad> = map.roads.keys().cloned().collect();\n\n for id in road_ids {\n\n let r = &map.roads[&id];\n\n if map.boundary_polygon.contains_pt(r.center_points[0]) {\n", "file_path": "convert_osm/src/clip.rs", "rank": 60, "score": 320916.70708053024 }, { "content": "fn make_controls(ctx: &mut EventCtx) -> Composite {\n\n Composite::new(\n\n Widget::col(vec![\n\n Text::from_multiline(vec![\n\n Line(\"ezgui demo\").small_heading(),\n\n Line(\"Click and drag to pan, use touchpad or scroll wheel to 
zoom\"),\n\n ])\n\n .draw(ctx),\n\n Widget::row(vec![\n\n // This just cycles between two arbitrary buttons\n\n Checkbox::new(\n\n false,\n\n Btn::text_bg1(\"Pause\").build(ctx, \"pause the stopwatch\", hotkey(Key::Space)),\n\n Btn::text_bg1(\"Resume\").build(ctx, \"resume the stopwatch\", hotkey(Key::Space)),\n\n )\n\n .named(\"paused\"),\n\n Btn::text_fg(\"Reset timer\").build(ctx, \"reset the stopwatch\", None),\n\n Btn::text_fg(\"New faces\").build(ctx, \"generate new faces\", hotkey(Key::F)),\n\n Checkbox::text(ctx, \"Draw scrollable canvas\", None, true),\n\n Checkbox::text(ctx, \"Show timeseries\", lctrl(Key::T), false),\n", "file_path": "ezgui/examples/demo.rs", "rank": 61, "score": 320265.3644429615 }, { "content": "fn passenger_delay(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusRouteID) -> Widget {\n\n let route = app.primary.map.get_br(id);\n\n let mut master_col = vec![Line(\"Passengers waiting\").small_heading().draw(ctx)];\n\n let mut col = Vec::new();\n\n\n\n let mut delay_per_stop = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .bus_passenger_delays(app.primary.sim.time(), id)\n\n .collect::<BTreeMap<_, _>>();\n\n for idx in 0..route.stops.len() {\n\n col.push(Widget::row(vec![\n\n format!(\"Stop {}\", idx + 1).draw_text(ctx),\n\n Btn::svg(\n\n \"../data/system/assets/tools/pin.svg\",\n\n RewriteColor::Change(Color::hex(\"#CC4121\"), app.cs.hovering),\n\n )\n\n .build(ctx, format!(\"Stop {}\", idx + 1), None),\n\n if let Some(hgram) = delay_per_stop.remove(&route.stops[idx]) {\n", "file_path": "game/src/info/bus.rs", "rank": 62, "score": 318898.77191691974 }, { "content": "fn preview_route(g: &mut GfxCtx, app: &App, trip: TripID) -> GeomBatch {\n\n let mut batch = GeomBatch::new();\n\n for p in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .get_trip_phases(trip, &app.primary.map)\n\n {\n\n if let Some((dist, ref path)) = p.path {\n\n if let Some(trace) = path.trace(&app.primary.map, dist, None) {\n\n batch.push(\n\n 
color_for_trip_phase(app, p.phase_type),\n\n trace.make_polygons(Distance::meters(20.0)),\n\n );\n\n }\n\n }\n\n }\n\n\n\n let (_, start, end, _) = app.primary.sim.trip_info(trip);\n\n batch.append(\n", "file_path": "game/src/sandbox/dashboards/trip_table.rs", "rank": 63, "score": 315874.45414360793 }, { "content": "pub fn adjust_private_parking(map: &mut Map, scenario: &Scenario) {\n\n for (b, count) in scenario.count_parked_cars_per_bldg().consume() {\n\n map.hack_override_offstreet_spots_individ(b, count);\n\n }\n\n map.save();\n\n}\n", "file_path": "importer/src/seattle.rs", "rank": 64, "score": 312953.8604868052 }, { "content": "fn make_panel(ctx: &mut EventCtx, app: &App) -> Composite {\n\n Composite::new(\n\n Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Geometry debugger\").small_heading().draw(ctx),\n\n Btn::text_fg(\"X\")\n\n .build(ctx, \"close\", hotkey(Key::Escape))\n\n .align_right(),\n\n ]),\n\n Widget::row(vec![\n\n // TODO inactive\n\n Btn::text_fg(\"<\").build(ctx, \"previous\", hotkey(Key::LeftArrow)),\n\n \"noun X/Y\".draw_text(ctx).named(\"pointer\"),\n\n Btn::text_fg(\">\").build(ctx, \"next\", hotkey(Key::RightArrow)),\n\n ])\n\n .evenly_spaced(),\n\n Slider::horizontal(ctx, 100.0, 25.0, 0.0)\n\n .named(\"slider\")\n\n .centered_horiz(),\n\n ])\n\n .bg(app.cs.panel_bg)\n\n .padding(16),\n\n )\n\n .aligned(HorizontalAlignment::Center, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n", "file_path": "game/src/debug/polygons.rs", "rank": 65, "score": 311816.8171956679 }, { "content": "pub fn remove_disconnected_roads(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"removing disconnected roads\");\n\n // This is a simple floodfill, not Tarjan's. 
Assumes all roads bidirectional.\n\n // All the usizes are indices into the original list of roads\n\n\n\n let mut next_roads: MultiMap<OriginalIntersection, OriginalRoad> = MultiMap::new();\n\n for id in map.roads.keys() {\n\n next_roads.insert(id.i1, *id);\n\n next_roads.insert(id.i2, *id);\n\n }\n\n\n\n let mut partitions: Vec<Vec<OriginalRoad>> = Vec::new();\n\n let mut unvisited_roads: BTreeSet<OriginalRoad> = map\n\n .roads\n\n .iter()\n\n .filter_map(|(id, r)| if r.is_light_rail() { None } else { Some(*id) })\n\n .collect();\n\n\n\n while !unvisited_roads.is_empty() {\n\n let mut queue_roads: Vec<OriginalRoad> = vec![*unvisited_roads.iter().next().unwrap()];\n", "file_path": "map_model/src/make/remove_disconnected.rs", "rank": 66, "score": 311659.54514598905 }, { "content": "fn calc_all_routes(ctx: &EventCtx, app: &mut App) -> (usize, Drawable) {\n\n let agents = app.primary.sim.active_agents();\n\n let mut batch = GeomBatch::new();\n\n let mut cnt = 0;\n\n let sim = &app.primary.sim;\n\n let map = &app.primary.map;\n\n for maybe_trace in\n\n Timer::new(\"calculate all routes\").parallelize(\"route to geometry\", agents, |id| {\n\n sim.trace_route(id, map, None)\n\n .map(|trace| trace.make_polygons(NORMAL_LANE_THICKNESS))\n\n })\n\n {\n\n if let Some(t) = maybe_trace {\n\n cnt += 1;\n\n batch.push(app.cs.route, t);\n\n }\n\n }\n\n (cnt, ctx.upload(batch))\n\n}\n\n\n", "file_path": "game/src/debug/mod.rs", "rank": 67, "score": 302854.0157701216 }, { "content": "pub fn fix_bus_route(map: &Map, r: &mut BusRoute) -> bool {\n\n // Trim out stops if needed; map borders sometimes mean some paths don't work.\n\n let mut stops = Vec::new();\n\n for stop in r.stops.drain(..) 
{\n\n if stops.is_empty() {\n\n stops.push(stop);\n\n } else {\n\n if check_stops(*stops.last().unwrap(), stop, map) {\n\n stops.push(stop);\n\n }\n\n }\n\n }\n\n // Don't forget the last and first\n\n while stops.len() >= 2 {\n\n if check_stops(*stops.last().unwrap(), stops[0], map) {\n\n break;\n\n }\n\n // TODO Or the front one\n\n stops.pop();\n\n }\n\n r.stops = stops;\n\n r.stops.len() >= 2\n\n}\n\n\n", "file_path": "map_model/src/make/bus_stops.rs", "rank": 68, "score": 300981.1237003909 }, { "content": "// TODO Can we automatically transform text and SVG colors?\n\nfn cutscene_pt1_task(ctx: &mut EventCtx) -> Widget {\n\n Widget::custom_col(vec![\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"Don't let anyone be delayed by one traffic signal more than {}!\",\n\n THRESHOLD\n\n ))\n\n .fg(Color::BLACK),\n\n Line(\"Survive as long as possible through 24 hours of a busy weekday.\")\n\n .fg(Color::BLACK),\n\n ])\n\n .draw(ctx)\n\n .margin_below(30),\n\n Widget::custom_row(vec![\n\n Widget::col(vec![\n\n Line(\"Time\").fg(Color::BLACK).draw(ctx),\n\n Widget::draw_svg_transform(\n\n ctx,\n\n \"../data/system/assets/tools/time.svg\",\n\n RewriteColor::ChangeAll(Color::BLACK),\n", "file_path": "game/src/sandbox/gameplay/fix_traffic_signals.rs", "rank": 69, "score": 292308.23755289137 }, { "content": "fn intro_story(ctx: &mut EventCtx, app: &App) -> Box<dyn State> {\n\n CutsceneBuilder::new(\"Introduction\")\n\n .boss(\n\n \"Argh, the mayor's on my case again about the West Seattle bridge. This day couldn't \\\n\n get any worse.\",\n\n )\n\n .player(\"Er, hello? Boss? I'm --\")\n\n .boss(\"Yet somehow it did.. You're the new recruit. Yeah, yeah. Come in.\")\n\n .boss(\n\n \"Due to budget cuts, we couldn't hire a real traffic engineer, so we just called some \\\n\n know-it-all from Reddit who seems to think they can fix Seattle traffic.\",\n\n )\n\n .player(\"Yes, hi, my name is --\")\n\n .boss(\"We can't afford name-tags, didn't you hear, budget cuts? 
Your name doesn't matter.\")\n\n .player(\"What about my Insta handle?\")\n\n .boss(\"-glare-\")\n\n .boss(\n\n \"Look, you think fixing traffic is easy? Hah! You can't fix one intersection without \\\n\n breaking ten more.\",\n\n )\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 70, "score": 291615.4696956923 }, { "content": "// Returns list of (driving lane, redirect here instead for parking)\n\n//\n\n// It's a bit weird to never attempt parking on roads not part of the largest SCC of the graph.\n\n// This is acceptable, because there shouldn't be too many roads outside of that SCC anyway.\n\npub fn redirect_parking_blackholes(map: &Map, timer: &mut Timer) -> Vec<(LaneID, LaneID)> {\n\n let (largest_group, disconnected) = find_scc(map, PathConstraints::Car);\n\n\n\n let mut redirects = Vec::new();\n\n timer.start_iter(\"find parking blackhole redirects\", disconnected.len());\n\n for l in disconnected {\n\n timer.next();\n\n\n\n // Search forwards and backwards for the nearest driving lane belonging to largest_group.\n\n if let Some(redirect) = bidi_flood(map, l, &largest_group) {\n\n redirects.push((l, redirect));\n\n } else {\n\n // TODO Make this an error after dealing with places like Austin without much parking\n\n // in the first place.\n\n timer.warn(format!(\n\n \"{} is a parking blackhole with no reasonable redirect!\",\n\n l\n\n ));\n\n }\n\n }\n\n timer.note(format!(\n\n \"{} driving lanes are parking blackholes\",\n\n redirects.len()\n\n ));\n\n redirects\n\n}\n\n\n", "file_path": "map_model/src/connectivity.rs", "rank": 71, "score": 286120.34604798106 }, { "content": "fn header(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID, tab: Tab) -> Vec<Widget> {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let label = if l.is_sidewalk() { \"Sidewalk\" } else { \"Lane\" };\n\n rows.push(Widget::row(vec![\n\n Line(format!(\"{} #{}\", label, 
id.0))\n\n .small_heading()\n\n .draw(ctx),\n\n header_btns(ctx),\n\n ]));\n\n rows.push(format!(\"@ {}\", r.get_name()).draw_text(ctx));\n\n\n\n let mut tabs = vec![(\"Info\", Tab::LaneInfo(id))];\n\n if !l.is_parking() {\n\n tabs.push((\"Traffic\", Tab::LaneTraffic(id, DataOptions::new())));\n\n }\n\n if app.opts.dev {\n\n tabs.push((\"Debug\", Tab::LaneDebug(id)));\n\n }\n\n rows.push(make_tabs(ctx, &mut details.hyperlinks, tab, tabs));\n\n\n\n rows\n\n}\n", "file_path": "game/src/info/lane.rs", "rank": 72, "score": 285501.12310552225 }, { "content": "// TODO Temporarily public for debugging.\n\n// TODO This should just draw the turn geometry thickened, once that's stable.\n\npub fn calculate_corners(i: &Intersection, map: &Map, timer: &mut Timer) -> Vec<Polygon> {\n\n let mut corners = Vec::new();\n\n\n\n for turn in map.get_turns_in_intersection(i.id) {\n\n if turn.turn_type == TurnType::SharedSidewalkCorner {\n\n // Avoid double-rendering\n\n if map.get_l(turn.id.src).dst_i != i.id {\n\n continue;\n\n }\n\n let width = map.get_l(turn.id.src).width;\n\n\n\n // Special case for dead-ends: just thicken the geometry.\n\n if i.roads.len() == 1 {\n\n corners.push(turn.geom.make_polygons(width));\n\n continue;\n\n }\n\n\n\n let l1 = map.get_l(turn.id.src);\n\n let l2 = map.get_l(turn.id.dst);\n\n\n", "file_path": "game/src/render/intersection.rs", "rank": 73, "score": 284444.34366199624 }, { "content": "pub fn make_legend<T: Yvalue<T>>(\n\n ctx: &EventCtx,\n\n series: &Vec<Series<T>>,\n\n opts: &PlotOptions<T>,\n\n) -> Widget {\n\n let mut row = Vec::new();\n\n let mut seen = HashSet::new();\n\n for s in series {\n\n if seen.contains(&s.label) {\n\n continue;\n\n }\n\n seen.insert(s.label.clone());\n\n if opts.filterable {\n\n row.push(Widget::row(vec![\n\n Checkbox::colored(ctx, &s.label, s.color, !opts.disabled.contains(&s.label)),\n\n Line(&s.label).draw(ctx),\n\n ]));\n\n } else {\n\n let radius = 15.0;\n\n row.push(Widget::row(vec![\n", "file_path": 
"ezgui/src/widgets/line_plot.rs", "rank": 74, "score": 278558.2227366552 }, { "content": "fn use_elevation(map: &mut RawMap, path: &str, timer: &mut Timer) {\n\n timer.start(\"apply elevation data to intersections\");\n\n let elevation = srtm::Elevation::load(path).unwrap();\n\n for i in map.intersections.values_mut() {\n\n i.elevation = elevation.get(i.point.forcibly_to_gps(&map.gps_bounds));\n\n }\n\n timer.stop(\"apply elevation data to intersections\");\n\n}\n", "file_path": "convert_osm/src/lib.rs", "rank": 75, "score": 278175.1219996932 }, { "content": "pub fn list_names<F: Fn(TextSpan) -> TextSpan>(txt: &mut Text, styler: F, names: BTreeSet<String>) {\n\n let len = names.len();\n\n for (idx, n) in names.into_iter().enumerate() {\n\n if idx != 0 {\n\n if idx == len - 1 {\n\n if len == 2 {\n\n txt.append(Line(\" and \"));\n\n } else {\n\n txt.append(Line(\", and \"));\n\n }\n\n } else {\n\n txt.append(Line(\", \"));\n\n }\n\n }\n\n txt.append(styler(Line(n)));\n\n }\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 76, "score": 275032.3305796656 }, { "content": "fn use_sidewalk_hints(map: &mut RawMap, path: String, timer: &mut Timer) {\n\n timer.start(\"apply sidewalk hints\");\n\n let shapes: ExtraShapes = abstutil::read_binary(path, timer);\n\n\n\n // Match shapes with the nearest road + direction (true for forwards)\n\n let mut closest: FindClosest<(OriginalRoad, bool)> =\n\n FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, r) in &map.roads {\n\n if r.is_light_rail() {\n\n continue;\n\n }\n\n let center = PolyLine::new(r.center_points.clone());\n\n closest.add(\n\n (*id, true),\n\n map.driving_side\n\n .right_shift(center.clone(), DIRECTED_ROAD_THICKNESS)\n\n .get(timer)\n\n .points(),\n\n );\n\n closest.add(\n", "file_path": "convert_osm/src/lib.rs", "rank": 77, "score": 274977.9206873775 }, { "content": "fn use_parking_hints(map: &mut RawMap, path: String, timer: &mut Timer) {\n\n timer.start(\"apply parking hints\");\n\n let shapes: 
ExtraShapes = abstutil::read_binary(path, timer);\n\n\n\n // Match shapes with the nearest road + direction (true for forwards)\n\n let mut closest: FindClosest<(OriginalRoad, bool)> =\n\n FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, r) in &map.roads {\n\n if r.is_light_rail() {\n\n continue;\n\n }\n\n let center = PolyLine::new(r.center_points.clone());\n\n closest.add(\n\n (*id, true),\n\n map.driving_side\n\n .right_shift(center.clone(), DIRECTED_ROAD_THICKNESS)\n\n .get(timer)\n\n .points(),\n\n );\n\n closest.add(\n", "file_path": "convert_osm/src/lib.rs", "rank": 78, "score": 274977.9206873775 }, { "content": "fn use_offstreet_parking(map: &mut RawMap, path: String, timer: &mut Timer) {\n\n timer.start(\"match offstreet parking points\");\n\n let shapes: ExtraShapes = abstutil::read_binary(path, timer);\n\n\n\n let mut closest: FindClosest<OriginalBuilding> = FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, b) in &map.buildings {\n\n closest.add(*id, b.polygon.points());\n\n }\n\n\n\n // TODO Another function just to use ?. 
Try blocks would rock.\n\n let mut handle_shape: Box<dyn FnMut(kml::ExtraShape) -> Option<()>> = Box::new(|s| {\n\n assert_eq!(s.points.len(), 1);\n\n let pt = Pt2D::from_gps(s.points[0], &map.gps_bounds)?;\n\n let (id, _) = closest.closest_pt(pt, Distance::meters(50.0))?;\n\n // TODO Handle parking lots.\n\n if !map.buildings[&id].polygon.contains_pt(pt) {\n\n return None;\n\n }\n\n let name = s.attributes.get(\"DEA_FACILITY_NAME\")?.to_string();\n\n let num_stalls = s.attributes.get(\"DEA_STALLS\")?.parse::<usize>().ok()?;\n", "file_path": "convert_osm/src/lib.rs", "rank": 79, "score": 274977.9206873775 }, { "content": "// Converts a RawMap to a Map.\n\npub fn raw_to_map(name: &str, build_ch: bool, timer: &mut Timer) -> map_model::Map {\n\n timer.start(format!(\"Raw->Map for {}\", name));\n\n let raw: map_model::raw::RawMap = abstutil::read_binary(abstutil::path_raw_map(name), timer);\n\n let map = map_model::Map::create_from_raw(raw, build_ch, timer);\n\n timer.start(\"save map\");\n\n map.save();\n\n timer.stop(\"save map\");\n\n timer.stop(format!(\"Raw->Map for {}\", name));\n\n\n\n // TODO Just sticking this here for now\n\n if map.get_name() == \"huge_seattle\" {\n\n timer.start(\"generating city manifest\");\n\n abstutil::write_binary(\n\n format!(\"../data/system/cities/{}.bin\", map.get_city_name()),\n\n &map_model::City::new(&map),\n\n );\n\n timer.stop(\"generating city manifest\");\n\n }\n\n\n\n map\n\n}\n", "file_path": "importer/src/utils.rs", "rank": 80, "score": 274076.54015765956 }, { "content": "// TODO Lots of false positives here... 
why?\n\nfn find_overlapping_stuff(app: &App, timer: &mut Timer) -> Vec<Polygon> {\n\n let map = &app.primary.map;\n\n let mut closest: FindClosest<RoadID> = FindClosest::new(map.get_bounds());\n\n for r in map.all_roads() {\n\n if r.osm_tags.contains_key(\"tunnel\") {\n\n continue;\n\n }\n\n closest.add(r.id, r.center_pts.points());\n\n }\n\n\n\n let mut polygons = Vec::new();\n\n\n\n timer.start_iter(\"check buildings\", map.all_buildings().len());\n\n for b in map.all_buildings() {\n\n timer.next();\n\n for (r, _, _) in closest.all_close_pts(b.label_center, Distance::meters(500.0)) {\n\n if !b\n\n .polygon\n\n .intersection(&map.get_r(r).get_thick_polygon(map).unwrap())\n\n .is_empty()\n", "file_path": "game/src/devtools/mapping.rs", "rank": 81, "score": 271480.1557325693 }, { "content": "pub fn draw_occupants(details: &mut Details, app: &App, id: BuildingID, focus: Option<PersonID>) {\n\n // TODO Lots of fun ideas here. Have a deterministic simulation based on building ID and time\n\n // to have people \"realistically\" move around. Draw little floor plans.\n\n\n\n let mut ppl = app.primary.sim.bldg_to_people(id);\n\n let num_rows_cols = (ppl.len() as f64).sqrt().ceil() as usize;\n\n\n\n let ped_len = SIDEWALK_THICKNESS.inner_meters() / 2.0;\n\n let separation = ped_len * 1.5;\n\n\n\n let total_width_height = (num_rows_cols as f64) * (ped_len + separation);\n\n let top_left = app\n\n .primary\n\n .map\n\n .get_b(id)\n\n .label_center\n\n .offset(-total_width_height / 2.0, -total_width_height / 2.0);\n\n\n\n // TODO Current thing is inefficient and can easily wind up outside the building.\n\n\n", "file_path": "game/src/info/building.rs", "rank": 82, "score": 269457.48373173113 }, { "content": "/// Create a new widget by implementing this trait. 
You can instantiate your widget by calling\n\n/// `Widget::new(Box::new(instance of your new widget))`, which gives you the usual style options.\n\npub trait WidgetImpl: downcast_rs::Downcast {\n\n /// What width and height does the widget occupy? If this changes, be sure to set\n\n /// `redo_layout` to true in `event`.\n\n fn get_dims(&self) -> ScreenDims;\n\n /// Your widget's top left corner should be here. Handle mouse events and draw appropriately.\n\n fn set_pos(&mut self, top_left: ScreenPt);\n\n /// Your chance to react to an event. Any side effects outside of this widget are communicated\n\n /// through the output.\n\n fn event(&mut self, ctx: &mut EventCtx, output: &mut WidgetOutput);\n\n /// Draw the widget. Be sure to draw relative to the top-left specified by `set_pos`.\n\n fn draw(&self, g: &mut GfxCtx);\n\n /// If a new Composite is being created to replace an older one, all widgets have the chance to\n\n /// preserve state from the previous version.\n\n fn can_restore(&self) -> bool {\n\n false\n\n }\n\n /// Restore state from the previous version of this widget, with the same ID. 
Implementors must\n\n /// downcast.\n\n fn restore(&mut self, _ctx: &mut EventCtx, _prev: &Box<dyn WidgetImpl>) {\n\n unreachable!()\n", "file_path": "ezgui/src/widgets/mod.rs", "rank": 83, "score": 268610.65018706897 }, { "content": "fn edit_entire_signal(app: &App, i: IntersectionID, mode: GameplayMode) -> Box<dyn State> {\n\n let has_sidewalks = app\n\n .primary\n\n .map\n\n .get_turns_in_intersection(i)\n\n .any(|t| t.between_sidewalks());\n\n let current_offset = app.primary.map.get_traffic_signal(i).offset;\n\n\n\n WizardState::new(Box::new(move |wiz, ctx, app| {\n\n let use_template = \"use template\";\n\n let all_walk = \"add an all-walk phase at the end\";\n\n let stop_sign = \"convert to stop signs\";\n\n let close = \"close intersection for construction\";\n\n let offset = \"edit signal offset\";\n\n let reset = \"reset to default\";\n\n\n\n let mut choices = vec![use_template];\n\n if has_sidewalks {\n\n choices.push(all_walk);\n\n }\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 84, "score": 267509.995537706 }, { "content": "fn pos(endpt: TripEndpoint, mode: TripMode, from: bool, map: &Map) -> Option<Position> {\n\n match endpt {\n\n TripEndpoint::Bldg(b) => match mode {\n\n TripMode::Walk | TripMode::Transit => Some(map.get_b(b).front_path.sidewalk),\n\n TripMode::Bike => Some(DrivingGoal::ParkNear(b).goal_pos(PathConstraints::Bike, map)),\n\n TripMode::Drive => Some(DrivingGoal::ParkNear(b).goal_pos(PathConstraints::Car, map)),\n\n },\n\n TripEndpoint::Border(i, _) => match mode {\n\n TripMode::Walk | TripMode::Transit => if from {\n\n SidewalkSpot::start_at_border(i, None, map)\n\n } else {\n\n SidewalkSpot::end_at_border(i, None, map)\n\n }\n\n .map(|spot| spot.sidewalk_pos),\n\n TripMode::Bike | TripMode::Drive => (if from {\n\n map.get_i(i).some_outgoing_road(map)\n\n } else {\n\n map.get_i(i).some_incoming_road(map)\n\n })\n\n .and_then(|dr| {\n", "file_path": "game/src/sandbox/gameplay/freeform.rs", "rank": 85, "score": 
266409.79222114745 }, { "content": "fn current_status(ctx: &EventCtx, person: &Person, map: &Map) -> Widget {\n\n (match person.state {\n\n PersonState::Inside(b) => {\n\n // TODO hyperlink\n\n format!(\"Currently inside {}\", map.get_b(b).address).draw_text(ctx)\n\n }\n\n PersonState::Trip(_) => unreachable!(),\n\n PersonState::OffMap => \"Currently outside the map boundaries\".draw_text(ctx),\n\n })\n\n .margin_vert(16)\n\n}\n\n\n", "file_path": "game/src/info/person.rs", "rank": 86, "score": 262143.66425531855 }, { "content": "fn prebake(map: &Map, scenario: Scenario, time_limit: Option<Duration>, timer: &mut Timer) {\n\n timer.start(format!(\n\n \"prebake for {} / {}\",\n\n scenario.map_name, scenario.scenario_name\n\n ));\n\n\n\n let mut opts = SimOptions::new(\"prebaked\");\n\n opts.alerts = AlertHandler::Silence;\n\n let mut sim = Sim::new(&map, opts, timer);\n\n // Bit of an abuse of this, but just need to fix the rng seed.\n\n let mut rng = SimFlags::for_test(\"prebaked\").make_rng();\n\n scenario.instantiate(&mut sim, &map, &mut rng, timer);\n\n if let Some(dt) = time_limit {\n\n sim.timed_step(&map, dt, &mut None, timer);\n\n } else {\n\n sim.timed_step(\n\n &map,\n\n sim.get_end_of_day() - Time::START_OF_DAY,\n\n &mut None,\n\n timer,\n", "file_path": "game/src/challenges.rs", "rank": 87, "score": 258333.2823624977 }, { "content": "fn use_amenities(map: &mut RawMap, amenities: Vec<(Pt2D, String, String)>, timer: &mut Timer) {\n\n let mut closest: FindClosest<OriginalBuilding> = FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, b) in &map.buildings {\n\n closest.add(*id, b.polygon.points());\n\n }\n\n\n\n timer.start_iter(\"match building amenities\", amenities.len());\n\n for (pt, name, amenity) in amenities {\n\n timer.next();\n\n if let Some((id, _)) = closest.closest_pt(pt, Distance::meters(50.0)) {\n\n let b = map.buildings.get_mut(&id).unwrap();\n\n if b.polygon.contains_pt(pt) {\n\n b.amenities.insert((name, amenity));\n\n }\n\n }\n\n 
}\n\n}\n\n\n", "file_path": "convert_osm/src/lib.rs", "rank": 88, "score": 257093.43947252806 }, { "content": "fn make_rainbow_crosswalk(batch: &mut GeomBatch, turn: &Turn, map: &Map) -> bool {\n\n // TODO The crosswalks aren't tagged in OSM yet. Manually hardcoding some now.\n\n let node = map.get_i(turn.id.parent).orig_id.osm_node_id;\n\n let way = map.get_parent(turn.id.src).orig_id.osm_way_id;\n\n match (node, way) {\n\n // Broadway and Pine\n\n (53073255, 428246441) |\n\n (53073255, 332601014) |\n\n // Broadway and Pike\n\n (53073254, 6447455) |\n\n (53073254, 607690679) |\n\n // 10th and Pine\n\n (53168934, 6456052) |\n\n // 10th and Pike\n\n (53200834, 6456052) |\n\n // 11th and Pine\n\n (53068795, 607691081) |\n\n (53068795, 65588105) |\n\n // 11th and Pike\n\n (53068794, 65588105) => {}\n", "file_path": "game/src/render/intersection.rs", "rank": 89, "score": 252513.45298807125 }, { "content": "pub fn run<G: 'static + GUI, F: FnOnce(&mut EventCtx) -> G>(settings: Settings, make_gui: F) -> ! 
{\n\n let (prerender_innards, event_loop, window_size) =\n\n crate::backend::setup(&settings.window_title);\n\n\n\n let mut canvas = Canvas::new(window_size.width, window_size.height);\n\n prerender_innards.window_resized(canvas.window_width, canvas.window_height);\n\n if let Some(ref path) = settings.window_icon {\n\n let image = image::open(path).unwrap();\n\n let (width, height) = image.dimensions();\n\n let mut rgba = Vec::with_capacity((width * height) as usize * 4);\n\n for (_, _, pixel) in image.pixels() {\n\n rgba.extend_from_slice(&pixel.to_rgba().0);\n\n }\n\n let icon = Icon::from_rgba(rgba, width, height).unwrap();\n\n prerender_innards.set_window_icon(icon);\n\n }\n\n let prerender = Prerender {\n\n assets: Assets::new(\n\n settings.default_font_size,\n\n settings.font_dir,\n", "file_path": "ezgui/src/runner.rs", "rank": 91, "score": 251167.2776317821 }, { "content": "pub fn find_bridges(roads: &mut Vec<Road>, bounds: &Bounds, timer: &mut Timer) {\n\n let mut closest: FindClosest<RoadID> = FindClosest::new(bounds);\n\n let mut bridges = Vec::new();\n\n for r in roads.iter() {\n\n closest.add(r.id, r.center_pts.points());\n\n if r.osm_tags.contains_key(\"bridge\") {\n\n bridges.push(r.id);\n\n }\n\n }\n\n\n\n timer.start_iter(\"find roads underneath bridge\", bridges.len());\n\n for bridge in bridges {\n\n timer.next();\n\n let bridge_pts = roads[bridge.0].center_pts.clone();\n\n for (r, _, _) in closest.all_close_pts(bridge_pts.middle(), Distance::meters(500.0)) {\n\n if bridge != r && bridge_pts.intersection(&roads[r.0].center_pts).is_some() {\n\n if roads[r.0].zorder == 0 {\n\n roads[r.0].zorder = -1;\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "map_model/src/make/bridges.rs", "rank": 92, "score": 250966.61794736332 }, { "content": "fn run_experiment(map: &Map, sim: &mut Sim) {\n\n let timer = Timer::new(\"run sim until done\");\n\n sim.run_until_done(\n\n &map,\n\n |sim, _map| {\n\n // This'll run every 30 sim seconds\n\n if false {\n\n 
println!(\n\n \"At {}, {} infected\",\n\n sim.time(),\n\n prettyprint_usize(sim.get_pandemic_model().unwrap().count_infected())\n\n );\n\n }\n\n },\n\n None,\n\n );\n\n timer.done();\n\n println!(\"Done at {}\", sim.time());\n\n}\n", "file_path": "headless/src/main.rs", "rank": 93, "score": 249907.12879784429 }, { "content": "pub fn make_all_zones(map: &Map) -> Vec<Zone> {\n\n let mut queue = Vec::new();\n\n for r in map.all_roads() {\n\n if r.osm_tags.get(\"access\") == Some(&\"private\".to_string()) {\n\n queue.push(r.id);\n\n }\n\n }\n\n\n\n let mut zones = Vec::new();\n\n let mut seen = BTreeSet::new();\n\n while !queue.is_empty() {\n\n let start = queue.pop().unwrap();\n\n if seen.contains(&start) {\n\n continue;\n\n }\n\n let zone = floodfill(map, start, ZoneID(zones.len()));\n\n seen.extend(zone.members.clone());\n\n zones.push(zone);\n\n }\n\n\n\n zones\n\n}\n\n\n", "file_path": "map_model/src/make/zones.rs", "rank": 94, "score": 249494.15337252157 }, { "content": "// TODO a fan chart might be nicer\n\nfn delay_plot(ctx: &EventCtx, app: &App, i: IntersectionID, opts: &DataOptions) -> Widget {\n\n let data = if opts.show_before {\n\n app.prebaked()\n\n } else {\n\n app.primary.sim.get_analytics()\n\n };\n\n let mut by_mode: BTreeMap<TripMode, Vec<(Time, Duration)>> = TripMode::all()\n\n .into_iter()\n\n .map(|m| (m, Vec::new()))\n\n .collect();\n\n let limit = if opts.show_end_of_day {\n\n app.primary.sim.get_end_of_day()\n\n } else {\n\n app.primary.sim.time()\n\n };\n\n if let Some(list) = data.intersection_delays.get(&i) {\n\n for (t, dt, mode) in list {\n\n if *t > limit {\n\n break;\n\n }\n", "file_path": "game/src/info/intersection.rs", "rank": 95, "score": 246932.56573787308 }, { "content": "pub fn brute_force(map: &Map, i: IntersectionID) {\n\n let turn_groups: Vec<TurnGroup> = TurnGroup::for_i(i, map)\n\n .into_iter()\n\n .filter_map(|(id, tg)| if id.crosswalk { None } else { Some(tg) })\n\n .collect();\n\n let indices: Vec<usize> = 
(0..turn_groups.len()).collect();\n\n for num_phases in 1..=turn_groups.len() {\n\n println!(\n\n \"For {} turn groups, looking for solution with {} phases\",\n\n turn_groups.len(),\n\n num_phases\n\n );\n\n for partition in helper(&indices, num_phases) {\n\n if okay_partition(turn_groups.iter().collect(), partition) {\n\n return;\n\n }\n\n }\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "map_model/src/make/traffic_signals.rs", "rank": 96, "score": 246895.9622508281 }, { "content": "pub fn convert(opts: Options, timer: &mut abstutil::Timer) -> RawMap {\n\n let (mut map, amenities) = split_ways::split_up_roads(\n\n osm_reader::extract_osm(\n\n &opts.osm_input,\n\n &opts.clip,\n\n &opts.city_name,\n\n &opts.name,\n\n timer,\n\n ),\n\n timer,\n\n );\n\n clip::clip_map(&mut map, timer);\n\n map.driving_side = if opts.drive_on_right {\n\n DrivingSide::Right\n\n } else {\n\n DrivingSide::Left\n\n };\n\n\n\n // Need to do a first pass of removing cul-de-sacs here, or we wind up with loop PolyLines when\n\n // doing the parking hint matching.\n", "file_path": "convert_osm/src/lib.rs", "rank": 97, "score": 242450.34749122697 }, { "content": "fn rand_time(rng: &mut XorShiftRng, low: Time, high: Time) -> Time {\n\n assert!(high > low);\n\n Time::START_OF_DAY + Duration::seconds(rng.gen_range(low.inner_seconds(), high.inner_seconds()))\n\n}\n", "file_path": "sim/src/make/generator.rs", "rank": 98, "score": 241609.59239016776 }, { "content": "struct App {\n\n controls: Composite,\n\n timeseries_panel: Option<(Duration, Composite)>,\n\n scrollable_canvas: Drawable,\n\n elapsed: Duration,\n\n}\n\n\n\nimpl App {\n\n fn new(ctx: &mut EventCtx) -> App {\n\n App {\n\n controls: make_controls(ctx),\n\n timeseries_panel: None,\n\n scrollable_canvas: setup_scrollable_canvas(ctx),\n\n elapsed: Duration::ZERO,\n\n }\n\n }\n\n\n\n fn make_timeseries_panel(&self, ctx: &mut EventCtx) -> Composite {\n\n // Make a table with 3 columns.\n\n let mut col1 = 
vec![Line(\"Time\").draw(ctx)];\n", "file_path": "ezgui/examples/demo.rs", "rank": 99, "score": 241551.1077146271 } ]
Rust
src/sim2/training.rs
ryotaok/genshin
4f3778b787e59220853ae8cce4f7e7b94d34ac54
use crate::sim2::state::State; use crate::sim2::timeline::{Timeline}; use crate::sim2::attack::{WeaponAttack}; use crate::sim2::types::{WeaponType, SCORE}; use crate::sim2::record::{WeaponRecord, Artifact}; use WeaponType::*; pub enum TrainingWeaponUnion { TrainingSword(TrainingSword), TrainingClaymore(TrainingClaymore), TrainingPolearm(TrainingPolearm), TrainingBow(TrainingBow), TrainingCatalyst(TrainingCatalyst), } impl TrainingWeaponUnion { pub fn timeline(&mut self) -> &mut dyn Timeline { use TrainingWeaponUnion::*; match self { TrainingSword(x) => x, TrainingClaymore(x) => x, TrainingPolearm(x) => x, TrainingBow(x) => x, TrainingCatalyst(x) => x, } } pub fn field(&mut self) -> &mut dyn WeaponAttack { use TrainingWeaponUnion::*; match self { TrainingSword(x) => x, TrainingClaymore(x) => x, TrainingPolearm(x) => x, TrainingBow(x) => x, TrainingCatalyst(x) => x, } } } pub fn weapons() -> Vec<(WeaponRecord, TrainingWeaponUnion)> { vec![ (TrainingSword::record(), TrainingWeaponUnion::TrainingSword(TrainingSword)), (TrainingClaymore::record(), TrainingWeaponUnion::TrainingClaymore(TrainingClaymore)), (TrainingPolearm::record(), TrainingWeaponUnion::TrainingPolearm(TrainingPolearm)), (TrainingBow::record(), TrainingWeaponUnion::TrainingBow(TrainingBow)), (TrainingCatalyst::record(), TrainingWeaponUnion::TrainingCatalyst(TrainingCatalyst)), ] } #[derive(Debug)] pub struct TrainingSword; impl Timeline for TrainingSword {} impl WeaponAttack for TrainingSword {} impl TrainingSword { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Sword").type_(Sword).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingClaymore; impl Timeline for TrainingClaymore {} impl WeaponAttack for TrainingClaymore {} impl TrainingClaymore { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Claymore").type_(Claymore).version(1.0) .base_atk(608.) 
} } #[derive(Debug)] pub struct TrainingPolearm; impl Timeline for TrainingPolearm {} impl WeaponAttack for TrainingPolearm {} impl TrainingPolearm { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Polearm").type_(Polearm).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingBow; impl Timeline for TrainingBow {} impl WeaponAttack for TrainingBow {} impl TrainingBow { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Bow").type_(Bow).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingCatalyst; impl Timeline for TrainingCatalyst {} impl WeaponAttack for TrainingCatalyst {} impl TrainingCatalyst { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Catalyst").type_(Catalyst).version(1.0) .base_atk(608.) } } pub enum TrainingArtifactUnion { TrainingArtifact0(TrainingArtifact0), TrainingArtifact1(TrainingArtifact1), TrainingArtifact2(TrainingArtifact2), TrainingArtifact3(TrainingArtifact3), TrainingArtifact4(TrainingArtifact4), TrainingArtifact5(TrainingArtifact5), TrainingArtifact6(TrainingArtifact6), TrainingArtifact7(TrainingArtifact7), TrainingArtifact8(TrainingArtifact8), TrainingArtifact9(TrainingArtifact9), } impl TrainingArtifactUnion { pub fn timeline(&mut self) -> &mut dyn Timeline { use TrainingArtifactUnion::*; match self { TrainingArtifact0(x) => x, TrainingArtifact1(x) => x, TrainingArtifact2(x) => x, TrainingArtifact3(x) => x, TrainingArtifact4(x) => x, TrainingArtifact5(x) => x, TrainingArtifact6(x) => x, TrainingArtifact7(x) => x, TrainingArtifact8(x) => x, TrainingArtifact9(x) => x, } } pub fn field(&mut self) -> &mut dyn WeaponAttack { use TrainingArtifactUnion::*; match self { TrainingArtifact0(x) => x, TrainingArtifact1(x) => x, TrainingArtifact2(x) => x, TrainingArtifact3(x) => x, TrainingArtifact4(x) => x, TrainingArtifact5(x) => x, TrainingArtifact6(x) => x, TrainingArtifact7(x) => x, TrainingArtifact8(x) => x, TrainingArtifact9(x) => x, } } 
} pub fn artifacts() -> Vec<(Artifact, TrainingArtifactUnion)> { vec![ (TrainingArtifact0::record(), TrainingArtifactUnion::TrainingArtifact0(TrainingArtifact0)), (TrainingArtifact1::record(), TrainingArtifactUnion::TrainingArtifact1(TrainingArtifact1)), (TrainingArtifact2::record(), TrainingArtifactUnion::TrainingArtifact2(TrainingArtifact2)), (TrainingArtifact3::record(), TrainingArtifactUnion::TrainingArtifact3(TrainingArtifact3)), (TrainingArtifact4::record(), TrainingArtifactUnion::TrainingArtifact4(TrainingArtifact4)), (TrainingArtifact5::record(), TrainingArtifactUnion::TrainingArtifact5(TrainingArtifact5)), (TrainingArtifact6::record(), TrainingArtifactUnion::TrainingArtifact6(TrainingArtifact6)), (TrainingArtifact7::record(), TrainingArtifactUnion::TrainingArtifact7(TrainingArtifact7)), (TrainingArtifact8::record(), TrainingArtifactUnion::TrainingArtifact8(TrainingArtifact8)), (TrainingArtifact9::record(), TrainingArtifactUnion::TrainingArtifact9(TrainingArtifact9)), ] } #[derive(Debug)] pub struct TrainingArtifact0; impl Timeline for TrainingArtifact0 {} impl WeaponAttack for TrainingArtifact0 {} impl TrainingArtifact0 { pub fn record() -> Artifact { Artifact::default() .name("Atk70 Em280 Er78").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(33.3333)) .em(SCORE.em(33.3333)) .er(SCORE.er(33.3333)) } } #[derive(Debug)] pub struct TrainingArtifact1; impl Timeline for TrainingArtifact1 {} impl WeaponAttack for TrainingArtifact1 {} impl TrainingArtifact1 { pub fn record() -> Artifact { Artifact::default() .name("Atk168 Em167").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.atk(SCORE.atk(80.)) .em(SCORE.em(20.)) } } #[derive(Debug)] pub struct TrainingArtifact2; impl Timeline for TrainingArtifact2 {} impl WeaponAttack for TrainingArtifact2 {} impl TrainingArtifact2 { pub fn record() -> Artifact { Artifact::default() .name("Atk168 Er46").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(80.)) .er(SCORE.er(20.)) } } #[derive(Debug)] pub struct TrainingArtifact3; impl Timeline for TrainingArtifact3 {} impl WeaponAttack for TrainingArtifact3 {} impl TrainingArtifact3 { pub fn record() -> Artifact { Artifact::default() .name("Atk41 Em671").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(20.)) .em(SCORE.em(80.)) } } #[derive(Debug)] pub struct TrainingArtifact4; impl Timeline for TrainingArtifact4 {} impl WeaponAttack for TrainingArtifact4 {} impl TrainingArtifact4 { pub fn record() -> Artifact { Artifact::default() .name("Em671 Er46").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .em(SCORE.em(80.)) .er(SCORE.er(20.)) } } #[derive(Debug)] pub struct TrainingArtifact5; impl Timeline for TrainingArtifact5 {} impl WeaponAttack for TrainingArtifact5 {} impl TrainingArtifact5 { pub fn record() -> Artifact { Artifact::default() .name("Atk42 Er186").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(20.)) .er(SCORE.er(80.)) } } #[derive(Debug)] pub struct TrainingArtifact6; impl Timeline for TrainingArtifact6 {} impl WeaponAttack for TrainingArtifact6 {} impl TrainingArtifact6 { pub fn record() -> Artifact { Artifact::default() .name("Em167 Er186").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.em(SCORE.em(20.)) .er(SCORE.er(80.)) } } #[derive(Debug)] pub struct TrainingArtifact7; impl Timeline for TrainingArtifact7 {} impl WeaponAttack for TrainingArtifact7 {} impl TrainingArtifact7 { pub fn record() -> Artifact { Artifact::default() .name("Atk105 Em420").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(50.)) .em(SCORE.em(50.)) } } #[derive(Debug)] pub struct TrainingArtifact8; impl Timeline for TrainingArtifact8 {} impl WeaponAttack for TrainingArtifact8 {} impl TrainingArtifact8 { pub fn record() -> Artifact { Artifact::default() .name("Atk105 Er116").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(50.)) .er(SCORE.er(50.)) } } #[derive(Debug)] pub struct TrainingArtifact9; impl Timeline for TrainingArtifact9 {} impl WeaponAttack for TrainingArtifact9 {} impl TrainingArtifact9 { pub fn record() -> Artifact { Artifact::default() .name("Em420 Er116").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .em(SCORE.em(50.)) .er(SCORE.er(50.)) } } #[derive(Debug)] pub struct TrainingArtifact10; impl Timeline for TrainingArtifact10 {} impl WeaponAttack for TrainingArtifact10 {} impl TrainingArtifact10 { pub fn record() -> Artifact { Artifact::default() .name("Atk210").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(100.)) } } #[derive(Debug)] pub struct TrainingArtifact11; impl Timeline for TrainingArtifact11 {} impl WeaponAttack for TrainingArtifact11 {} impl TrainingArtifact11 { pub fn record() -> Artifact { Artifact::default() .name("Em839").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.em(SCORE.em(100.)) } } #[derive(Debug)] pub struct TrainingArtifact12; impl Timeline for TrainingArtifact12 {} impl WeaponAttack for TrainingArtifact12 {} impl TrainingArtifact12 { pub fn record() -> Artifact { Artifact::default() .name("Er233").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .er(SCORE.er(100.)) } }
use crate::sim2::state::State; use crate::sim2::timeline::{Timeline}; use crate::sim2::attack::{WeaponAttack}; use crate::sim2::types::{WeaponType, SCORE}; use crate::sim2::record::{WeaponRecord, Artifact}; use WeaponType::*; pub enum TrainingWeaponUnion { TrainingSword(TrainingSword), TrainingClaymore(TrainingClaymore), TrainingPolearm(TrainingPolearm), TrainingBow(TrainingBow), TrainingCatalyst(TrainingCatalyst), } impl TrainingWeaponUnion { pub fn timeline(&mut self) -> &mut dyn Timeline { use TrainingWeaponUnion::*; match self { TrainingSword(x) => x, TrainingClaymore(x) => x, TrainingPolearm(x) => x, TrainingBow(x) => x, TrainingCatalyst(x) => x, } } pub fn field(&mut self) -> &mut dyn WeaponAttack { use TrainingWeaponUnion::*; match self { TrainingSword(x) => x, TrainingClaymore(x) => x, TrainingPolearm(x) => x, TrainingBow(x) => x, TrainingCatalyst(x) => x, } } } pub fn weapons() -> Vec<(WeaponRecord, TrainingWeaponUnion)> { vec![ (TrainingSword::record(), TrainingWeaponUnion::TrainingSword(TrainingSword)), (TrainingClaymore::record(), TrainingWeaponUnion::TrainingClaymore(TrainingClaymore)), (TrainingPolearm::record(), TrainingWeaponUnion::TrainingPolearm(TrainingPolearm)), (TrainingBow::record(), TrainingWeaponUnion::TrainingBow(TrainingBow)), (TrainingCatalyst::record(), TrainingWeaponUnion::TrainingCatalyst(TrainingCatalyst)), ] } #[derive(Debug)] pub struct TrainingSword; impl Timeline for TrainingSword {} impl WeaponAttack for TrainingSword {} impl TrainingSword { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Sword").type_(Sword).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingClaymore; impl Timeline for TrainingClaymore {} impl WeaponAttack for TrainingClaymore {} impl TrainingClaymore { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Claymore").type_(Claymore).version(1.0) .base_atk(608.) 
} } #[derive(Debug)] pub struct TrainingPolearm; impl Timeline for TrainingPolearm {} impl WeaponAttack for TrainingPolearm {} impl TrainingPolearm { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Polearm").type_(Polearm).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingBow; impl Timeline for TrainingBow {} impl WeaponAttack for TrainingBow {} impl TrainingBow { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Bow").type_(Bow).version(1.0) .base_atk(608.) } } #[derive(Debug)] pub struct TrainingCatalyst; impl Timeline for TrainingCatalyst {} impl WeaponAttack for TrainingCatalyst {} impl TrainingCatalyst { pub fn record() -> WeaponRecord { WeaponRecord::default() .name("Training Catalyst").type_(Catalyst).version(1.0) .base_atk(608.) } } pub enum TrainingArtifactUnion { TrainingArtifact0(TrainingArtifact0), TrainingArtifact1(TrainingArtifact1), TrainingArtifact2(TrainingArtifact2), TrainingArtifact3(TrainingArtifact3), TrainingArtifact4(TrainingArtifact4), TrainingArtifact5(TrainingArtifact5), TrainingArtifact6(TrainingArtifact6), TrainingArtifact7(TrainingArtifact7), TrainingArtifact8(TrainingArtifact8), TrainingArtifact9(TrainingArtifact9), } impl TrainingArtifactUnion { pub fn timeline(&mut self) -> &mut dyn Timeline { use TrainingArtifactUnion::*; match self { TrainingArtifact0(x) => x, TrainingArtifact1(x) => x, TrainingArtifact2(x) => x, TrainingArtifact3(x) => x, TrainingArtifact4(x) => x, TrainingArtifact5(x) => x, TrainingArtifact6(x) => x, TrainingArtifact7(x) => x, TrainingArtifact8(x) => x, TrainingArtifact9(x) => x, } } pub fn field(&mut self) -> &mut dyn WeaponAttack { use TrainingArtifactUnion::*; match self { TrainingArtifact0(x) => x, TrainingArtifact1(x) => x,
} pub fn artifacts() -> Vec<(Artifact, TrainingArtifactUnion)> { vec![ (TrainingArtifact0::record(), TrainingArtifactUnion::TrainingArtifact0(TrainingArtifact0)), (TrainingArtifact1::record(), TrainingArtifactUnion::TrainingArtifact1(TrainingArtifact1)), (TrainingArtifact2::record(), TrainingArtifactUnion::TrainingArtifact2(TrainingArtifact2)), (TrainingArtifact3::record(), TrainingArtifactUnion::TrainingArtifact3(TrainingArtifact3)), (TrainingArtifact4::record(), TrainingArtifactUnion::TrainingArtifact4(TrainingArtifact4)), (TrainingArtifact5::record(), TrainingArtifactUnion::TrainingArtifact5(TrainingArtifact5)), (TrainingArtifact6::record(), TrainingArtifactUnion::TrainingArtifact6(TrainingArtifact6)), (TrainingArtifact7::record(), TrainingArtifactUnion::TrainingArtifact7(TrainingArtifact7)), (TrainingArtifact8::record(), TrainingArtifactUnion::TrainingArtifact8(TrainingArtifact8)), (TrainingArtifact9::record(), TrainingArtifactUnion::TrainingArtifact9(TrainingArtifact9)), ] } #[derive(Debug)] pub struct TrainingArtifact0; impl Timeline for TrainingArtifact0 {} impl WeaponAttack for TrainingArtifact0 {} impl TrainingArtifact0 { pub fn record() -> Artifact { Artifact::default() .name("Atk70 Em280 Er78").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(33.3333)) .em(SCORE.em(33.3333)) .er(SCORE.er(33.3333)) } } #[derive(Debug)] pub struct TrainingArtifact1; impl Timeline for TrainingArtifact1 {} impl WeaponAttack for TrainingArtifact1 {} impl TrainingArtifact1 { pub fn record() -> Artifact { Artifact::default() .name("Atk168 Em167").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.atk(SCORE.atk(80.)) .em(SCORE.em(20.)) } } #[derive(Debug)] pub struct TrainingArtifact2; impl Timeline for TrainingArtifact2 {} impl WeaponAttack for TrainingArtifact2 {} impl TrainingArtifact2 { pub fn record() -> Artifact { Artifact::default() .name("Atk168 Er46").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(80.)) .er(SCORE.er(20.)) } } #[derive(Debug)] pub struct TrainingArtifact3; impl Timeline for TrainingArtifact3 {} impl WeaponAttack for TrainingArtifact3 {} impl TrainingArtifact3 { pub fn record() -> Artifact { Artifact::default() .name("Atk41 Em671").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(20.)) .em(SCORE.em(80.)) } } #[derive(Debug)] pub struct TrainingArtifact4; impl Timeline for TrainingArtifact4 {} impl WeaponAttack for TrainingArtifact4 {} impl TrainingArtifact4 { pub fn record() -> Artifact { Artifact::default() .name("Em671 Er46").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .em(SCORE.em(80.)) .er(SCORE.er(20.)) } } #[derive(Debug)] pub struct TrainingArtifact5; impl Timeline for TrainingArtifact5 {} impl WeaponAttack for TrainingArtifact5 {} impl TrainingArtifact5 { pub fn record() -> Artifact { Artifact::default() .name("Atk42 Er186").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(20.)) .er(SCORE.er(80.)) } } #[derive(Debug)] pub struct TrainingArtifact6; impl Timeline for TrainingArtifact6 {} impl WeaponAttack for TrainingArtifact6 {} impl TrainingArtifact6 { pub fn record() -> Artifact { Artifact::default() .name("Em167 Er186").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.em(SCORE.em(20.)) .er(SCORE.er(80.)) } } #[derive(Debug)] pub struct TrainingArtifact7; impl Timeline for TrainingArtifact7 {} impl WeaponAttack for TrainingArtifact7 {} impl TrainingArtifact7 { pub fn record() -> Artifact { Artifact::default() .name("Atk105 Em420").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(50.)) .em(SCORE.em(50.)) } } #[derive(Debug)] pub struct TrainingArtifact8; impl Timeline for TrainingArtifact8 {} impl WeaponAttack for TrainingArtifact8 {} impl TrainingArtifact8 { pub fn record() -> Artifact { Artifact::default() .name("Atk105 Er116").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(50.)) .er(SCORE.er(50.)) } } #[derive(Debug)] pub struct TrainingArtifact9; impl Timeline for TrainingArtifact9 {} impl WeaponAttack for TrainingArtifact9 {} impl TrainingArtifact9 { pub fn record() -> Artifact { Artifact::default() .name("Em420 Er116").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .em(SCORE.em(50.)) .er(SCORE.er(50.)) } } #[derive(Debug)] pub struct TrainingArtifact10; impl Timeline for TrainingArtifact10 {} impl WeaponAttack for TrainingArtifact10 {} impl TrainingArtifact10 { pub fn record() -> Artifact { Artifact::default() .name("Atk210").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .atk(SCORE.atk(100.)) } } #[derive(Debug)] pub struct TrainingArtifact11; impl Timeline for TrainingArtifact11 {} impl WeaponAttack for TrainingArtifact11 {} impl TrainingArtifact11 { pub fn record() -> Artifact { Artifact::default() .name("Em839").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) 
.em(SCORE.em(100.)) } } #[derive(Debug)] pub struct TrainingArtifact12; impl Timeline for TrainingArtifact12 {} impl WeaponAttack for TrainingArtifact12 {} impl TrainingArtifact12 { pub fn record() -> Artifact { Artifact::default() .name("Er233").version(1.0).preference(&[]) .elemental_dmg(15.).physical_dmg(25.).cr(20.) .er(SCORE.er(100.)) } }
TrainingArtifact2(x) => x, TrainingArtifact3(x) => x, TrainingArtifact4(x) => x, TrainingArtifact5(x) => x, TrainingArtifact6(x) => x, TrainingArtifact7(x) => x, TrainingArtifact8(x) => x, TrainingArtifact9(x) => x, } }
function_block-function_prefix_line
[]
Rust
common/functions/src/scalars/dates/date.rs
flaneur2020/databend
b2758473f59bd23f1a278699e6cf292724c204cd
use common_datavalues::DataValueArithmeticOperator; use common_exception::Result; use super::interval_function::MonthsArithmeticFunction; use super::interval_function::SecondsArithmeticFunction; use super::now::NowFunction; use super::RoundFunction; use super::ToStartOfISOYearFunction; use super::ToStartOfMonthFunction; use super::ToStartOfQuarterFunction; use super::ToStartOfWeekFunction; use super::ToStartOfYearFunction; use super::ToYYYYMMDDFunction; use super::ToYYYYMMDDhhmmssFunction; use super::ToYYYYMMFunction; use super::TodayFunction; use super::TomorrowFunction; use super::YesterdayFunction; use crate::scalars::FactoryFuncRef; #[derive(Clone)] pub struct DateFunction {} impl DateFunction { pub fn register(map: FactoryFuncRef) -> Result<()> { let mut map = map.write(); map.insert("today".into(), TodayFunction::try_create); map.insert("yesterday".into(), YesterdayFunction::try_create); map.insert("tomorrow".into(), TomorrowFunction::try_create); map.insert("now".into(), NowFunction::try_create); map.insert("toYYYYMM".into(), ToYYYYMMFunction::try_create); map.insert("toYYYYMMDD".into(), ToYYYYMMDDFunction::try_create); map.insert( "toYYYYMMDDhhmmss".into(), ToYYYYMMDDhhmmssFunction::try_create, ); map.insert("toStartOfYear".into(), ToStartOfYearFunction::try_create); map.insert( "toStartOfISOYear".into(), ToStartOfISOYearFunction::try_create, ); map.insert( "toStartOfQuarter".into(), ToStartOfQuarterFunction::try_create, ); map.insert("toStartOfWeek".into(), ToStartOfWeekFunction::try_create); map.insert("toStartOfMonth".into(), ToStartOfMonthFunction::try_create); { map.insert("toStartOfSecond".into(), |display_name| { RoundFunction::try_create(display_name, 1) }); map.insert("toStartOfMinute".into(), |display_name| { RoundFunction::try_create(display_name, 60) }); map.insert("toStartOfFiveMinutes".into(), |display_name| { RoundFunction::try_create(display_name, 5 * 60) }); map.insert("toStartOfTenMinutes".into(), |display_name| { 
RoundFunction::try_create(display_name, 10 * 60) }); map.insert("toStartOfFifteenMinutes".into(), |display_name| { RoundFunction::try_create(display_name, 15 * 60) }); map.insert("timeSlot".into(), |display_name| { RoundFunction::try_create(display_name, 30 * 60) }); map.insert("toStartOfHour".into(), |display_name| { RoundFunction::try_create(display_name, 60 * 60) }); map.insert("toStartOfDay".into(), |display_name| { RoundFunction::try_create(display_name, 60 * 60 * 24) }); } { map.insert("addYears".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 12, /* one year is 12 months */ ) }); map.insert("subtractYears".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 12, /* one year is 12 months */ ) }); map.insert("addMonths".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 1, ) }); map.insert("subtractMonths".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 1, ) }); map.insert("addDays".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 24 * 3600, /* one day is 24 * 3600 seconds */ ) }); map.insert("subtractDays".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 24 * 3600, /* one day is 24 * 3600 seconds */ ) }); map.insert("addHours".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 3600, /* one hour is 3600 seconds */ ) }); map.insert("subtractHours".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 3600, /* one hour is 3600 seconds */ ) }); map.insert("addMinutes".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, 
DataValueArithmeticOperator::Plus, 60, /* one minute is 60 seconds */ ) }); map.insert("subtractMinutes".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 60, /* one minute is 60 seconds */ ) }); map.insert("addSeconds".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 1, ) }); map.insert("subtractSeconds".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 1, ) }); } Ok(()) } }
use common_datavalues::DataValueArithmeticOperator; use common_exception::Result; use super::interval_function::MonthsArithmeticFunction; use super::interval_function::SecondsArithmeticFunction; use super::now::NowFunction; use super::RoundFunction; use super::ToStartOfISOYearFunction; use super::ToStartOfMonthFunction; use super::ToStartOfQuarterFunction; use super::ToStar
}); map.insert("toStartOfFifteenMinutes".into(), |display_name| { RoundFunction::try_create(display_name, 15 * 60) }); map.insert("timeSlot".into(), |display_name| { RoundFunction::try_create(display_name, 30 * 60) }); map.insert("toStartOfHour".into(), |display_name| { RoundFunction::try_create(display_name, 60 * 60) }); map.insert("toStartOfDay".into(), |display_name| { RoundFunction::try_create(display_name, 60 * 60 * 24) }); } { map.insert("addYears".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 12, /* one year is 12 months */ ) }); map.insert("subtractYears".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 12, /* one year is 12 months */ ) }); map.insert("addMonths".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 1, ) }); map.insert("subtractMonths".into(), |display_name| { MonthsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 1, ) }); map.insert("addDays".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 24 * 3600, /* one day is 24 * 3600 seconds */ ) }); map.insert("subtractDays".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 24 * 3600, /* one day is 24 * 3600 seconds */ ) }); map.insert("addHours".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 3600, /* one hour is 3600 seconds */ ) }); map.insert("subtractHours".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 3600, /* one hour is 3600 seconds */ ) }); map.insert("addMinutes".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 60, /* one minute is 60 seconds */ ) }); 
map.insert("subtractMinutes".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 60, /* one minute is 60 seconds */ ) }); map.insert("addSeconds".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Plus, 1, ) }); map.insert("subtractSeconds".into(), |display_name| { SecondsArithmeticFunction::try_create( display_name, DataValueArithmeticOperator::Minus, 1, ) }); } Ok(()) } }
tOfWeekFunction; use super::ToStartOfYearFunction; use super::ToYYYYMMDDFunction; use super::ToYYYYMMDDhhmmssFunction; use super::ToYYYYMMFunction; use super::TodayFunction; use super::TomorrowFunction; use super::YesterdayFunction; use crate::scalars::FactoryFuncRef; #[derive(Clone)] pub struct DateFunction {} impl DateFunction { pub fn register(map: FactoryFuncRef) -> Result<()> { let mut map = map.write(); map.insert("today".into(), TodayFunction::try_create); map.insert("yesterday".into(), YesterdayFunction::try_create); map.insert("tomorrow".into(), TomorrowFunction::try_create); map.insert("now".into(), NowFunction::try_create); map.insert("toYYYYMM".into(), ToYYYYMMFunction::try_create); map.insert("toYYYYMMDD".into(), ToYYYYMMDDFunction::try_create); map.insert( "toYYYYMMDDhhmmss".into(), ToYYYYMMDDhhmmssFunction::try_create, ); map.insert("toStartOfYear".into(), ToStartOfYearFunction::try_create); map.insert( "toStartOfISOYear".into(), ToStartOfISOYearFunction::try_create, ); map.insert( "toStartOfQuarter".into(), ToStartOfQuarterFunction::try_create, ); map.insert("toStartOfWeek".into(), ToStartOfWeekFunction::try_create); map.insert("toStartOfMonth".into(), ToStartOfMonthFunction::try_create); { map.insert("toStartOfSecond".into(), |display_name| { RoundFunction::try_create(display_name, 1) }); map.insert("toStartOfMinute".into(), |display_name| { RoundFunction::try_create(display_name, 60) }); map.insert("toStartOfFiveMinutes".into(), |display_name| { RoundFunction::try_create(display_name, 5 * 60) }); map.insert("toStartOfTenMinutes".into(), |display_name| { RoundFunction::try_create(display_name, 10 * 60)
random
[ { "content": "-- {ErrorCode 3, but it not work, because it's trimed in msql-srv}\n\nUSE not_exists_db;\n\nUSE default;\n\nUSE system;\n", "file_path": "tests/suites/0_stateless/07_0000_use_database.sql", "rank": 0, "score": 56735.32093857074 }, { "content": "use common_streams::SendableDataBlockStream;\n\n\n\nuse crate::interpreters::Interpreter;\n\nuse crate::interpreters::InterpreterPtr;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct UseDatabaseInterpreter {\n\n ctx: DatabendQueryContextRef,\n\n plan: UseDatabasePlan,\n\n}\n\n\n\nimpl UseDatabaseInterpreter {\n\n pub fn try_create(\n\n ctx: DatabendQueryContextRef,\n\n plan: UseDatabasePlan,\n\n ) -> Result<InterpreterPtr> {\n\n Ok(Arc::new(UseDatabaseInterpreter { ctx, plan }))\n\n }\n\n}\n\n\n", "file_path": "query/src/interpreters/interpreter_use_database.rs", "rank": 1, "score": 55465.0561091037 }, { "content": "// Copyright 2020 Datafuse Labs.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::sync::Arc;\n\n\n\nuse common_datavalues::DataSchema;\n\nuse common_exception::Result;\n\nuse common_planners::UseDatabasePlan;\n\nuse common_streams::DataBlockStream;\n", "file_path": "query/src/interpreters/interpreter_use_database.rs", "rank": 2, "score": 55464.772894206355 }, { "content": "// Copyright 2020 Datafuse Labs.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in 
compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::sync::Arc;\n\n\n\nuse common_datavalues::DataSchema;\n\nuse common_datavalues::DataSchemaRef;\n\n\n\n#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq)]\n", "file_path": "common/planners/src/plan_use_database.rs", "rank": 3, "score": 55464.186475005896 }, { "content": "pub struct UseDatabasePlan {\n\n pub db: String,\n\n}\n\n\n\nimpl UseDatabasePlan {\n\n pub fn schema(&self) -> DataSchemaRef {\n\n Arc::new(DataSchema::empty())\n\n }\n\n}\n", "file_path": "common/planners/src/plan_use_database.rs", "rank": 4, "score": 55464.15024137163 }, { "content": "#[async_trait::async_trait]\n\nimpl Interpreter for UseDatabaseInterpreter {\n\n fn name(&self) -> &str {\n\n \"UseDatabaseInterpreter\"\n\n }\n\n\n\n async fn execute(&self) -> Result<SendableDataBlockStream> {\n\n self.ctx.set_current_database(self.plan.db.clone())?;\n\n let schema = Arc::new(DataSchema::empty());\n\n Ok(Box::pin(DataBlockStream::create(schema, None, vec![])))\n\n }\n\n}\n", "file_path": "query/src/interpreters/interpreter_use_database.rs", "rank": 5, "score": 55463.65778632025 }, { "content": "select database();\n", "file_path": "tests/suites/0_stateless/07_0000_use_database.sql", "rank": 6, "score": 55457.44327664045 }, { "content": "use crate::interpreters::*;\n\nuse crate::sql::*;\n\n\n\n#[tokio::test]\n\nasync fn test_use_interpreter() -> Result<()> {\n\n let ctx = crate::tests::try_create_context()?;\n\n\n\n if let PlanNode::UseDatabase(plan) =\n\n 
PlanParser::create(ctx.clone()).build_from_sql(\"use default\")?\n\n {\n\n let executor = UseDatabaseInterpreter::try_create(ctx, plan)?;\n\n assert_eq!(executor.name(), \"UseDatabaseInterpreter\");\n\n\n\n let mut stream = executor.execute().await?;\n\n while let Some(_block) = stream.next().await {}\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n Ok(())\n", "file_path": "query/src/interpreters/interpreter_use_database_test.rs", "rank": 7, "score": 54250.01251815096 }, { "content": "// Copyright 2020 Datafuse Labs.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse common_base::tokio;\n\nuse common_exception::Result;\n\nuse common_planners::*;\n\nuse futures::stream::StreamExt;\n\nuse pretty_assertions::assert_eq;\n\n\n", "file_path": "query/src/interpreters/interpreter_use_database_test.rs", "rank": 8, "score": 54249.76962431586 }, { "content": "}\n\n\n\n#[tokio::test]\n\nasync fn test_use_database_interpreter_error() -> Result<()> {\n\n let ctx = crate::tests::try_create_context()?;\n\n\n\n if let PlanNode::UseDatabase(plan) = PlanParser::create(ctx.clone()).build_from_sql(\"use xx\")? 
{\n\n let executor = UseDatabaseInterpreter::try_create(ctx, plan)?;\n\n\n\n if let Err(e) = executor.execute().await {\n\n let expect = \"Code: 3, displayText = Cannot USE 'xx', because the 'xx' doesn't exist.\";\n\n assert_eq!(expect, format!(\"{}\", e));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "query/src/interpreters/interpreter_use_database_test.rs", "rank": 9, "score": 54249.75158822425 }, { "content": "#[test]\n\nfn use_database_test() -> Result<()> {\n\n expect_parse_ok(\n\n \"USe db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n expect_parse_ok(\n\n \"use db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "query/src/sql/sql_parser_test.rs", "rank": 10, "score": 50897.53504257314 }, { "content": "use std::net::Ipv6Addr;\n\nuse std::ptr;\n\nuse std::slice;\n\n\n\nuse chrono::prelude::*;\n\nuse chrono::Date;\n\nuse chrono_tz::Tz;\n\n\n\nuse crate::errors::Error;\n\nuse crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::types::column::column_data::ArcColumnData;\n\nuse crate::types::column::datetime64::to_datetime;\n\nuse crate::types::column::StringPool;\n\nuse crate::types::decimal::NoBits;\n\nuse crate::types::Column;\n\nuse crate::types::ColumnType;\n\nuse crate::types::Complex;\n\nuse crate::types::Decimal;\n\nuse crate::types::Simple;\n", "file_path": "common/clickhouse-srv/src/types/column/iter/mod.rs", "rank": 11, "score": 7.912354856644118 }, { "content": "use crate::CreateDatabasePlan;\n\nuse crate::CreateTablePlan;\n\nuse crate::DescribeTablePlan;\n\nuse crate::DropDatabasePlan;\n\nuse crate::DropTablePlan;\n\nuse crate::EmptyPlan;\n\nuse crate::ExplainPlan;\n\nuse crate::Expression;\n\nuse crate::ExpressionPlan;\n\nuse crate::FilterPlan;\n\nuse crate::HavingPlan;\n\nuse crate::InsertIntoPlan;\n\nuse 
crate::KillPlan;\n\nuse crate::LimitByPlan;\n\nuse crate::LimitPlan;\n\nuse crate::PlanNode;\n\nuse crate::ProjectionPlan;\n\nuse crate::ReadDataSourcePlan;\n\nuse crate::RemotePlan;\n\nuse crate::ScanPlan;\n", "file_path": "common/planners/src/plan_visitor.rs", "rank": 12, "score": 7.908825683527116 }, { "content": "use common_store_api_sdk::StoreDoGet;\n\nuse common_tracing::tracing;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\nuse log::info;\n\nuse metasrv::meta_service::MetaNode;\n\nuse prost::Message;\n\nuse serde::Serialize;\n\nuse tokio_stream::wrappers::ReceiverStream;\n\nuse tonic::metadata::MetadataMap;\n\nuse tonic::Request;\n\nuse tonic::Response;\n\nuse tonic::Status;\n\nuse tonic::Streaming;\n\n\n\nuse crate::configs::Config;\n\nuse crate::executor::ActionHandler;\n\nuse crate::executor::ReplySerializer;\n\nuse crate::fs::FileSystem;\n\n\n", "file_path": "store/src/api/rpc/flight_service.rs", "rank": 13, "score": 7.90164932787605 }, { "content": "\n\nuse chrono::prelude::*;\n\nuse chrono_tz::Tz;\n\nuse hostname::get;\n\nuse lazy_static::lazy_static;\n\n\n\npub use self::block::Block;\n\npub use self::block::RCons;\n\npub use self::block::RNil;\n\npub use self::block::Row;\n\npub use self::block::RowBuilder;\n\npub use self::block::Rows;\n\npub use self::column::Column;\n\npub use self::column::ColumnType;\n\npub use self::column::Complex;\n\npub use self::column::Simple;\n\npub(crate) use self::date_converter::DateConverter;\n\npub use self::decimal::Decimal;\n\npub use self::enums::Enum16;\n\npub use self::enums::Enum8;\n", "file_path": "common/clickhouse-srv/src/types/mod.rs", "rank": 14, "score": 7.898087253043626 }, { "content": "use common_datavalues::DataSchemaRefExt;\n\nuse common_datavalues::DataType;\n\nuse common_exception::Result;\n\n\n\nuse crate::col;\n\nuse crate::plan_subqueries_set::SubQueriesSetPlan;\n\nuse crate::validate_expression;\n\nuse crate::AggregatorFinalPlan;\n\nuse crate::AggregatorPartialPlan;\n\nuse 
crate::EmptyPlan;\n\nuse crate::ExplainPlan;\n\nuse crate::ExplainType;\n\nuse crate::Expression;\n\nuse crate::ExpressionPlan;\n\nuse crate::FilterPlan;\n\nuse crate::HavingPlan;\n\nuse crate::LimitByPlan;\n\nuse crate::LimitPlan;\n\nuse crate::PlanNode;\n\nuse crate::ProjectionPlan;\n", "file_path": "common/planners/src/plan_builder.rs", "rank": 15, "score": 7.891930422091914 }, { "content": "use common_planners::ExplainType;\n\nuse metrics::histogram;\n\nuse sqlparser::ast::BinaryOperator;\n\nuse sqlparser::ast::ColumnDef;\n\nuse sqlparser::ast::ColumnOptionDef;\n\nuse sqlparser::ast::Expr;\n\nuse sqlparser::ast::Ident;\n\nuse sqlparser::ast::SqlOption;\n\nuse sqlparser::ast::TableConstraint;\n\nuse sqlparser::ast::Value;\n\nuse sqlparser::dialect::keywords::Keyword;\n\nuse sqlparser::dialect::Dialect;\n\nuse sqlparser::dialect::GenericDialect;\n\nuse sqlparser::parser::Parser;\n\nuse sqlparser::parser::ParserError;\n\nuse sqlparser::tokenizer::Token;\n\nuse sqlparser::tokenizer::Tokenizer;\n\nuse sqlparser::tokenizer::Whitespace;\n\n\n\nuse crate::sql::DfCreateDatabase;\n", "file_path": "query/src/sql/sql_parser.rs", "rank": 16, "score": 7.881839882880795 }, { "content": "use common_planners::Expression;\n\nuse common_planners::InsertIntoPlan;\n\nuse common_planners::KillPlan;\n\nuse common_planners::PlanBuilder;\n\nuse common_planners::PlanNode;\n\nuse common_planners::SelectPlan;\n\nuse common_planners::SettingPlan;\n\nuse common_planners::ShowCreateTablePlan;\n\nuse common_planners::TableScanInfo;\n\nuse common_planners::TruncateTablePlan;\n\nuse common_planners::UseDatabasePlan;\n\nuse common_planners::VarValue;\n\nuse common_streams::Source;\n\nuse common_streams::ValueSource;\n\nuse common_tracing::tracing;\n\nuse nom::FindSubstring;\n\nuse sqlparser::ast::FunctionArg;\n\nuse sqlparser::ast::Ident;\n\nuse sqlparser::ast::ObjectName;\n\nuse sqlparser::ast::OrderByExpr;\n", "file_path": "query/src/sql/plan_parser.rs", "rank": 17, "score": 7.881398157131634 
}, { "content": "use chrono_tz::Tz;\n\nuse uuid::Uuid;\n\n\n\nuse crate::errors::Error;\n\nuse crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::types::column::datetime64::to_datetime;\n\nuse crate::types::column::Either;\n\nuse crate::types::decimal::Decimal;\n\nuse crate::types::value::decode_ipv4;\n\nuse crate::types::value::decode_ipv6;\n\nuse crate::types::value::AppDate;\n\nuse crate::types::value::AppDateTime;\n\nuse crate::types::DateTimeType;\n\nuse crate::types::Enum16;\n\nuse crate::types::Enum8;\n\nuse crate::types::SqlType;\n\nuse crate::types::Value;\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "common/clickhouse-srv/src/types/value_ref.rs", "rank": 18, "score": 7.878632466441647 }, { "content": "use common_exception::ErrorCode;\n\nuse common_exception::Result;\n\n\n\nuse crate::plan_broadcast::BroadcastPlan;\n\nuse crate::plan_subqueries_set::SubQueriesSetPlan;\n\nuse crate::AggregatorFinalPlan;\n\nuse crate::AggregatorPartialPlan;\n\nuse crate::CreateDatabasePlan;\n\nuse crate::CreateTablePlan;\n\nuse crate::DescribeTablePlan;\n\nuse crate::DropDatabasePlan;\n\nuse crate::DropTablePlan;\n\nuse crate::EmptyPlan;\n\nuse crate::ExplainPlan;\n\nuse crate::Expression;\n\nuse crate::ExpressionPlan;\n\nuse crate::Expressions;\n\nuse crate::FilterPlan;\n\nuse crate::HavingPlan;\n\nuse crate::InsertIntoPlan;\n", "file_path": "common/planners/src/plan_rewriter.rs", "rank": 19, "score": 7.878482101437525 }, { "content": "use crate::plan_broadcast::BroadcastPlan;\n\nuse crate::plan_subqueries_set::SubQueriesSetPlan;\n\nuse crate::AggregatorFinalPlan;\n\nuse crate::AggregatorPartialPlan;\n\nuse crate::CreateDatabasePlan;\n\nuse crate::CreateTablePlan;\n\nuse crate::DescribeTablePlan;\n\nuse crate::DropDatabasePlan;\n\nuse crate::DropTablePlan;\n\nuse crate::EmptyPlan;\n\nuse crate::ExplainPlan;\n\nuse crate::ExpressionPlan;\n\nuse crate::FilterPlan;\n\nuse crate::HavingPlan;\n\nuse crate::InsertIntoPlan;\n\nuse crate::KillPlan;\n\nuse 
crate::LimitByPlan;\n\nuse crate::LimitPlan;\n\nuse crate::ProjectionPlan;\n\nuse crate::ReadDataSourcePlan;\n", "file_path": "common/planners/src/plan_node.rs", "rank": 20, "score": 7.876824279734635 }, { "content": "use axum::extract::Extension;\n\nuse axum::handler::get;\n\nuse axum::http::Response;\n\nuse axum::response::Html;\n\nuse axum::response::IntoResponse;\n\nuse axum::AddExtensionLayer;\n\nuse axum::Router;\n\nuse common_base::tokio;\n\nuse common_base::tokio::task::JoinHandle;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse futures::future::AbortHandle;\n\nuse futures::future::AbortRegistration;\n\nuse futures::future::Abortable;\n\nuse futures::StreamExt;\n\nuse hyper::server::conn::Http;\n\nuse metrics_exporter_prometheus::PrometheusBuilder;\n\nuse metrics_exporter_prometheus::PrometheusHandle;\n\nuse tokio_stream::wrappers::TcpListenerStream;\n\n\n", "file_path": "query/src/metrics/metric_service.rs", "rank": 21, "score": 7.871775114034517 }, { "content": "use common_exception::Result;\n\nuse common_management::NodeInfo;\n\nuse common_planners::AggregatorFinalPlan;\n\nuse common_planners::AggregatorPartialPlan;\n\nuse common_planners::BroadcastPlan;\n\nuse common_planners::EmptyPlan;\n\nuse common_planners::Expression;\n\nuse common_planners::ExpressionPlan;\n\nuse common_planners::Expressions;\n\nuse common_planners::FilterPlan;\n\nuse common_planners::HavingPlan;\n\nuse common_planners::LimitByPlan;\n\nuse common_planners::LimitPlan;\n\nuse common_planners::Partitions;\n\nuse common_planners::PlanNode;\n\nuse common_planners::ProjectionPlan;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::RemotePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::SelectPlan;\n", "file_path": "query/src/interpreters/plan_scheduler.rs", "rank": 22, "score": 7.86409611374544 }, { "content": "use async_raft::raft::MembershipConfig;\n\nuse async_raft::LogId;\n\nuse common_base::tokio;\n\nuse 
common_metatypes::Cmd;\n\nuse common_metatypes::Database;\n\nuse common_metatypes::KVMeta;\n\nuse common_metatypes::KVValue;\n\nuse common_metatypes::LogEntry;\n\nuse common_metatypes::MatchSeq;\n\nuse common_metatypes::Node;\n\nuse common_metatypes::Operation;\n\nuse common_metatypes::SeqValue;\n\nuse common_metatypes::Slot;\n\nuse common_tracing::tracing;\n\nuse maplit::btreeset;\n\nuse pretty_assertions::assert_eq;\n\n\n\nuse crate::init_raft_store_ut;\n\nuse crate::state_machine::testing::pretty_snapshot;\n\nuse crate::state_machine::testing::pretty_snapshot_iter;\n", "file_path": "common/raft-store/src/state_machine/state_machine_test.rs", "rank": 23, "score": 7.861736016956057 }, { "content": "use common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_infallible::Mutex;\n\nuse common_planners::Expression;\n\nuse common_streams::SendableDataBlockStream;\n\nuse common_streams::SubQueriesStream;\n\nuse futures::future::join_all;\n\nuse futures::future::BoxFuture;\n\nuse futures::future::JoinAll;\n\nuse futures::future::Shared;\n\nuse futures::Future;\n\nuse futures::FutureExt;\n\nuse futures::StreamExt;\n\n\n\nuse crate::pipelines::processors::EmptyProcessor;\n\nuse crate::pipelines::processors::Pipeline;\n\nuse crate::pipelines::processors::PipelineBuilder;\n\nuse crate::pipelines::processors::Processor;\n\nuse crate::sessions::DatabendQueryContext;\n\nuse crate::sessions::DatabendQueryContextRef;\n", "file_path": "query/src/pipelines/transforms/transform_create_sets.rs", "rank": 24, "score": 7.861736016956057 }, { "content": "use async_raft::raft::EntryPayload;\n\nuse async_raft::raft::MembershipConfig;\n\nuse common_dfs_api_vo::AppendResult;\n\nuse common_dfs_api_vo::DataPartInfo;\n\nuse common_exception::prelude::ErrorCode;\n\nuse common_exception::ToErrorCode;\n\nuse common_metatypes::Cmd;\n\nuse common_metatypes::Database;\n\nuse common_metatypes::KVMeta;\n\nuse common_metatypes::KVValue;\n\nuse common_metatypes::LogEntry;\n\nuse 
common_metatypes::LogId;\n\nuse common_metatypes::MatchSeqExt;\n\nuse common_metatypes::Node;\n\nuse common_metatypes::NodeId;\n\nuse common_metatypes::Operation;\n\nuse common_metatypes::SeqValue;\n\nuse common_metatypes::Slot;\n\nuse common_metatypes::Table;\n\nuse common_planners::Part;\n", "file_path": "common/raft-store/src/state_machine/sm.rs", "rank": 25, "score": 7.854576951358923 }, { "content": "use crate::errors::Result;\n\nuse crate::types::column::column_data::BoxColumnData;\n\nuse crate::types::column::column_data::ColumnData;\n\nuse crate::types::column::list::List;\n\nuse crate::types::column::nullable::NullableColumnData;\n\nuse crate::types::column::BoxColumnWrapper;\n\nuse crate::types::column::ColumnFrom;\n\nuse crate::types::column::ColumnWrapper;\n\nuse crate::types::column::Either;\n\nuse crate::types::column::VectorColumnData;\n\nuse crate::types::enums::Enum16;\n\nuse crate::types::enums::Enum8;\n\nuse crate::types::from_sql::FromSql;\n\nuse crate::types::Column;\n\nuse crate::types::ColumnType;\n\nuse crate::types::SqlType;\n\nuse crate::types::Value;\n\nuse crate::types::ValueRef;\n\n\n\npub(crate) struct Enum16ColumnData {\n", "file_path": "common/clickhouse-srv/src/types/column/enums.rs", "rank": 26, "score": 7.852070830968373 }, { "content": "use common_base::tokio::task::JoinHandle;\n\nuse common_base::ProgressCallback;\n\nuse common_base::ProgressValues;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_infallible::RwLock;\n\nuse common_metatypes::MetaId;\n\nuse common_metatypes::MetaVersion;\n\nuse common_planners::Part;\n\nuse common_planners::Partitions;\n\nuse common_planners::PlanNode;\n\nuse common_planners::Statistics;\n\nuse common_streams::AbortStream;\n\nuse common_streams::SendableDataBlockStream;\n\n\n\nuse crate::catalogs::impls::DatabaseCatalog;\n\nuse crate::catalogs::Catalog;\n\nuse crate::catalogs::TableFunctionMeta;\n\nuse crate::catalogs::TableMeta;\n\nuse 
crate::clusters::ClusterRef;\n", "file_path": "query/src/sessions/context.rs", "rank": 27, "score": 7.851722493550725 }, { "content": "use std::task::Poll;\n\n\n\nuse bytes::BufMut;\n\nuse common_base::tokio::io::ErrorKind;\n\nuse futures::ready;\n\nuse futures::stream::Fuse;\n\nuse futures::Future;\n\nuse futures::FutureExt;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\nuse rusoto_s3::GetObjectRequest;\n\nuse rusoto_s3::HeadObjectRequest;\n\nuse rusoto_s3::S3Client;\n\nuse rusoto_s3::StreamingBody;\n\nuse rusoto_s3::S3;\n\n\n", "file_path": "query/src/datasources/dal/impls/aws_s3/s3_input_stream.rs", "rank": 28, "score": 7.84943821346357 }, { "content": "use std::os::raw::c_char;\n\n\n\nuse byteorder::LittleEndian;\n\nuse byteorder::WriteBytesExt;\n\nuse chrono_tz::Tz;\n\nuse clickhouse_rs_cityhash_sys::city_hash_128;\n\nuse lz4::liblz4::LZ4_compressBound;\n\nuse lz4::liblz4::LZ4_compress_default;\n\n\n\npub use self::block_info::BlockInfo;\n\npub use self::builder::RCons;\n\npub use self::builder::RNil;\n\npub use self::builder::RowBuilder;\n\nuse self::chunk_iterator::ChunkIterator;\n\npub(crate) use self::row::BlockRef;\n\npub use self::row::Row;\n\npub use self::row::Rows;\n\nuse crate::binary::Encoder;\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Error;\n", "file_path": "common/clickhouse-srv/src/types/block/mod.rs", "rank": 29, "score": 7.848553306067031 }, { "content": "use common_arrow::arrow_flight::HandshakeRequest;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_kv_api_util::STORE_RUNTIME;\n\nuse common_kv_api_util::STORE_SYNC_CALL_TIMEOUT;\n\nuse common_tracing::tracing;\n\nuse futures::stream;\n\nuse futures::StreamExt;\n\nuse prost::Message;\n\nuse serde::de::DeserializeOwned;\n\nuse tonic::codegen::InterceptedService;\n\nuse tonic::metadata::MetadataValue;\n\nuse tonic::service::Interceptor;\n\nuse tonic::transport::Channel;\n\nuse tonic::Request;\n\n\n\nuse 
crate::common::flight_result_to_str;\n\nuse crate::store_client_conf::StoreClientConf;\n\nuse crate::store_do_action::RequestFor;\n\nuse crate::store_do_action::StoreDoAction;\n", "file_path": "common/store-api-sdk/src/store_client.rs", "rank": 30, "score": 7.848390318571404 }, { "content": "use common_base::tokio;\n\nuse common_base::tokio::sync::mpsc::channel;\n\nuse common_base::tokio::time::interval;\n\nuse common_base::ProgressValues;\n\nuse common_clickhouse_srv::types::Block as ClickHouseBlock;\n\nuse common_clickhouse_srv::CHContext;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_exception::Result;\n\nuse common_planners::InsertIntoPlan;\n\nuse common_planners::PlanNode;\n\nuse futures::channel::mpsc;\n\nuse futures::channel::mpsc::Receiver;\n\nuse futures::SinkExt;\n\nuse futures::StreamExt;\n\nuse metrics::histogram;\n\nuse tokio_stream::wrappers::IntervalStream;\n\nuse tokio_stream::wrappers::ReceiverStream;\n\n\n\nuse super::writers::from_clickhouse_block;\n", "file_path": "query/src/servers/clickhouse/interactive_worker_base.rs", "rank": 31, "score": 7.845060970660909 }, { "content": "use chrono_tz::Tz;\n\nuse common_io::prelude::Marshal;\n\nuse common_io::prelude::Unmarshal;\n\n\n\nuse crate::binary::Encoder;\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Result;\n\nuse crate::types::column::array::ArrayColumnData;\n\nuse crate::types::column::column_data::BoxColumnData;\n\nuse crate::types::column::column_data::ColumnData;\n\nuse crate::types::column::list::List;\n\nuse crate::types::column::nullable::NullableColumnData;\n\nuse crate::types::column::numeric::save_data;\n\nuse crate::types::column::ArcColumnWrapper;\n\nuse crate::types::column::ColumnFrom;\n\nuse crate::types::column::ColumnWrapper;\n\nuse crate::types::column::Either;\n\nuse crate::types::DateConverter;\n\nuse crate::types::SqlType;\n\nuse crate::types::StatBuffer;\n", "file_path": "common/clickhouse-srv/src/types/column/date.rs", 
"rank": 32, "score": 7.845060970660909 }, { "content": "use crate::errors::Result;\n\nuse crate::types::column::column_data::BoxColumnData;\n\nuse crate::types::column::column_data::ColumnData;\n\nuse crate::types::column::list::List;\n\nuse crate::types::column::nullable::NullableColumnData;\n\nuse crate::types::column::BoxColumnWrapper;\n\nuse crate::types::column::ColumnFrom;\n\nuse crate::types::column::ColumnWrapper;\n\nuse crate::types::column::Either;\n\nuse crate::types::column::VectorColumnData;\n\nuse crate::types::decimal::Decimal;\n\nuse crate::types::decimal::NoBits;\n\nuse crate::types::from_sql::FromSql;\n\nuse crate::types::Column;\n\nuse crate::types::ColumnType;\n\nuse crate::types::SqlType;\n\nuse crate::types::Value;\n\nuse crate::types::ValueRef;\n\n\n\npub(crate) struct DecimalColumnData {\n", "file_path": "common/clickhouse-srv/src/types/column/decimal.rs", "rank": 33, "score": 7.845038931275107 }, { "content": "use super::ColumnFrom;\n\nuse crate::binary::Encoder;\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Result;\n\nuse crate::types::column::array::ArrayColumnData;\n\nuse crate::types::column::list::List;\n\nuse crate::types::column::nullable::NullableColumnData;\n\nuse crate::types::column::ArcColumnWrapper;\n\nuse crate::types::column::ColumnWrapper;\n\nuse crate::types::column::Either;\n\nuse crate::types::column::StringPool;\n\nuse crate::types::Column;\n\nuse crate::types::ColumnType;\n\nuse crate::types::FromSql;\n\nuse crate::types::SqlType;\n\nuse crate::types::Value;\n\nuse crate::types::ValueRef;\n\n\n\npub(crate) struct StringColumnData {\n\n pool: StringPool,\n", "file_path": "common/clickhouse-srv/src/types/column/string.rs", "rank": 34, "score": 7.845014299166929 }, { "content": "use common_base::tokio::macros::support::Pin;\n\nuse common_base::tokio::macros::support::Poll;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_exception::Result;\n\nuse 
common_management::NodeInfo;\n\nuse common_planners::SelectPlan;\n\nuse common_streams::SendableDataBlockStream;\n\nuse common_tracing::tracing;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\n\n\nuse crate::api::CancelAction;\n\nuse crate::api::FlightAction;\n\nuse crate::interpreters::plan_scheduler::PlanScheduler;\n\nuse crate::interpreters::Interpreter;\n\nuse crate::interpreters::InterpreterPtr;\n\nuse crate::optimizers::Optimizers;\n\nuse crate::pipelines::processors::PipelineBuilder;\n\nuse crate::sessions::DatabendQueryContextRef;\n", "file_path": "query/src/interpreters/interpreter_select.rs", "rank": 35, "score": 7.838410741667426 }, { "content": "use common_base::tokio::task;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_planners::Part;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_planners::TableOptions;\n\nuse common_streams::ParquetStream;\n\nuse common_streams::SendableDataBlockStream;\n\nuse crossbeam::channel::bounded;\n\nuse crossbeam::channel::Receiver;\n\nuse crossbeam::channel::Sender;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct ParquetTable {\n", "file_path": "query/src/datasources/table/parquet/parquet_table.rs", "rank": 36, "score": 7.830164807827503 }, { "content": "use sqlparser::ast::Query;\n\nuse sqlparser::ast::Statement;\n\nuse sqlparser::ast::TableFactor;\n\nuse sqlparser::ast::UnaryOperator;\n\n\n\nuse crate::catalogs::Catalog;\n\nuse crate::functions::ContextFunction;\n\nuse crate::sessions::DatabendQueryContextRef;\n\nuse crate::sql::sql_statement::DfCreateTable;\n\nuse crate::sql::sql_statement::DfDropDatabase;\n\nuse crate::sql::sql_statement::DfUseDatabase;\n\nuse crate::sql::DfCreateDatabase;\n\nuse crate::sql::DfDescribeTable;\n\nuse crate::sql::DfDropTable;\n\nuse 
crate::sql::DfExplain;\n\nuse crate::sql::DfHint;\n\nuse crate::sql::DfKillStatement;\n\nuse crate::sql::DfParser;\n\nuse crate::sql::DfShowCreateTable;\n\nuse crate::sql::DfShowDatabases;\n", "file_path": "query/src/sql/plan_parser.rs", "rank": 37, "score": 7.829304820776408 }, { "content": "\n\nuse axum::handler::get;\n\nuse axum::routing::BoxRoute;\n\nuse axum::AddExtensionLayer;\n\nuse axum::Router;\n\nuse axum_server;\n\nuse axum_server::tls::TlsLoader;\n\nuse axum_server::Handle;\n\nuse common_base::tokio;\n\nuse common_base::tokio::task::JoinHandle;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse tokio_rustls::rustls::internal::pemfile::certs;\n\nuse tokio_rustls::rustls::internal::pemfile::pkcs8_private_keys;\n\nuse tokio_rustls::rustls::AllowAnyAuthenticatedClient;\n\nuse tokio_rustls::rustls::Certificate;\n\nuse tokio_rustls::rustls::NoClientAuth;\n\nuse tokio_rustls::rustls::PrivateKey;\n\nuse tokio_rustls::rustls::RootCertStore;\n\nuse tokio_rustls::rustls::ServerConfig;\n", "file_path": "query/src/api/http_service.rs", "rank": 38, "score": 7.828456511484589 }, { "content": "use common_planners::ExpressionPlan;\n\nuse common_planners::FilterPlan;\n\nuse common_planners::HavingPlan;\n\nuse common_planners::LimitByPlan;\n\nuse common_planners::LimitPlan;\n\nuse common_planners::PlanNode;\n\nuse common_planners::ProjectionPlan;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::RemotePlan;\n\nuse common_planners::SelectPlan;\n\nuse common_planners::SortPlan;\n\nuse common_planners::StagePlan;\n\nuse common_planners::SubQueriesSetPlan;\n\nuse common_tracing::tracing;\n\n\n\nuse crate::api::FlightTicket;\n\nuse crate::pipelines::processors::Pipeline;\n\nuse crate::pipelines::transforms::AggregatorFinalTransform;\n\nuse crate::pipelines::transforms::AggregatorPartialTransform;\n\nuse crate::pipelines::transforms::CreateSetsTransform;\n", "file_path": "query/src/pipelines/processors/pipeline_builder.rs", "rank": 39, 
"score": 7.828456511484589 }, { "content": "use common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_planners::AggregatorFinalPlan;\n\nuse common_planners::AggregatorPartialPlan;\n\nuse common_planners::EmptyPlan;\n\nuse common_planners::Expression;\n\nuse common_planners::ExpressionPlan;\n\nuse common_planners::ExpressionVisitor;\n\nuse common_planners::FilterPlan;\n\nuse common_planners::PlanBuilder;\n\nuse common_planners::PlanNode;\n\nuse common_planners::PlanRewriter;\n\nuse common_planners::ProjectionPlan;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::Recursion;\n\nuse common_planners::SortPlan;\n\n\n\nuse crate::optimizers::Optimizer;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct ProjectionPushDownOptimizer {}\n\n\n", "file_path": "query/src/optimizers/optimizer_projection_push_down.rs", "rank": 40, "score": 7.827514161181354 }, { "content": "use std::task::Poll;\n\nuse std::time::Duration;\n\n\n\nuse common_base::tokio;\n\nuse common_base::tokio::task::JoinHandle;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse hyper::client::connect::dns::Name;\n\nuse hyper::client::HttpConnector;\n\nuse hyper::service::Service;\n\nuse hyper::Uri;\n\nuse lazy_static::lazy_static;\n\nuse tonic::transport::Certificate;\n\nuse tonic::transport::Channel;\n\nuse tonic::transport::ClientTlsConfig;\n\nuse trust_dns_resolver::TokioAsyncResolver;\n\n\n\nuse crate::common::RpcClientTlsConfig;\n\n\n\npub struct DNSResolver {\n", "file_path": "common/store-api-sdk/src/dns_resolver.rs", "rank": 41, "score": 7.825276988206924 }, { "content": "use common_planners::Statistics;\n\nuse common_sled_store::get_sled_db;\n\nuse common_sled_store::sled;\n\nuse common_sled_store::AsKeySpace;\n\nuse common_sled_store::SledTree;\n\nuse common_tracing::tracing;\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\nuse sled::IVec;\n\n\n\nuse crate::config::RaftConfig;\n\nuse 
crate::sled_key_spaces::Files;\n\nuse crate::sled_key_spaces::GenericKV;\n\nuse crate::sled_key_spaces::Nodes;\n\nuse crate::sled_key_spaces::Sequences;\n\nuse crate::sled_key_spaces::StateMachineMeta;\n\nuse crate::state_machine::placement::rand_n_from_m;\n\nuse crate::state_machine::AppliedState;\n\nuse crate::state_machine::Placement;\n\nuse crate::state_machine::StateMachineMetaKey;\n", "file_path": "common/raft-store/src/state_machine/sm.rs", "rank": 42, "score": 7.825144050678376 }, { "content": "use std::fmt::Formatter;\n\n\n\npub use cluster::Node;\n\npub use cluster::Slot;\n\npub use cmd::Cmd;\n\npub use common_sled_store::KVMeta;\n\npub use common_sled_store::KVValue;\n\npub use common_sled_store::SeqValue;\n\npub use errors::ConflictSeq;\n\npub use log_entry::LogEntry;\n\npub use match_seq::MatchSeq;\n\npub use match_seq::MatchSeqExt;\n\npub use raft_txid::RaftTxId;\n\npub use raft_types::LogId;\n\npub use raft_types::LogIndex;\n\npub use raft_types::NodeId;\n\npub use raft_types::Term;\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\n\n", "file_path": "common/metatypes/src/lib.rs", "rank": 43, "score": 7.824018598486283 }, { "content": "use std::sync::Arc;\n\n\n\nuse chrono::prelude::*;\n\nuse chrono_tz::Tz;\n\nuse uuid::Uuid;\n\n\n\nuse crate::types::column::datetime64::to_datetime;\n\nuse crate::types::column::Either;\n\nuse crate::types::decimal::Decimal;\n\nuse crate::types::decimal::NoBits;\n\nuse crate::types::DateConverter;\n\nuse crate::types::DateTimeType;\n\nuse crate::types::Enum16;\n\nuse crate::types::Enum8;\n\nuse crate::types::HasSqlType;\n\nuse crate::types::SqlType;\n\n\n\npub(crate) type AppDateTime = DateTime<Tz>;\n\npub(crate) type AppDate = Date<Tz>;\n\n\n", "file_path": "common/clickhouse-srv/src/types/value.rs", "rank": 44, "score": 7.822402501328304 }, { "content": "use crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::types::block::ColumnIdx;\n\nuse crate::types::column::ArcColumnWrapper;\n\nuse 
crate::types::column::ColumnData;\n\nuse crate::types::column::Either;\n\nuse crate::types::Column;\n\nuse crate::types::ColumnType;\n\nuse crate::types::SqlType;\n\nuse crate::types::Value;\n\nuse crate::Block;\n\n\n", "file_path": "common/clickhouse-srv/src/types/block/builder.rs", "rank": 45, "score": 7.821848332197867 }, { "content": "use common_exception::Result;\n\nuse common_io::prelude::*;\n\nuse common_planners::PlanNode;\n\nuse metrics::histogram;\n\nuse msql_srv::ErrorKind;\n\nuse msql_srv::InitWriter;\n\nuse msql_srv::MysqlShim;\n\nuse msql_srv::ParamParser;\n\nuse msql_srv::QueryResultWriter;\n\nuse msql_srv::StatementMetaWriter;\n\nuse rand::RngCore;\n\nuse tokio_stream::StreamExt;\n\n\n\nuse crate::interpreters::InterpreterFactory;\n\nuse crate::servers::mysql::writers::DFInitResultWriter;\n\nuse crate::servers::mysql::writers::DFQueryResultWriter;\n\nuse crate::servers::server::mock::get_mock_user;\n\nuse crate::sessions::DatabendQueryContextRef;\n\nuse crate::sessions::SessionRef;\n\nuse crate::sql::PlanParser;\n\n\n", "file_path": "query/src/servers/mysql/mysql_interactive_worker.rs", "rank": 46, "score": 7.821834391894703 }, { "content": "pub use plan_limit::LimitPlan;\n\npub use plan_limit_by::LimitByPlan;\n\npub use plan_node::PlanNode;\n\npub use plan_partition::Part;\n\npub use plan_partition::Partitions;\n\npub use plan_projection::ProjectionPlan;\n\npub use plan_read_datasource::ReadDataSourcePlan;\n\npub use plan_remote::RemotePlan;\n\npub use plan_rewriter::PlanRewriter;\n\npub use plan_rewriter::RewriteHelper;\n\npub use plan_scan::ScanPlan;\n\npub use plan_select::SelectPlan;\n\npub use plan_setting::SettingPlan;\n\npub use plan_setting::VarValue;\n\npub use plan_show_table_create::ShowCreateTablePlan;\n\npub use plan_sort::SortPlan;\n\npub use plan_stage::StageKind;\n\npub use plan_stage::StagePlan;\n\npub use plan_statistics::Statistics;\n\npub use plan_subqueries_set::SubQueriesSetPlan;\n\npub use 
plan_table_create::CreateTablePlan;\n\npub use plan_table_create::TableOptions;\n\npub use plan_table_drop::DropTablePlan;\n\npub use plan_truncate_table::TruncateTablePlan;\n\npub use plan_use_database::UseDatabasePlan;\n\npub use plan_visitor::PlanVisitor;\n", "file_path": "common/planners/src/lib.rs", "rank": 47, "score": 7.82042542838054 }, { "content": "use combine::optional;\n\nuse combine::parser::char::digit;\n\nuse combine::parser::char::spaces;\n\nuse combine::parser::char::string;\n\nuse combine::sep_by1;\n\nuse combine::token;\n\nuse combine::Parser;\n\n\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Result;\n\nuse crate::types::column::array::ArrayColumnData;\n\nuse crate::types::column::column_data::ColumnData;\n\nuse crate::types::column::date::DateColumnData;\n\nuse crate::types::column::datetime64::DateTime64ColumnData;\n\nuse crate::types::column::decimal::DecimalColumnData;\n\nuse crate::types::column::enums::Enum16ColumnData;\n\nuse crate::types::column::enums::Enum8ColumnData;\n\nuse crate::types::column::fixed_string::FixedStringColumnData;\n\nuse crate::types::column::ip::IpColumnData;\n\nuse crate::types::column::ip::Ipv4;\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 48, "score": 7.818527531579711 }, { "content": "use common_arrow::arrow::datatypes::Schema as ArrowSchema;\n\nuse common_arrow::arrow_flight::FlightData;\n\nuse common_base::Runtime;\n\nuse common_cache::Cache;\n\nuse common_cache::LruCache;\n\nuse common_datavalues::DataSchema;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_infallible::Mutex;\n\nuse common_metatypes::MetaId;\n\nuse common_metatypes::MetaVersion;\n\nuse common_planners::CreateDatabasePlan;\n\nuse common_planners::CreateTablePlan;\n\nuse common_planners::DropDatabasePlan;\n\nuse common_planners::DropTablePlan;\n\n\n\nuse crate::catalogs::meta_backend::DatabaseInfo;\n\nuse crate::catalogs::meta_backend::MetaBackend;\n\nuse 
crate::catalogs::meta_backend::TableInfo;\n\nuse crate::common::StoreApiProvider;\n\n\n", "file_path": "query/src/catalogs/impls/meta_backends/remote_meta_backend.rs", "rank": 49, "score": 7.81522346618555 }, { "content": "use common_exception::Result;\n\nuse common_functions::aggregates::AggregateFunctionFactory;\n\nuse common_infallible::Mutex;\n\nuse common_planners::expand_aggregate_arg_exprs;\n\nuse common_planners::expand_wildcard;\n\nuse common_planners::expr_as_column_expr;\n\nuse common_planners::extract_aliases;\n\nuse common_planners::find_aggregate_exprs;\n\nuse common_planners::find_columns_not_satisfy_exprs;\n\nuse common_planners::rebase_expr;\n\nuse common_planners::rebase_expr_from_input;\n\nuse common_planners::resolve_aliases_to_exprs;\n\nuse common_planners::sort_to_inner_expr;\n\nuse common_planners::unwrap_alias_exprs;\n\nuse common_planners::CreateDatabasePlan;\n\nuse common_planners::CreateTablePlan;\n\nuse common_planners::DescribeTablePlan;\n\nuse common_planners::DropDatabasePlan;\n\nuse common_planners::DropTablePlan;\n\nuse common_planners::ExplainPlan;\n", "file_path": "query/src/sql/plan_parser.rs", "rank": 50, "score": 7.81359215711737 }, { "content": "\n\nuse chrono_tz::Tz;\n\n\n\nuse self::chunk::ChunkColumnData;\n\npub(crate) use self::column_data::ColumnData;\n\npub use self::concat::ConcatColumnData;\n\npub use self::numeric::VectorColumnData;\n\npub(crate) use self::string_pool::StringPool;\n\nuse crate::binary::Encoder;\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Error;\n\nuse crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::types::column::column_data::ArcColumnData;\n\nuse crate::types::column::decimal::DecimalAdapter;\n\nuse crate::types::column::decimal::NullableDecimalAdapter;\n\nuse crate::types::column::enums::Enum16Adapter;\n\nuse crate::types::column::enums::Enum8Adapter;\n\nuse crate::types::column::enums::NullableEnum16Adapter;\n\nuse 
crate::types::column::enums::NullableEnum8Adapter;\n", "file_path": "common/clickhouse-srv/src/types/column/mod.rs", "rank": 51, "score": 7.813550617658888 }, { "content": "\n\nuse super::column_data::BoxColumnData;\n\nuse super::column_data::ColumnData;\n\nuse super::list::List;\n\nuse super::ColumnFrom;\n\nuse crate::binary::Encoder;\n\nuse crate::binary::ReadEx;\n\nuse crate::errors::Result;\n\nuse crate::types::column::array::ArrayColumnData;\n\nuse crate::types::column::nullable::NullableColumnData;\n\nuse crate::types::column::ArcColumnWrapper;\n\nuse crate::types::column::ColumnWrapper;\n\nuse crate::types::HasSqlType;\n\nuse crate::types::SqlType;\n\nuse crate::types::StatBuffer;\n\nuse crate::types::Value;\n\nuse crate::types::ValueRef;\n\n\n\npub struct VectorColumnData<T>\n\nwhere T: StatBuffer\n", "file_path": "common/clickhouse-srv/src/types/column/numeric.rs", "rank": 52, "score": 7.813498693956884 }, { "content": "use crate::errors::Error;\n\nuse crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::types::column::datetime64::to_datetime;\n\nuse crate::types::column::Either;\n\nuse crate::types::value::decode_ipv4;\n\nuse crate::types::value::decode_ipv6;\n\nuse crate::types::Decimal;\n\nuse crate::types::Enum16;\n\nuse crate::types::Enum8;\n\nuse crate::types::SqlType;\n\nuse crate::types::ValueRef;\n\n\n\npub type FromSqlResult<T> = Result<T>;\n\n\n", "file_path": "common/clickhouse-srv/src/types/from_sql.rs", "rank": 53, "score": 7.813342926991397 }, { "content": "\n\nuse common_clickhouse_srv::connection::Connection;\n\nuse common_clickhouse_srv::errors::Result;\n\nuse common_clickhouse_srv::types::Block;\n\nuse common_clickhouse_srv::types::Progress;\n\nuse common_clickhouse_srv::CHContext;\n\nuse common_clickhouse_srv::ClickHouseServer;\n\nuse futures::task::Context;\n\nuse futures::task::Poll;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\nuse log::debug;\n\nuse log::info;\n\nuse tokio::net::TcpListener;\n\nuse 
tokio::sync::mpsc;\n\nuse tokio_stream::wrappers::ReceiverStream;\n\n\n\nextern crate common_clickhouse_srv;\n\n\n\n#[tokio::main]\n", "file_path": "common/clickhouse-srv/examples/simple.rs", "rank": 54, "score": 7.811377348744396 }, { "content": "mod group_hash_test;\n\n#[cfg(test)]\n\nmod if_test;\n\n#[cfg(test)]\n\nmod scatter_test;\n\n#[cfg(test)]\n\nmod take_random_test;\n\n#[cfg(test)]\n\nmod take_test;\n\n\n\npub use agg::*;\n\npub use apply::*;\n\npub use boolean::*;\n\npub use cast::*;\n\npub use contain::*;\n\npub use fill::*;\n\npub use group_hash::GroupHash;\n\npub use r#if::*;\n\npub use scatter::*;\n\npub use take::*;\n\npub use take_random::*;\n\npub use take_single::*;\n\npub use to_values::*;\n\npub use vec_hash::*;\n", "file_path": "common/datavalues/src/arrays/ops/mod.rs", "rank": 55, "score": 7.8089543642433465 }, { "content": "use common_exception::Result;\n\nuse common_planners::InsertIntoPlan;\n\nuse common_planners::Partitions;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_planners::TableOptions;\n\nuse common_planners::TruncateTablePlan;\n\nuse common_streams::ProgressStream;\n\nuse common_streams::SendableDataBlockStream;\n\nuse tokio_stream::wrappers::ReceiverStream;\n\nuse uuid::Uuid;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::datasources::dal::DataAccessor;\n\n//use crate::datasources::table::fuse::parse_storage_scheme;\n\nuse crate::datasources::table::fuse::project_col_idx;\n\nuse crate::datasources::table::fuse::range_filter;\n\nuse crate::datasources::table::fuse::read_part;\n\nuse crate::datasources::table::fuse::read_table_snapshot;\n", "file_path": "query/src/datasources/table/fuse/table.rs", "rank": 56, "score": 7.808623705998275 }, { "content": "use async_raft::async_trait::async_trait;\n\nuse async_raft::config::Config;\n\nuse async_raft::raft::ClientWriteRequest;\n\nuse async_raft::raft::Entry;\n\nuse async_raft::raft::EntryPayload;\n\nuse 
async_raft::raft::MembershipConfig;\n\nuse async_raft::storage::CurrentSnapshotData;\n\nuse async_raft::storage::HardState;\n\nuse async_raft::storage::InitialState;\n\nuse async_raft::ClientWriteError;\n\nuse async_raft::Raft;\n\nuse async_raft::RaftMetrics;\n\nuse async_raft::RaftStorage;\n\nuse async_raft::SnapshotMeta;\n\nuse async_raft::SnapshotPolicy;\n\nuse common_base::tokio;\n\nuse common_base::tokio::sync::watch;\n\nuse common_base::tokio::sync::Mutex;\n\nuse common_base::tokio::sync::RwLock;\n\nuse common_base::tokio::sync::RwLockWriteGuard;\n", "file_path": "metasrv/src/meta_service/raftmeta.rs", "rank": 57, "score": 7.807324736421997 }, { "content": "use tokio::io::AsyncReadExt;\n\nuse tokio::io::AsyncWriteExt;\n\nuse tokio::io::BufWriter;\n\nuse tokio::net::TcpStream;\n\n\n\nuse crate::binary::Encoder;\n\nuse crate::binary::Parser;\n\nuse crate::errors::Error;\n\nuse crate::errors::Result;\n\nuse crate::protocols::ExceptionResponse;\n\nuse crate::protocols::Packet;\n\nuse crate::protocols::SERVER_END_OF_STREAM;\n\nuse crate::types::Block;\n\nuse crate::types::Progress;\n\nuse crate::CHContext;\n\nuse crate::ClickHouseSession;\n\n\n\n/// Send and receive `Packet` values from a remote peer.\n\n///\n\n/// When implementing networking protocols, a message on that protocol is\n", "file_path": "common/clickhouse-srv/src/connection.rs", "rank": 58, "score": 7.807200319204874 }, { "content": "\n\nuse bytes::BytesMut;\n\nuse common_datavalues::prelude::*;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_io::prelude::*;\n\nuse num::traits::AsPrimitive;\n\n\n\nuse super::AggregateFunctionRef;\n\nuse super::StateAddr;\n\nuse crate::aggregates::assert_unary_params;\n\nuse crate::aggregates::assert_variadic_arguments;\n\nuse crate::aggregates::AggregateFunction;\n\nuse crate::dispatch_unsigned_numeric_types;\n\n\n", "file_path": "common/functions/src/aggregates/aggregate_window_funnel.rs", "rank": 59, "score": 7.802903474196343 }, 
{ "content": "use common_base::tokio::sync::oneshot::Sender;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::ToErrorCode;\n\nuse common_tracing::tracing;\n\nuse common_tracing::tracing::Instrument;\n\nuse metasrv::meta_service::MetaNode;\n\nuse tonic::transport;\n\nuse tonic::transport::Identity;\n\nuse tonic::transport::Server;\n\nuse transport::ServerTlsConfig;\n\n\n\nuse crate::api::rpc::StoreFlightImpl;\n\nuse crate::configs::Config;\n\nuse crate::dfs::Dfs;\n\nuse crate::localfs::LocalFS;\n\n\n\npub struct StoreServer {\n\n conf: Config,\n\n}\n\n\n", "file_path": "store/src/api/rpc_service.rs", "rank": 60, "score": 7.800014598779455 }, { "content": "pub use crate::columns::DataColumn;\n\npub use crate::columns::DataColumnCommon;\n\npub use crate::columns::DataColumnWithField;\n\npub use crate::columns::DataColumnsWithField;\n\npub use crate::data_array_filter::*;\n\npub use crate::data_value::DFTryFrom;\n\n// series\n\npub use crate::series::IntoSeries;\n\npub use crate::series::Series;\n\npub use crate::series::SeriesFrom;\n\npub use crate::series::SeriesTrait;\n\npub use crate::types::*;\n\npub use crate::utils::*;\n\npub use crate::DFHasher;\n\n// common structs\n\npub use crate::DataField;\n\npub use crate::DataGroupValue;\n\npub use crate::DataSchema;\n\npub use crate::DataSchemaRef;\n\npub use crate::DataSchemaRefExt;\n", "file_path": "common/datavalues/src/prelude.rs", "rank": 61, "score": 7.799636804698707 }, { "content": "mod comparison;\n\nmod ops;\n\nmod trusted_len;\n\nmod upstream_traits;\n\n\n\nmod boolean;\n\nmod list;\n\nmod null;\n\nmod primitive;\n\nmod string;\n\nmod r#struct;\n\n\n\npub use arithmetic::*;\n\npub use boolean::*;\n\npub use builder::*;\n\npub use comparison::*;\n\npub use list::*;\n\npub use null::*;\n\npub use ops::*;\n\npub use primitive::*;\n\npub use r#struct::*;\n\npub use string::*;\n\npub use trusted_len::*;\n\npub use upstream_traits::*;\n", "file_path": "common/datavalues/src/arrays/mod.rs", "rank": 62, 
"score": 7.797694201974938 }, { "content": "use common_arrow::arrow_flight::Action;\n\nuse common_arrow::arrow_flight::ActionType;\n\nuse common_arrow::arrow_flight::BasicAuth;\n\nuse common_arrow::arrow_flight::Criteria;\n\nuse common_arrow::arrow_flight::Empty;\n\nuse common_arrow::arrow_flight::FlightData;\n\nuse common_arrow::arrow_flight::FlightDescriptor;\n\nuse common_arrow::arrow_flight::FlightInfo;\n\nuse common_arrow::arrow_flight::HandshakeRequest;\n\nuse common_arrow::arrow_flight::HandshakeResponse;\n\nuse common_arrow::arrow_flight::PutResult;\n\nuse common_arrow::arrow_flight::SchemaResult;\n\nuse common_arrow::arrow_flight::Ticket;\n\nuse common_store_api_sdk::FlightClaim;\n\nuse common_store_api_sdk::FlightToken;\n\nuse common_store_api_sdk::StoreDoAction;\n\nuse common_tracing::tracing;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\nuse log::info;\n", "file_path": "metasrv/src/api/rpc/flight_service.rs", "rank": 63, "score": 7.791700084731321 }, { "content": "\n\nuse lazy_static::lazy_static;\n\nuse opentelemetry::global;\n\nuse opentelemetry::sdk::propagation::TraceContextPropagator;\n\nuse tracing::Subscriber;\n\nuse tracing_appender::non_blocking::WorkerGuard;\n\nuse tracing_appender::rolling::RollingFileAppender;\n\nuse tracing_appender::rolling::Rotation;\n\nuse tracing_bunyan_formatter::BunyanFormattingLayer;\n\nuse tracing_bunyan_formatter::JsonStorageLayer;\n\nuse tracing_subscriber::fmt;\n\nuse tracing_subscriber::fmt::Layer;\n\nuse tracing_subscriber::prelude::*;\n\nuse tracing_subscriber::registry::Registry;\n\nuse tracing_subscriber::EnvFilter;\n\n\n\nuse crate::tracing::subscriber::DefaultGuard;\n\n\n\n/// Write logs to stdout.\n", "file_path": "common/tracing/src/logging.rs", "rank": 64, "score": 7.790536777912607 }, { "content": "use sqlparser::ast::Ident;\n\nuse sqlparser::ast::JoinConstraint;\n\nuse sqlparser::ast::JoinOperator as SqlparserJoinOperator;\n\nuse sqlparser::ast::ObjectName;\n\nuse sqlparser::ast::Query as 
SqlparserQuery;\n\nuse sqlparser::ast::Select as SqlparserSelect;\n\nuse sqlparser::ast::SelectItem;\n\nuse sqlparser::ast::SetExpr as SqlparserSetExpr;\n\nuse sqlparser::ast::SetOperator as SqlparserSetOperator;\n\nuse sqlparser::ast::Statement as SqlparserStatement;\n\nuse sqlparser::ast::TableAlias as SqlparserTableAlias;\n\nuse sqlparser::ast::TableFactor;\n\nuse sqlparser::ast::TableWithJoins;\n\nuse sqlparser::ast::UnaryOperator as SqlparserUnaryOperator;\n\nuse sqlparser::ast::Value;\n\n\n\nuse super::AstTransformer;\n\nuse crate::sql::parser::ast::Statement::Explain;\n\nuse crate::sql::parser::ast::*;\n\n\n", "file_path": "query/src/sql/parser/transformer/transform_sqlparser.rs", "rank": 65, "score": 7.78923392105618 }, { "content": "use common_arrow::arrow::record_batch::RecordBatch;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::columns::DataColumn;\n\nuse common_datavalues::DataType;\n\nuse common_dfs_api_vo::BlockStream;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse futures::StreamExt;\n\nuse uuid::Uuid;\n\n\n\nuse crate::datasources::dal::DataAccessor;\n\nuse crate::datasources::table::fuse::block_location;\n\nuse crate::datasources::table::fuse::column_stats_reduce;\n\nuse crate::datasources::table::fuse::BlockLocation;\n\nuse crate::datasources::table::fuse::BlockMeta;\n\nuse crate::datasources::table::fuse::ColStats;\n\nuse crate::datasources::table::fuse::ColumnId;\n\nuse crate::datasources::table::fuse::FuseTable;\n\nuse crate::datasources::table::fuse::SegmentInfo;\n\nuse crate::datasources::table::fuse::Stats;\n", "file_path": "query/src/datasources/table/fuse/io/block_appender.rs", "rank": 66, "score": 7.785612066637417 }, { "content": "use common_base::tokio::task::JoinHandle;\n\nuse common_exception::prelude::ErrorCode;\n\nuse common_exception::prelude::ToErrorCode;\n\nuse common_metatypes::Cmd;\n\nuse common_metatypes::Database;\n\nuse common_metatypes::KVValue;\n\nuse 
common_metatypes::LogEntry;\n\nuse common_metatypes::Node;\n\nuse common_metatypes::NodeId;\n\nuse common_metatypes::SeqValue;\n\nuse common_metatypes::Table;\n\nuse common_raft_store::config::RaftConfig;\n\nuse common_raft_store::log::RaftLog;\n\nuse common_raft_store::state::RaftState;\n\nuse common_raft_store::state_machine::AppliedState;\n\nuse common_raft_store::state_machine::SerializableSnapshot;\n\nuse common_raft_store::state_machine::Snapshot;\n\nuse common_raft_store::state_machine::StateMachine;\n\nuse common_sled_store::get_sled_db;\n\nuse common_store_api_sdk::storage_api_impl::AppendResult;\n", "file_path": "metasrv/src/meta_service/raftmeta.rs", "rank": 67, "score": 7.785467720868187 }, { "content": "use common_arrow::arrow_flight::utils::flight_data_from_arrow_batch;\n\nuse common_arrow::arrow_flight::FlightData;\n\nuse common_base::tokio::sync::mpsc::Sender;\n\nuse common_exception::ErrorCode;\n\nuse common_planners::PlanNode;\n\nuse common_store_api_sdk::storage_api_impl::AppendResult;\n\nuse common_store_api_sdk::storage_api_impl::ReadAction;\n\nuse common_store_api_sdk::RequestFor;\n\nuse common_store_api_sdk::StoreDoAction;\n\nuse futures::Stream;\n\nuse metasrv::meta_service::MetaNode;\n\nuse serde::Serialize;\n\nuse tokio_stream::StreamExt;\n\nuse tonic::Status;\n\nuse tonic::Streaming;\n\n\n\nuse crate::data_part::appender::Appender;\n\nuse crate::fs::FileSystem;\n\n\n", "file_path": "store/src/executor/action_handler.rs", "rank": 68, "score": 7.782289828476766 }, { "content": "use common_base::tokio::task::JoinHandle;\n\nuse common_base::Runtime;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse futures::future::AbortHandle;\n\nuse futures::future::AbortRegistration;\n\nuse futures::future::Abortable;\n\nuse futures::StreamExt;\n\nuse msql_srv::*;\n\nuse tokio_stream::wrappers::TcpListenerStream;\n\n\n\nuse crate::servers::mysql::mysql_session::MySQLConnection;\n\nuse 
crate::servers::mysql::reject_connection::RejectConnection;\n\nuse crate::servers::server::ListeningStream;\n\nuse crate::servers::server::Server;\n\nuse crate::sessions::SessionManager;\n\nuse crate::sessions::SessionManagerRef;\n\n\n\npub struct MySQLHandler {\n\n sessions: SessionManagerRef,\n", "file_path": "query/src/servers/mysql/mysql_handler.rs", "rank": 69, "score": 7.782262558890881 }, { "content": "use common_datavalues::prelude::*;\n\nuse common_datavalues::DFTryFrom;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_io::prelude::*;\n\nuse num::traits::AsPrimitive;\n\n\n\nuse super::AggregateFunctionRef;\n\nuse super::StateAddr;\n\nuse crate::aggregates::aggregator_common::assert_unary_arguments;\n\nuse crate::aggregates::AggregateFunction;\n\nuse crate::with_match_primitive_type;\n\n\n", "file_path": "common/functions/src/aggregates/aggregate_sum.rs", "rank": 70, "score": 7.7820518458074055 }, { "content": "use common_arrow::arrow_flight::ActionType;\n\nuse common_arrow::arrow_flight::Criteria;\n\nuse common_arrow::arrow_flight::Empty;\n\nuse common_arrow::arrow_flight::FlightData;\n\nuse common_arrow::arrow_flight::FlightDescriptor;\n\nuse common_arrow::arrow_flight::FlightInfo;\n\nuse common_arrow::arrow_flight::HandshakeRequest;\n\nuse common_arrow::arrow_flight::HandshakeResponse;\n\nuse common_arrow::arrow_flight::PutResult;\n\nuse common_arrow::arrow_flight::Result as FlightResult;\n\nuse common_arrow::arrow_flight::SchemaResult;\n\nuse common_arrow::arrow_flight::Ticket;\n\nuse tokio_stream::Stream;\n\nuse tonic::Request;\n\nuse tonic::Response as RawResponse;\n\nuse tonic::Status;\n\nuse tonic::Streaming;\n\n\n\nuse crate::api::rpc::flight_actions::FlightAction;\n\nuse crate::api::rpc::flight_dispatcher::DatabendQueryFlightDispatcher;\n", "file_path": "query/src/api/rpc/flight_service.rs", "rank": 71, "score": 7.779062202914619 }, { "content": "pub use plan_expression_common::rebase_expr_from_input;\n\npub use 
plan_expression_common::resolve_aliases_to_exprs;\n\npub use plan_expression_common::sort_to_inner_expr;\n\npub use plan_expression_common::unwrap_alias_exprs;\n\npub use plan_expression_function::add;\n\npub use plan_expression_function::avg;\n\npub use plan_expression_function::modular;\n\npub use plan_expression_function::not;\n\npub use plan_expression_function::sum;\n\npub use plan_expression_literal::lit;\n\npub use plan_expression_rewriter::ExprRewriter;\n\npub use plan_expression_sort::sort;\n\npub use plan_expression_validator::validate_expression;\n\npub use plan_expression_visitor::ExpressionVisitor;\n\npub use plan_expression_visitor::Recursion;\n\npub use plan_extras::Extras;\n\npub use plan_filter::FilterPlan;\n\npub use plan_having::HavingPlan;\n\npub use plan_insert_into::InsertIntoPlan;\n\npub use plan_kill::KillPlan;\n", "file_path": "common/planners/src/lib.rs", "rank": 72, "score": 7.776137846774479 }, { "content": "use std::ops::Sub;\n\n\n\nuse common_arrow::arrow::array::Array;\n\nuse common_arrow::arrow::array::PrimitiveArray;\n\nuse common_arrow::arrow::array::UInt64Array;\n\nuse common_arrow::arrow::compute::arithmetics::basic;\n\nuse common_arrow::arrow::compute::arithmetics::negate;\n\nuse common_arrow::arrow::compute::arity::unary;\n\nuse common_arrow::arrow::datatypes::DataType as ArrowDataType;\n\nuse common_arrow::arrow::error::ArrowError;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse num::cast::AsPrimitive;\n\nuse num::NumCast;\n\nuse num::ToPrimitive;\n\nuse strength_reduce::StrengthReducedU64;\n\n\n\nuse crate::arrays::ops::*;\n\nuse crate::prelude::*;\n\n\n\n/// TODO: sundy\n\n/// check division by zero in rem & div ops\n", "file_path": "common/datavalues/src/arrays/arithmetic.rs", "rank": 73, "score": 7.771324777419694 }, { "content": "use crate::KillPlan;\n\nuse crate::LimitByPlan;\n\nuse crate::LimitPlan;\n\nuse crate::PlanBuilder;\n\nuse crate::PlanNode;\n\nuse crate::ProjectionPlan;\n\nuse 
crate::ReadDataSourcePlan;\n\nuse crate::RemotePlan;\n\nuse crate::ScanPlan;\n\nuse crate::SelectPlan;\n\nuse crate::SettingPlan;\n\nuse crate::ShowCreateTablePlan;\n\nuse crate::SortPlan;\n\nuse crate::StagePlan;\n\nuse crate::TruncateTablePlan;\n\nuse crate::UseDatabasePlan;\n\n\n\n/// `PlanRewriter` is a visitor that can help to rewrite `PlanNode`\n\n/// By default, a `PlanRewriter` will traverse the plan tree in pre-order and return rewritten plan tree.\n\n/// Every `rewrite_xxx` method should return a new `PlanNode`(in default implementation it will return a clone of given plan node)\n", "file_path": "common/planners/src/plan_rewriter.rs", "rank": 74, "score": 7.771324777419694 }, { "content": "use common_planners::AggregatorFinalPlan;\n\nuse common_planners::AggregatorPartialPlan;\n\nuse common_planners::BroadcastPlan;\n\nuse common_planners::Expression;\n\nuse common_planners::LimitByPlan;\n\nuse common_planners::LimitPlan;\n\nuse common_planners::PlanBuilder;\n\nuse common_planners::PlanNode;\n\nuse common_planners::PlanRewriter;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::SortPlan;\n\nuse common_planners::StageKind;\n\nuse common_planners::StagePlan;\n\n\n\nuse crate::optimizers::Optimizer;\n\nuse crate::sessions::DatabendQueryContext;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct ScattersOptimizer {\n\n ctx: DatabendQueryContextRef,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "query/src/optimizers/optimizer_scatters.rs", "rank": 75, "score": 7.7706132995732125 }, { "content": "use common_base::Runtime;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse futures::future::AbortHandle;\n\nuse futures::future::AbortRegistration;\n\nuse futures::stream::Abortable;\n\nuse futures::Future;\n\nuse futures::StreamExt;\n\nuse tokio_stream::wrappers::TcpListenerStream;\n\n\n\nuse crate::servers::clickhouse::clickhouse_session::ClickHouseConnection;\n\nuse 
crate::servers::clickhouse::reject_connection::RejectCHConnection;\n\nuse crate::servers::server::ListeningStream;\n\nuse crate::servers::server::Server;\n\nuse crate::sessions::SessionManager;\n\nuse crate::sessions::SessionManagerRef;\n\n\n\npub struct ClickHouseHandler {\n\n sessions: SessionManagerRef,\n\n\n", "file_path": "query/src/servers/clickhouse/clickhouse_handler.rs", "rank": 76, "score": 7.765674414138493 }, { "content": "use crate::CreateDatabasePlan;\n\nuse crate::CreateTablePlan;\n\nuse crate::DropDatabasePlan;\n\nuse crate::DropTablePlan;\n\nuse crate::Expression;\n\nuse crate::ExpressionPlan;\n\nuse crate::LimitPlan;\n\nuse crate::PlanNode;\n\nuse crate::ProjectionPlan;\n\nuse crate::ReadDataSourcePlan;\n\nuse crate::SortPlan;\n\nuse crate::StagePlan;\n\nuse crate::SubQueriesSetPlan;\n\n\n\npub struct PlanNodeIndentFormatDisplay<'a> {\n\n indent: usize,\n\n node: &'a PlanNode,\n\n printed_indent: bool,\n\n}\n\n\n", "file_path": "common/planners/src/plan_display_indent.rs", "rank": 77, "score": 7.763057697124784 }, { "content": "use common_base::tokio::sync::*;\n\nuse common_datablocks::DataBlock;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_exception::ToErrorCode;\n\nuse common_infallible::RwLock;\n\nuse tokio_stream::StreamExt;\n\n\n\nuse crate::api::rpc::flight_scatter::FlightScatter;\n\nuse crate::api::rpc::flight_scatter_broadcast::BroadcastFlightScatter;\n\nuse crate::api::rpc::flight_scatter_hash::HashFlightScatter;\n\nuse crate::api::rpc::flight_tickets::StreamTicket;\n\nuse crate::api::FlightAction;\n\nuse crate::pipelines::processors::PipelineBuilder;\n\nuse crate::sessions::DatabendQueryContext;\n\nuse crate::sessions::SessionRef;\n\n\n", "file_path": "query/src/api/rpc/flight_dispatcher.rs", "rank": 78, "score": 7.762866482669966 }, { "content": "use common_infallible::RwLock;\n\nuse common_metatypes::MetaId;\n\nuse common_metatypes::MetaVersion;\n\nuse 
common_planners::CreateDatabasePlan;\n\nuse common_planners::DropDatabasePlan;\n\n\n\nuse crate::catalogs::catalog::Catalog;\n\nuse crate::catalogs::impls::meta_backends::EmbeddedMetaBackend;\n\nuse crate::catalogs::impls::meta_backends::RemoteMeteStoreClient;\n\nuse crate::catalogs::meta_backend::DatabaseInfo;\n\nuse crate::catalogs::meta_backend::MetaBackend;\n\nuse crate::catalogs::Database;\n\nuse crate::catalogs::TableFunctionMeta;\n\nuse crate::catalogs::TableMeta;\n\nuse crate::common::StoreApiProvider;\n\nuse crate::configs::Config;\n\nuse crate::datasources::database::prelude::register_prelude_db_engines;\n\nuse crate::datasources::database_engine::DatabaseEngine;\n\nuse crate::datasources::database_engine_registry::DatabaseEngineRegistry;\n\nuse crate::datasources::database_engine_registry::EngineDescription;\n", "file_path": "query/src/catalogs/impls/catalog/metastore_catalog.rs", "rank": 79, "score": 7.762735657154542 }, { "content": "use crate::errors::FromSqlError;\n\nuse crate::errors::Result;\n\nuse crate::protocols;\n\nuse crate::types::column::ArcColumnWrapper;\n\nuse crate::types::column::Column;\n\nuse crate::types::column::ColumnFrom;\n\nuse crate::types::column::{self};\n\nuse crate::types::ColumnType;\n\nuse crate::types::Complex;\n\nuse crate::types::FromSql;\n\nuse crate::types::Simple;\n\nuse crate::types::SqlType;\n\n\n\nmod block_info;\n\nmod builder;\n\nmod chunk_iterator;\n\nmod compressed;\n\nmod row;\n\n\n\nconst INSERT_BLOCK_SIZE: usize = 1_048_576;\n\n\n\nconst DEFAULT_CAPACITY: usize = 100;\n\n\n", "file_path": "common/clickhouse-srv/src/types/block/mod.rs", "rank": 80, "score": 7.761423144307057 }, { "content": "pub use function_column::ColumnFunction;\n\npub use function_factory::FactoryFuncRef;\n\npub use function_factory::FunctionFactory;\n\npub use function_literal::LiteralFunction;\n\npub use hashes::*;\n\npub use logics::*;\n\npub use nullables::*;\n\npub use strings::*;\n\npub use udfs::*;\n", "file_path": 
"common/functions/src/scalars/mod.rs", "rank": 81, "score": 7.76121821554247 }, { "content": "use crate::sql::DfCreateTable;\n\nuse crate::sql::DfDescribeTable;\n\nuse crate::sql::DfDropDatabase;\n\nuse crate::sql::DfDropTable;\n\nuse crate::sql::DfExplain;\n\nuse crate::sql::DfHint;\n\nuse crate::sql::DfKillStatement;\n\nuse crate::sql::DfShowCreateTable;\n\nuse crate::sql::DfShowDatabases;\n\nuse crate::sql::DfShowProcessList;\n\nuse crate::sql::DfShowSettings;\n\nuse crate::sql::DfShowTables;\n\nuse crate::sql::DfStatement;\n\nuse crate::sql::DfTruncateTable;\n\nuse crate::sql::DfUseDatabase;\n\n\n\n// Use `Parser::expected` instead, if possible\n\nmacro_rules! parser_err {\n\n ($MSG:expr) => {\n\n Err(ParserError::ParserError($MSG.to_string().into()))\n", "file_path": "query/src/sql/sql_parser.rs", "rank": 82, "score": 7.760390581851515 }, { "content": "use common_datavalues::DataField;\n\nuse common_datavalues::DataSchemaRef;\n\nuse common_datavalues::DataSchemaRefExt;\n\nuse common_datavalues::DataType;\n\nuse common_exception::Result;\n\nuse common_planners::Part;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_streams::DataBlockStream;\n\nuse common_streams::SendableDataBlockStream;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::sessions::DatabendQueryContextRef;\n\nuse crate::sessions::ProcessInfo;\n\n\n\npub struct ProcessesTable {\n\n schema: DataSchemaRef,\n\n}\n\n\n", "file_path": "query/src/datasources/database/system/processes_table.rs", "rank": 83, "score": 7.755621053872908 }, { "content": "use common_arrow::arrow_flight::Action;\n\nuse common_arrow::arrow_flight::ActionType;\n\nuse common_arrow::arrow_flight::BasicAuth;\n\nuse common_arrow::arrow_flight::Criteria;\n\nuse common_arrow::arrow_flight::Empty;\n\nuse common_arrow::arrow_flight::FlightData;\n\nuse common_arrow::arrow_flight::FlightDescriptor;\n\nuse common_arrow::arrow_flight::FlightInfo;\n\nuse 
common_arrow::arrow_flight::HandshakeRequest;\n\nuse common_arrow::arrow_flight::HandshakeResponse;\n\nuse common_arrow::arrow_flight::PutResult;\n\nuse common_arrow::arrow_flight::SchemaResult;\n\nuse common_arrow::arrow_flight::Ticket;\n\nuse common_base::tokio;\n\nuse common_base::tokio::sync::mpsc::Receiver;\n\nuse common_base::tokio::sync::mpsc::Sender;\n\nuse common_store_api_sdk::storage_api_impl;\n\nuse common_store_api_sdk::FlightClaim;\n\nuse common_store_api_sdk::FlightToken;\n\nuse common_store_api_sdk::StoreDoAction;\n", "file_path": "store/src/api/rpc/flight_service.rs", "rank": 84, "score": 7.754454858646989 }, { "content": "use lazy_static::lazy_static;\n\nuse unicase::UniCase;\n\n\n\nuse crate::scalars::ArithmeticFunction;\n\nuse crate::scalars::ComparisonFunction;\n\nuse crate::scalars::ConditionalFunction;\n\nuse crate::scalars::DateFunction;\n\nuse crate::scalars::Function;\n\nuse crate::scalars::HashesFunction;\n\nuse crate::scalars::LogicFunction;\n\nuse crate::scalars::NullableFunction;\n\nuse crate::scalars::StringFunction;\n\nuse crate::scalars::ToCastFunction;\n\nuse crate::scalars::UdfFunction;\n\n\n\npub struct FunctionFactory;\n\npub type FactoryFunc = fn(name: &str) -> Result<Box<dyn Function>>;\n\n\n", "file_path": "common/functions/src/scalars/function_factory.rs", "rank": 85, "score": 7.753543863106994 }, { "content": "use common_planners::SortPlan;\n\nuse common_planners::StageKind;\n\nuse common_planners::StagePlan;\n\nuse common_planners::SubQueriesSetPlan;\n\nuse common_tracing::tracing;\n\n\n\nuse crate::api::BroadcastAction;\n\nuse crate::api::FlightAction;\n\nuse crate::api::ShuffleAction;\n\nuse crate::catalogs::TablePtr;\n\nuse crate::sessions::DatabendQueryContext;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n", "file_path": "query/src/interpreters/plan_scheduler.rs", "rank": 86, "score": 7.748261485815671 }, { "content": "use common_arrow::arrow::compute::aggregate::sum_primitive;\n\nuse 
common_arrow::arrow::types::simd::Simd;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse num::cast::AsPrimitive;\n\nuse num::Num;\n\nuse num::NumCast;\n\nuse num::Zero;\n\n\n\nuse crate::prelude::*;\n\n\n\n/// Same common aggregators\n", "file_path": "common/datavalues/src/arrays/ops/agg.rs", "rank": 87, "score": 7.747489994306716 }, { "content": "use common_dfs_api_vo::ReadPlanResult;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_planners::InsertIntoPlan;\n\nuse common_planners::Part;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_planners::TableOptions;\n\nuse common_planners::TruncateTablePlan;\n\nuse common_streams::SendableDataBlockStream;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::common::StoreApiProvider;\n\nuse crate::datasources::table_engine::TableEngine;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\n#[allow(dead_code)]\n\npub struct RemoteTable {\n\n pub(crate) db: String,\n", "file_path": "query/src/datasources/table/remote/remote_table.rs", "rank": 88, "score": 7.745076677070525 }, { "content": "pub use plan_database_create::CreateDatabasePlan;\n\npub use plan_database_create::DatabaseOptions;\n\npub use plan_database_drop::DropDatabasePlan;\n\npub use plan_describe_table::DescribeTablePlan;\n\npub use plan_empty::EmptyPlan;\n\npub use plan_explain::ExplainPlan;\n\npub use plan_explain::ExplainType;\n\npub use plan_expression::Expression;\n\npub use plan_expression::ExpressionPlan;\n\npub use plan_expression::Expressions;\n\npub use plan_expression_action::*;\n\npub use plan_expression_chain::ExpressionChain;\n\npub use plan_expression_column::col;\n\npub use plan_expression_common::expand_aggregate_arg_exprs;\n\npub use plan_expression_common::expand_wildcard;\n\npub use plan_expression_common::expr_as_column_expr;\n\npub use plan_expression_common::extract_aliases;\n\npub use 
plan_expression_common::find_aggregate_exprs;\n\npub use plan_expression_common::find_columns_not_satisfy_exprs;\n\npub use plan_expression_common::rebase_expr;\n", "file_path": "common/planners/src/lib.rs", "rank": 89, "score": 7.742118784731868 }, { "content": "use common_datavalues::prelude::*;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_io::prelude::*;\n\n\n\nuse super::StateAddr;\n\nuse crate::aggregates::assert_unary_arguments;\n\nuse crate::aggregates::AggregateFunction;\n\nuse crate::aggregates::AggregateFunctionRef;\n\nuse crate::with_match_primitive_type;\n\n\n", "file_path": "common/functions/src/aggregates/aggregate_min_max.rs", "rank": 90, "score": 7.74064407969112 }, { "content": "use common_datavalues::prelude::*;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_io::prelude::*;\n\n\n\nuse super::StateAddr;\n\nuse crate::aggregates::assert_binary_arguments;\n\nuse crate::aggregates::AggregateFunction;\n\nuse crate::aggregates::AggregateFunctionRef;\n\nuse crate::with_match_primitive_type;\n\n\n", "file_path": "common/functions/src/aggregates/aggregate_arg_min_max.rs", "rank": 91, "score": 7.74064407969112 }, { "content": "use common_datavalues::DataType;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_planners::Part;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_streams::SendableDataBlockStream;\n\nuse common_tracing::tracing;\n\nuse walkdir::WalkDir;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::datasources::database::system::TracingTableStream;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct TracingTable {\n\n schema: DataSchemaRef,\n\n}\n\n\n\nimpl TracingTable {\n", "file_path": "query/src/datasources/database/system/tracing_table.rs", "rank": 92, "score": 7.7393167548162065 }, { "content": "use common_datavalues::DataSchemaRefExt;\n\nuse 
common_datavalues::DataType;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_planners::Expression;\n\nuse common_planners::ReadDataSourcePlan;\n\nuse common_planners::ScanPlan;\n\nuse common_planners::Statistics;\n\nuse common_streams::SendableDataBlockStream;\n\n\n\nuse crate::catalogs::Table;\n\nuse crate::catalogs::TableFunction;\n\nuse crate::datasources::common::generate_parts;\n\nuse crate::datasources::database::system::NumbersStream;\n\nuse crate::sessions::DatabendQueryContextRef;\n\n\n\npub struct NumbersTable {\n\n table: &'static str,\n\n schema: DataSchemaRef,\n\n}\n", "file_path": "query/src/datasources/database/system/numbers_table.rs", "rank": 93, "score": 7.738004806645645 }, { "content": "\n\nuse common_base::tokio;\n\nuse common_base::tokio::sync::mpsc::Receiver;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_infallible::RwLock;\n\nuse futures::future::Either;\n\nuse metrics::counter;\n\n\n\nuse crate::catalogs::impls::DatabaseCatalog;\n\nuse crate::catalogs::Catalog;\n\nuse crate::clusters::ClusterDiscoveryRef;\n\nuse crate::configs::Config;\n\nuse crate::datasources::database::example::ExampleDatabaseEngine;\n\nuse crate::sessions::session::Session;\n\nuse crate::sessions::session_ref::SessionRef;\n\n\n\npub struct SessionManager {\n\n pub(in crate::sessions) conf: Config,\n\n pub(in crate::sessions) discovery: ClusterDiscoveryRef,\n", "file_path": "query/src/sessions/sessions.rs", "rank": 94, "score": 7.738004806645645 }, { "content": "use common_datavalues::DFTryFrom;\n\nuse common_exception::ErrorCode;\n\nuse common_exception::Result;\n\nuse common_io::prelude::*;\n\nuse num::cast::AsPrimitive;\n\nuse num::NumCast;\n\n\n\nuse super::StateAddr;\n\nuse crate::aggregates::aggregator_common::assert_unary_arguments;\n\nuse crate::aggregates::AggregateFunction;\n\nuse crate::aggregates::AggregateFunctionRef;\n\nuse crate::with_match_primitive_type;\n\n\n\n// count = 0 means 
it's all nullable\n\n// so we do not need option like sum\n", "file_path": "common/functions/src/aggregates/aggregate_avg.rs", "rank": 95, "score": 7.737063147459175 }, { "content": "use std::io;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\n\n\nuse filetime::set_file_times;\n\nuse filetime::FileTime;\n\nuse ritelinked::DefaultHashBuilder;\n\nuse walkdir::WalkDir;\n\n\n\nuse crate::Cache;\n\nuse crate::FileSize;\n\nuse crate::LruCache;\n\n\n\n/// Return an iterator of `(path, size)` of files under `path` sorted by ascending last-modified\n\n/// time, such that the oldest modified file is returned first.\n", "file_path": "common/cache/src/disk_cache.rs", "rank": 96, "score": 7.737063147459175 }, { "content": "mod env;\n\nmod helps;\n\nmod packages;\n\nmod processor;\n\nmod status;\n\nmod versions;\n\nmod writer;\n\n\n\npub use comments::comment::CommentCommand;\n\npub use config::Config;\n\npub use env::Env;\n\npub use helps::help::HelpCommand;\n\npub use packages::fetch::FetchCommand;\n\npub use packages::list::ListCommand;\n\npub use packages::package::PackageCommand;\n\npub use packages::switch::SwitchCommand;\n\npub use processor::Processor;\n\npub use status::Status;\n\npub use versions::version::VersionCommand;\n\npub use writer::Writer;\n", "file_path": "cli/src/cmds/mod.rs", "rank": 97, "score": 7.735721151903395 }, { "content": "pub mod prelude;\n\npub mod series;\n\npub mod types;\n\n\n\n/// third partry\n\npub use chrono;\n\npub use chrono_tz::Tz;\n\n/// Own\n\npub use data_array_filter::*;\n\npub use data_field::DataField;\n\npub use data_group_value::DataGroupValue;\n\npub use data_hasher::*;\n\npub use data_schema::DataSchema;\n\npub use data_schema::DataSchemaRef;\n\npub use data_schema::DataSchemaRefExt;\n\npub use data_value::DFTryFrom;\n\npub use data_value::DataValue;\n\npub use data_value::DataValueRef;\n\npub use data_value_operator::*;\n\npub use types::*;\n", "file_path": "common/datavalues/src/lib.rs", 
"rank": 98, "score": 7.733613242066679 }, { "content": "pub use interpreter_database_drop::DropDatabaseInterpreter;\n\npub use interpreter_describe_table::DescribeTableInterpreter;\n\npub use interpreter_explain::ExplainInterpreter;\n\npub use interpreter_factory::InterpreterFactory;\n\npub use interpreter_insert_into::InsertIntoInterpreter;\n\npub use interpreter_select::SelectInterpreter;\n\npub use interpreter_setting::SettingInterpreter;\n\npub use interpreter_show_create_table::ShowCreateTableInterpreter;\n\npub use interpreter_table_create::CreateTableInterpreter;\n\npub use interpreter_table_drop::DropTableInterpreter;\n\npub use interpreter_truncate_table::TruncateTableInterpreter;\n\npub use interpreter_use_database::UseDatabaseInterpreter;\n", "file_path": "query/src/interpreters/mod.rs", "rank": 99, "score": 7.733613242066679 } ]
Rust
src/raycasting_engine.rs
N-Hoque/rustenstein3D
97a483eeb20977a4d241d7b68e5be12e673fcad9
use rsfml::{ graphics::{ Color, PrimitiveType, RenderStates, RenderTarget, RenderWindow, Vertex, VertexArray, }, system::{Vector2f, Vector2i}, window::Key, }; use crate::{event_handler::EventHandler, map::Map, texture_loader::TextureLoader}; pub struct RaycastEngine { player_position: Vector2f, vector_direction: Vector2f, cam_plane: Vector2f, map: Map, window_size: Vector2f, vertex_array: Vec<Box<VertexArray>>, textures_id: Vec<i32>, ground: Vec<Box<VertexArray>>, sky: Vec<Box<VertexArray>>, no_ground: bool, } impl RaycastEngine { pub fn new(map: Map, window_size: &Vector2f, no_ground: bool) -> RaycastEngine { RaycastEngine { player_position: Vector2f { x: 22., y: 12. }, vector_direction: Vector2f { x: -1., y: 0. }, cam_plane: Vector2f { x: 0., y: 0.66 }, map, window_size: Vector2f { x: window_size.x, y: window_size.y - 80., }, vertex_array: RaycastEngine::create_line_array(window_size), textures_id: Vec::new(), ground: RaycastEngine::create_ground_array(window_size), sky: RaycastEngine::create_ground_array(window_size), no_ground, } } pub fn update(&mut self, event_handler: &EventHandler) { self.textures_id.clear(); let ray_pos = self.player_position.clone(); let mut ray_dir = Vector2f { x: 0., y: 0. }; let mut map_pos = Vector2i { x: 0, y: 0 }; let mut side_dist = Vector2f { x: 0., y: 0. }; let mut delta_dist = Vector2f { x: 0., y: 0. }; let mut step = Vector2i { x: 0, y: 0 }; let mut draw_start: i32 = 0; let mut draw_end: i32 = 0; let mut camera_x: f32; let mut side: i32; let mut perpendicular_wall_dist: f32 = 0.; let mut wall_x: f32 = 0.; for x in 0..(self.window_size.x as i32) { camera_x = 2. * x as f32 / self.window_size.x - 1.; ray_dir.x = self.vector_direction.x + self.cam_plane.x * camera_x; ray_dir.y = self.vector_direction.y + self.cam_plane.y * camera_x; map_pos.x = ray_pos.x as i32; map_pos.y = ray_pos.y as i32; delta_dist.x = (1. + (ray_dir.y * ray_dir.y) / (ray_dir.x * ray_dir.x)).sqrt(); delta_dist.y = (1. 
+ (ray_dir.x * ray_dir.x) / (ray_dir.y * ray_dir.y)).sqrt(); side = 0; self.calculate_step( &ray_dir, &mut step, &ray_pos, &map_pos, &delta_dist, &mut side_dist, ); self.hit_wall( &mut map_pos, &mut side_dist, &mut step, &mut delta_dist, &mut side, ); self.calculate_wall_height( side, &mut draw_start, &mut draw_end, &map_pos, &ray_pos, &ray_dir, &step, &mut perpendicular_wall_dist, ); self.calculate_wall_texture( side, &ray_dir, x, &map_pos, &step, &ray_pos, draw_end, draw_start, &mut wall_x, ); if !self.no_ground { self.calculate_ground( side, &map_pos, wall_x, &ray_dir, perpendicular_wall_dist, &mut draw_end, x, ); } } self.update_events(event_handler); } fn calculate_ground( &mut self, side: i32, map_pos: &Vector2i, wall_x: f32, ray_dir: &Vector2f, perpendicular_wall_dist: f32, draw_end: &mut i32, x: i32, ) { if *draw_end < 0 { *draw_end = self.window_size.y as i32; } self.ground.get_mut(x as usize).unwrap().clear(); self.sky.get_mut(x as usize).unwrap().clear(); let mut vertex = Vertex::default(); let mut current_dist: f32; let mut weight: f32; let mut current_floor = Vector2f { x: 0., y: 0. }; let mut tex_coord = Vector2f { x: 0., y: 0. }; let mut pos = Vector2f { x: x as f32, y: 0. }; let dist_player: f32 = 0.; let (map_pos_x, map_pos_y) = (map_pos.x as f32, map_pos.y as f32); let floor = if side == 0 && ray_dir.x > 0. { Vector2f { x: map_pos_x, y: map_pos_y + wall_x, } } else if side == 0 && ray_dir.x < 0. { Vector2f { x: map_pos_x + 1., y: map_pos_y + wall_x, } } else if side == 1 && ray_dir.y > 0. { Vector2f { x: map_pos_x + wall_x, y: map_pos_y, } } else { Vector2f { x: map_pos_x + wall_x, y: map_pos_y + 1., } }; for y in (*draw_end + 1)..(self.window_size.y as i32) { current_dist = self.window_size.y / (2. * y as f32 - self.window_size.y as f32); weight = (current_dist - dist_player) / (perpendicular_wall_dist - dist_player); current_floor.x = weight * floor.x + (1. - weight) * self.player_position.x; current_floor.y = weight * floor.y + (1. 
- weight) * self.player_position.y; tex_coord.x = ((current_floor.x * 128.) as i32 % 128) as f32; tex_coord.y = ((current_floor.y * 128.) as i32 % 128) as f32; pos.y = y as f32; vertex.position.x = pos.x; vertex.position.y = pos.y; vertex.tex_coords.x = tex_coord.x; vertex.tex_coords.y = tex_coord.y; self.ground.get_mut(x as usize).unwrap().append(&vertex); pos.y = self.window_size.y - y as f32; vertex.position.x = pos.x; vertex.position.y = pos.y; vertex.tex_coords.x = tex_coord.x; vertex.tex_coords.y = tex_coord.y; self.sky.get_mut(x as usize).unwrap().append(&vertex); } } fn calculate_wall_height( &mut self, side: i32, draw_start: &mut i32, draw_end: &mut i32, map_pos: &Vector2i, ray_pos: &Vector2f, ray_dir: &Vector2f, step: &Vector2i, perpendicular_wall_dist: &mut f32, ) { *perpendicular_wall_dist = if side == 0 { (map_pos.x as f32 - ray_pos.x + (1 - step.x) as f32 / 2.) / ray_dir.x } else { (map_pos.y as f32 - ray_pos.y + (1 - step.y) as f32 / 2.) / ray_dir.y } .abs(); let line_height: i32 = if *perpendicular_wall_dist as i32 == 0 { self.window_size.y as i32 } else { ((self.window_size.y / *perpendicular_wall_dist) as i32).abs() }; *draw_start = (self.window_size.y as i32 / 2) - (line_height / 2); if *draw_start < 0 { *draw_start = 0; } *draw_end = line_height / 2 + self.window_size.y as i32 / 2; if *draw_end > self.window_size.y as i32 { *draw_end = self.window_size.y as i32 - 1; } } fn calculate_wall_texture( &mut self, side: i32, ray_dir: &Vector2f, x: i32, map_pos: &Vector2i, step: &Vector2i, ray_pos: &Vector2f, draw_end: i32, draw_start: i32, wall_x: &mut f32, ) { let mut texture_id = self .map .get_block(map_pos) .expect(&format!("ERROR: Cannot get block ID {:?}", map_pos)); *wall_x = if side == 1 { ray_pos.x + ((map_pos.y as f32 - ray_pos.y + (1. - step.y as f32) / 2.) / ray_dir.y) * ray_dir.x } else { ray_pos.y + ((map_pos.x as f32 - ray_pos.x + (1. - step.x as f32) / 2.) 
/ ray_dir.x) * ray_dir.y }; *wall_x -= wall_x.floor(); let mut texture_x = (*wall_x * 128.) as i32; if side == 0 && ray_dir.x > 0. { texture_x = 128 - texture_x - 1; } if side == 1 && ray_dir.y < 0. { texture_x = 128 - texture_x - 1; } if side == 1 { texture_id += 5; } self.textures_id.push(texture_id); self.vertex_array.get_mut(x as usize).unwrap().clear(); self.vertex_array .get_mut(x as usize) .unwrap() .append(&Vertex::new( Vector2f::new(x as f32, draw_end as f32), Color::WHITE, Vector2f::new(texture_x as f32, 128.), )); self.vertex_array .get_mut(x as usize) .unwrap() .append(&Vertex::new( Vector2f::new(x as f32, draw_start as f32), Color::WHITE, Vector2f::new(texture_x as f32, 0.), )); } fn calculate_step( &self, ray_dir: &Vector2f, step: &mut Vector2i, ray_pos: &Vector2f, map_pos: &Vector2i, delta_dist: &Vector2f, side_dist: &mut Vector2f, ) { if ray_dir.x < 0. { step.x = -1; side_dist.x = (ray_pos.x - map_pos.x as f32) * delta_dist.x; } else { step.x = 1; side_dist.x = (map_pos.x as f32 + 1. - ray_pos.x) * delta_dist.x; } if ray_dir.y < 0. { step.y = -1; side_dist.y = (ray_pos.y - map_pos.y as f32) * delta_dist.y; } else { step.y = 1; side_dist.y = (map_pos.y as f32 + 1. 
- ray_pos.y) * delta_dist.y; } } fn hit_wall( &self, map_pos: &mut Vector2i, side_dist: &mut Vector2f, step: &mut Vector2i, delta_dist: &mut Vector2f, side: &mut i32, ) { let mut hit: bool = false; while !hit { if side_dist.x < side_dist.y { side_dist.x += delta_dist.x; map_pos.x += step.x; *side = 0; } else { side_dist.y += delta_dist.y; map_pos.y += step.y; *side = 1; } hit = match self.map.get_block(map_pos) { Some(block) if block == 0 => false, _ => true, }; } } fn update_events(&mut self, event_handler: &EventHandler) { let mut pos = Vector2i { x: 0, y: 0 }; if event_handler.is_key_pressed(Key::W) { pos.x = (self.player_position.x + (self.vector_direction.x * 0.1)) as i32; pos.y = self.player_position.y as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.x += self.vector_direction.x * 0.1; } pos.y = (self.player_position.y + (self.vector_direction.y * 0.1)) as i32; pos.x = self.player_position.x as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.y += self.vector_direction.y * 0.1; } } if event_handler.is_key_pressed(Key::S) { pos.x = (self.player_position.x - (self.vector_direction.x * 0.1)) as i32; pos.y = self.player_position.y as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.x -= self.vector_direction.x * 0.1; } pos.y = (self.player_position.y - (self.vector_direction.y * 0.1)) as i32; pos.x = self.player_position.x as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.y -= self.vector_direction.y * 0.1; } } let mouse_move = match event_handler.has_mouse_moved_event() { Some((x, _)) => x as f32 - (self.window_size.x / 2.) 
as f32, None => 0., } / -250.; let old_dir_x = self.vector_direction.x; self.vector_direction.x = self.vector_direction.x * (mouse_move).cos() - self.vector_direction.y * (mouse_move).sin(); self.vector_direction.y = old_dir_x * (mouse_move).sin() + self.vector_direction.y * (mouse_move).cos(); let old_cam_plane_x = self.cam_plane.x; self.cam_plane.x = self.cam_plane.x * (mouse_move).cos() - self.cam_plane.y * (mouse_move).sin(); self.cam_plane.y = old_cam_plane_x * (mouse_move).sin() + self.cam_plane.y * (mouse_move).cos(); } fn create_line_array(window_size: &Vector2f) -> Vec<Box<VertexArray>> { let mut lines: Vec<Box<VertexArray>> = Vec::new(); for _ in 0..(window_size.x as i32) { let mut line: Box<VertexArray> = Box::new(VertexArray::default()); line.set_primitive_type(PrimitiveType::Lines); lines.push(line); } lines } fn create_ground_array(window_size: &Vector2f) -> Vec<Box<VertexArray>> { let mut lines: Vec<Box<VertexArray>> = Vec::new(); for _ in 0..(window_size.x as i32) { let line: Box<VertexArray> = Box::new(VertexArray::default()); lines.push(line); } lines } pub fn get_player_pos(&self) -> Vector2f { self.player_position.clone() } pub fn draw<'r>(&self, render_window: &'r mut RenderWindow, texture_loader: &'r TextureLoader) { let mut render_states = RenderStates::default(); for (line_idx, line) in self.vertex_array.iter().enumerate() { render_states.texture = Some(texture_loader.get_texture(self.textures_id[line_idx as usize])); render_window.draw_with_renderstates(&*(*line), render_states); } render_states.texture = Some(texture_loader.get_texture(0)); for gr in self.ground.iter() { render_window.draw_with_renderstates(&*(*gr), render_states); } render_states.texture = Some(texture_loader.get_texture(11)); for sky in self.sky.iter() { render_window.draw_with_renderstates(&*(*sky), render_states); } } }
use rsfml::{ graphics::{ Color, PrimitiveType, RenderStates, RenderTarget, RenderWindow, Vertex, VertexArray, }, system::{Vector2f, Vector2i}, window::Key, }; use crate::{event_handler::EventHandler, map::Map, texture_loader::TextureLoader}; pub struct RaycastEngine { player_position: Vector2f, vector_direction: Vector2f, cam_plane: Vector2f, map: Map, window_size: Vector2f, vertex_array: Vec<Box<VertexArray>>, textures_id: Vec<i32>, ground: Vec<Box<VertexArray>>, sky: Vec<Box<VertexArray>>, no_ground: bool, } impl RaycastEngine { pub fn new(map: Map, window_size: &Vector2f, no_ground: bool) -> RaycastEngine { RaycastEngine { player_position: Vector2f { x: 22., y: 12. }, vector_direction: Vector2f { x: -1., y: 0. }, cam_plane: Vector2f { x: 0., y: 0.66 }, map, window_size: Vector2f { x: window_size.x, y: window_size.y - 80., }, vertex_array: RaycastEngine::create_line_array(window_size), textures_id: Vec::new(), ground: RaycastEngine::create_ground_array(window_size), sky: RaycastEngine::create_ground_array(window_size), no_ground, } } pub fn update(&mut self, event_handler: &EventHandler) { self.textures_id.clear(); let ray_pos = self.player_position.clone(); let mut ray_dir = Vector2f { x: 0., y: 0. }; let mut map_pos = Vector2i { x: 0, y: 0 }; let mut side_dist = Vector2f { x: 0., y: 0. }; let mut delta_dist = Vector2f { x: 0., y: 0. }; let mut step = Vector2i { x: 0, y: 0 }; let mut draw_start: i32 = 0; let mut draw_end: i32 = 0; let mut camera_x: f32; let mut side: i32; let mut perpendicular_wall_dist: f32 = 0.; let mut wall_x: f32 = 0.; for x in 0..(self.window_size.x as i32) { camera_x = 2. * x as f32 / self.window_size.x - 1.; ray_dir.x = self.vector_direction.x + self.cam_plane.x * camera_x; ray_dir.y = self.vector_direction.y + self.cam_plane.y * camera_x; map_pos.x = ray_pos.x as i32; map_pos.y = ray_pos.y as i32; delta_dist.x = (1. + (ray_dir.y * ray_dir.y) / (ray_dir.x * ray_dir.x)).sqrt(); delta_dist.y = (1. 
+ (ray_dir.x * ray_dir.x) / (ray_dir.y * ray_dir.y)).sqrt(); side = 0; self.calculate_step( &ray_dir, &mut step, &ray_pos, &map_pos, &delta_dist, &mut side_dist, ); self.hit_wall( &mut map_pos, &mut side_dist, &mut step, &mut delta_dist, &mut side, ); self.calculate_wall_height( side, &mut draw_start, &mut draw_end, &map_pos, &ray_pos, &ray_dir, &step, &mut perpendicular_wall_dist, ); self.calculate_wall_texture( side, &ray_dir, x, &map_pos, &step, &ray_pos, draw_end, draw_start, &mut wall_x, ); if !self.no_ground { self.calculate_ground( side, &map_pos, wall_x, &ray_dir, perpendicular_wall_dist, &mut draw_end, x, ); } } self.update_events(event_handler); } fn calculate_ground( &mut self, side: i32, map_pos: &Vector2i, wall_x: f32, ray_dir: &Vector2f, perpendicular_wall_dist: f32, draw_end: &mut i32, x: i32, ) { if *draw_end < 0 { *draw_end = self.window_size.y as i32; } self.ground.get_mut(x as usize).unwrap().clear(); self.sky.get_mut(x as usize).unwrap().clear(); let mut vertex = Vertex::default(); let mut current_dist: f32; let mut weight: f32; let mut current_floor = Vector2f { x: 0., y: 0. }; let mut tex_coord = Vector2f { x: 0., y: 0. }; let mut pos = Vector2f { x: x as f32, y: 0. }; let dist_player: f32 = 0.; let (map_pos_x, map_pos_y) = (map_pos.x as f32, map_pos.y as f32); let floor = if side == 0 && ray_dir.x > 0. { Vector2f { x: map_pos_x, y: map_pos_y + wall_x, } } else if side == 0 && ray_dir.x < 0. { Vector2f { x: map_pos_x + 1., y: map_pos_y + wall_x, } } else if side == 1 && ray_dir.y > 0. { Vector2f { x: map_pos_x + wall_x, y: map_pos_y, } } else { Vector2f { x: map_pos_x + wall_x, y: map_pos_y + 1., } }; for y in (*draw_end + 1)..(self.window_size.y as i32) { current_dist = self.window_size.y / (2. * y as f32 - self.window_size.y as f32); weight = (current_dist - dist_player) / (perpendicular_wall_dist - dist_player); current_floor.x = weight * floor.x + (1. - weight) * self.player_position.x; current_floor.y = weight * floor.y + (1. 
- weight) * self.player_position.y; tex_coord.x = ((current_floor.x * 128.) as i32 % 128) as f32; tex_coord.y = ((current_floor.y * 128.) as i32 % 128) as f32; pos.y = y as f32; vertex.position.x = pos.x; vertex.position.y = pos.y; vertex.tex_coords.x = tex_coord.x; vertex.tex_coords.y = tex_coord.y; self.ground.get_mut(x as usize).unwrap().append(&vertex); pos.y = self.window_size.y - y as f32; vertex.position.x = pos.x; vertex.position.y = pos.y; vertex.tex_coords.x = tex_coord.x; vertex.tex_coords.y = tex_coord.y; self.sky.get_mut(x as usize).unwrap().append(&vertex); } } fn calculate_wall_height( &mut self, side: i32, draw_start: &mut i32, draw_end: &mut i32, map_pos: &Vector2i, ray_pos: &Vector2f, ray_dir: &Vector2f, step: &Vector2i, perpendicular_wall_dist: &mut f32, ) { *perpendicular_wall_dist = if side == 0 { (map_pos.x as f32 - ray_pos.x + (1 - step.x) as f32 / 2.) / ray_dir.x } else { (map_pos.y as f32 - ray_pos.y + (1 - step.y) as f32 / 2.) / ray_dir.y } .abs(); let line_height: i32 = if *perpendicular_wall_dist as i32 == 0 { self.window_size.y as i32 } else { ((self.window_size.y / *perpendicular_wall_dist) as i32).abs() }; *draw_start = (self.window_size.y as i32 / 2) - (line_height / 2); if *draw_start < 0 { *draw_start = 0; } *draw_end = line_height / 2 + self.window_size.y as i32 / 2; if *draw_end > self.window_size.y as i32 { *draw_end = self.window_size.y as i32 - 1; } } fn calculate_wall_texture( &mut self, side: i32, ray_dir: &Vector2f, x: i32, map_pos: &Vector2i, step: &Vector2i, ray_pos: &Vector2f, draw_end: i32, draw_start: i32, wall_x: &mut f32, ) { let mut texture_id = self .map .get_block(map_pos) .expect(&format!("ERROR: Cannot get block ID {:?}", map_pos)); *wall_x = if side == 1 { ray_pos.x + ((map_pos.y as f32 - ray_pos.y + (1. - step.y as f32) / 2.) / ray_dir.y) * ray_dir.x } else { ray_pos.y + ((map_pos.x as f32 - ray_pos.x + (1. - step.x as f32) / 2.) 
/ ray_dir.x) * ray_dir.y }; *wall_x -= wall_x.floor(); let mut texture_x = (*wall_x * 128.) as i32; if side == 0 && ray_dir.x > 0. { texture_x = 128 - texture_x - 1; } if side == 1 && ray_dir.y < 0. { texture_x = 128 - texture_x - 1; } if side == 1 { texture_id += 5; } self.textures_id.push(texture_id); self.vertex_array.get_mut(x as usize).unwrap().clear(); self.vertex_array .get_mut(x as usize) .unwrap() .append(&Vertex::new( Vector2f::new(x as f32, draw_end as f32), Color::WHITE, Vector2f::new(texture_x as f32, 128.), )); self.vertex_array .get_mut(x as usize) .unwrap() .append(&Vertex::new( Vector2f::new(x as f32, draw_start as f32), Color::WHITE, Vector2f::new(texture_x as f32, 0.), )); }
fn hit_wall( &self, map_pos: &mut Vector2i, side_dist: &mut Vector2f, step: &mut Vector2i, delta_dist: &mut Vector2f, side: &mut i32, ) { let mut hit: bool = false; while !hit { if side_dist.x < side_dist.y { side_dist.x += delta_dist.x; map_pos.x += step.x; *side = 0; } else { side_dist.y += delta_dist.y; map_pos.y += step.y; *side = 1; } hit = match self.map.get_block(map_pos) { Some(block) if block == 0 => false, _ => true, }; } } fn update_events(&mut self, event_handler: &EventHandler) { let mut pos = Vector2i { x: 0, y: 0 }; if event_handler.is_key_pressed(Key::W) { pos.x = (self.player_position.x + (self.vector_direction.x * 0.1)) as i32; pos.y = self.player_position.y as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.x += self.vector_direction.x * 0.1; } pos.y = (self.player_position.y + (self.vector_direction.y * 0.1)) as i32; pos.x = self.player_position.x as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.y += self.vector_direction.y * 0.1; } } if event_handler.is_key_pressed(Key::S) { pos.x = (self.player_position.x - (self.vector_direction.x * 0.1)) as i32; pos.y = self.player_position.y as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.x -= self.vector_direction.x * 0.1; } pos.y = (self.player_position.y - (self.vector_direction.y * 0.1)) as i32; pos.x = self.player_position.x as i32; if self .map .get_block(&pos) .expect(&format!("ERROR: Cannot get block ID {:?}", &pos)) == 0 { self.player_position.y -= self.vector_direction.y * 0.1; } } let mouse_move = match event_handler.has_mouse_moved_event() { Some((x, _)) => x as f32 - (self.window_size.x / 2.) 
as f32, None => 0., } / -250.; let old_dir_x = self.vector_direction.x; self.vector_direction.x = self.vector_direction.x * (mouse_move).cos() - self.vector_direction.y * (mouse_move).sin(); self.vector_direction.y = old_dir_x * (mouse_move).sin() + self.vector_direction.y * (mouse_move).cos(); let old_cam_plane_x = self.cam_plane.x; self.cam_plane.x = self.cam_plane.x * (mouse_move).cos() - self.cam_plane.y * (mouse_move).sin(); self.cam_plane.y = old_cam_plane_x * (mouse_move).sin() + self.cam_plane.y * (mouse_move).cos(); } fn create_line_array(window_size: &Vector2f) -> Vec<Box<VertexArray>> { let mut lines: Vec<Box<VertexArray>> = Vec::new(); for _ in 0..(window_size.x as i32) { let mut line: Box<VertexArray> = Box::new(VertexArray::default()); line.set_primitive_type(PrimitiveType::Lines); lines.push(line); } lines } fn create_ground_array(window_size: &Vector2f) -> Vec<Box<VertexArray>> { let mut lines: Vec<Box<VertexArray>> = Vec::new(); for _ in 0..(window_size.x as i32) { let line: Box<VertexArray> = Box::new(VertexArray::default()); lines.push(line); } lines } pub fn get_player_pos(&self) -> Vector2f { self.player_position.clone() } pub fn draw<'r>(&self, render_window: &'r mut RenderWindow, texture_loader: &'r TextureLoader) { let mut render_states = RenderStates::default(); for (line_idx, line) in self.vertex_array.iter().enumerate() { render_states.texture = Some(texture_loader.get_texture(self.textures_id[line_idx as usize])); render_window.draw_with_renderstates(&*(*line), render_states); } render_states.texture = Some(texture_loader.get_texture(0)); for gr in self.ground.iter() { render_window.draw_with_renderstates(&*(*gr), render_states); } render_states.texture = Some(texture_loader.get_texture(11)); for sky in self.sky.iter() { render_window.draw_with_renderstates(&*(*sky), render_states); } } }
fn calculate_step( &self, ray_dir: &Vector2f, step: &mut Vector2i, ray_pos: &Vector2f, map_pos: &Vector2i, delta_dist: &Vector2f, side_dist: &mut Vector2f, ) { if ray_dir.x < 0. { step.x = -1; side_dist.x = (ray_pos.x - map_pos.x as f32) * delta_dist.x; } else { step.x = 1; side_dist.x = (map_pos.x as f32 + 1. - ray_pos.x) * delta_dist.x; } if ray_dir.y < 0. { step.y = -1; side_dist.y = (ray_pos.y - map_pos.y as f32) * delta_dist.y; } else { step.y = 1; side_dist.y = (map_pos.y as f32 + 1. - ray_pos.y) * delta_dist.y; } }
function_block-full_function
[ { "content": "pub fn parse_arguments() -> ParsedResult {\n\n let args = std::env::args().collect::<Vec<String>>();\n\n let arg_length = args.len();\n\n\n\n let mut arguments = Arguments {\n\n window_dimensions: (768, 480),\n\n no_ground: false,\n\n framerate_limit: 30,\n\n };\n\n\n\n let mut i_args = 1;\n\n while i_args < arg_length {\n\n let arg = &args[i_args];\n\n match arg.as_str() {\n\n \"--help\" => {\n\n display_help();\n\n return ParsedResult::Success;\n\n }\n\n \"--noground\" => arguments.no_ground = true,\n\n \"-f\" | \"--framerate\" => {\n", "file_path": "src/lib.rs", "rank": 0, "score": 56449.30045375915 }, { "content": "pub fn load_texture() -> Result<TextureLoader, Box<dyn Error>> {\n\n let mut texture_loader = TextureLoader::new();\n\n if texture_loader.load_texture(\"resources/ground.tga\").is_err() || // 0\n\n texture_loader.load_texture(\"resources/1.tga\").is_err() || // 1\n\n texture_loader.load_texture(\"resources/2.tga\").is_err() || // 2\n\n texture_loader.load_texture(\"resources/3.tga\").is_err() || // 3\n\n texture_loader.load_texture(\"resources/4.tga\").is_err() || // 4\n\n texture_loader.load_texture(\"resources/5.tga\").is_err() || // 5\n\n texture_loader.load_texture(\"resources/6.tga\").is_err() || // 6\n\n texture_loader.load_texture(\"resources/7.tga\").is_err() || // 7\n\n texture_loader.load_texture(\"resources/8.tga\").is_err() || // 8\n\n texture_loader.load_texture(\"resources/9.tga\").is_err() || // 9\n\n texture_loader.load_texture(\"resources/10.tga\").is_err() || // 10\n\n texture_loader.load_texture(\"resources/sky.tga\").is_err() || // 11\n\n texture_loader.load_texture(\"resources/weapons/gun_1.png\").is_err() || // 12\n\n texture_loader.load_texture(\"resources/weapons/gun_2.png\").is_err() || // 13\n\n texture_loader.load_texture(\"resources/weapons/gun_3.png\").is_err() || // 14\n\n texture_loader.load_texture(\"resources/weapons/gun_4.png\").is_err() || // 15\n\n 
texture_loader.load_texture(\"resources/weapons/gun_5.png\").is_err() || // 16\n\n texture_loader.load_texture(\"resources/weapons/gun_6.png\").is_err() || // 17\n", "file_path": "src/lib.rs", "rank": 1, "score": 45114.31882020636 }, { "content": "type Line<const N: usize> = [f32; N];\n\n\n", "file_path": "src/hud.rs", "rank": 2, "score": 44955.20036357323 }, { "content": "fn display_help() -> () {\n\n println!(\"Arguments available for Rustenstein3D:\");\n\n println!(\"\\t-w [window_width] [window_height] : Specify a new size for the window.\");\n\n println!(\"\\t-f, --framerate [framerate_value] : Set the framerate of the game.\");\n\n println!(\"\\t--noground\\t\\t\\t : Disable the ground texturing (improve performance).\");\n\n println!(\"\\t--help\\t\\t\\t\\t : Display this help.\");\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 31750.03538077303 }, { "content": "// TODO: Use this over the giant texture loading block\n\nfn get_resources_list<P: AsRef<Path>>(path: P) -> Result<Vec<String>, Box<dyn Error>> {\n\n let paths = fs::read_dir(path)?;\n\n\n\n let mut resource_list = Vec::new();\n\n\n\n for path in paths {\n\n let path_name = path?.path();\n\n if path_name.is_dir() {\n\n let mut sub_resources = get_resources_list(path_name)?;\n\n resource_list.append(&mut sub_resources);\n\n } else {\n\n let extension = path_name\n\n .extension()\n\n .expect(\"ERROR: Cannot get file extension.\");\n\n if extension != \"wav\" && extension != \"ttf\" {\n\n resource_list.push(path_name.display().to_string());\n\n }\n\n }\n\n }\n\n\n\n Ok(resource_list)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 30072.199378352274 }, { "content": "fn main() -> Result<(), String> {\n\n let Arguments {\n\n window_dimensions: (width, height),\n\n framerate_limit,\n\n no_ground,\n\n } = match parse_arguments() {\n\n ParsedResult::Success => return Ok(()),\n\n ParsedResult::Failure(err) => return Err(err),\n\n ParsedResult::Parsed(value) => value,\n\n };\n\n\n\n // 
Create the render_window.\n\n let settings = ContextSettings::default();\n\n let video_mode = VideoMode::new(width, height, 32);\n\n // let video_mode = VideoMode::new_init(512, 384, 32);\n\n let mut render_window = RenderWindow::new(video_mode, \"Rustenstein3D\", Style::CLOSE, &settings);\n\n\n\n render_window.set_framerate_limit(framerate_limit);\n\n\n\n // hide the cursor.\n", "file_path": "src/main.rs", "rank": 5, "score": 28767.898006410433 }, { "content": "impl Map {\n\n pub fn new(map: Vec<i32>, map_size: &Vector2f) -> Map {\n\n Map {\n\n map,\n\n map_size: Vector2i {\n\n x: map_size.x as i32,\n\n y: map_size.y as i32,\n\n },\n\n }\n\n }\n\n\n\n pub fn get_block_with_orientation(\n\n &self,\n\n block_orientation: Orientation,\n\n position: Vector2i,\n\n ) -> Option<i32> {\n\n match block_orientation {\n\n Orientation::Top => self.handle_orientation(position, Vector2i::new(-1, 0)),\n\n Orientation::Bottom => self.handle_orientation(position, Vector2i::new(1, 0)),\n\n Orientation::Left => self.handle_orientation(position, Vector2i::new(0, -1)),\n", "file_path": "src/map.rs", "rank": 6, "score": 24832.810543809697 }, { "content": "//! 
Module for the world map data\n\nuse rsfml::system::{Vector2f, Vector2i};\n\n\n\n#[derive(Clone)]\n\npub struct Map {\n\n map: Vec<i32>,\n\n map_size: Vector2i,\n\n}\n\n\n\npub enum Orientation {\n\n Top,\n\n Bottom,\n\n Left,\n\n Right,\n\n TopLeft,\n\n TopRight,\n\n BottomLeft,\n\n BottomRight,\n\n}\n\n\n", "file_path": "src/map.rs", "rank": 7, "score": 24830.409009540046 }, { "content": " Orientation::Right => self.handle_orientation(position, Vector2i::new(0, 1)),\n\n Orientation::TopLeft => self.handle_orientation(position, Vector2i::new(-1, -1)),\n\n Orientation::TopRight => self.handle_orientation(position, Vector2i::new(-1, 1)),\n\n Orientation::BottomLeft => self.handle_orientation(position, Vector2i::new(1, -1)),\n\n Orientation::BottomRight => self.handle_orientation(position, Vector2i::new(1, 1)),\n\n }\n\n }\n\n\n\n pub fn get_block(&self, position: &Vector2i) -> Option<i32> {\n\n if position.x >= 0\n\n && position.x <= self.map_size.x\n\n && position.y >= 0\n\n && position.y <= self.map_size.y\n\n {\n\n Some(self.map[(position.y * self.map_size.x + position.x) as usize])\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn get_map_size(&self) -> &Vector2i {\n\n &self.map_size\n\n }\n\n\n\n fn handle_orientation(&self, position: Vector2i, offset: Vector2i) -> Option<i32> {\n\n self.get_block(&(position + offset))\n\n }\n\n}\n", "file_path": "src/map.rs", "rank": 8, "score": 24829.728464288226 }, { "content": "//! 
Module for displaying the mini-map\n\n\n\nuse rsfml::{\n\n graphics::{\n\n Color, FloatRect, RectangleShape, RenderTarget, RenderWindow, Shape, Transformable, View,\n\n },\n\n system::{SfBox, Vector2f, Vector2i, Vector2u},\n\n};\n\n\n\nuse crate::{map::*, texture_loader::TextureLoader};\n\n\n\npub struct MiniMap {\n\n map: Map,\n\n active: bool,\n\n mini_map_view: SfBox<View>,\n\n player_pos: Vector2f,\n\n rotation: f32,\n\n}\n\n\n\nimpl MiniMap {\n", "file_path": "src/mini_map.rs", "rank": 9, "score": 23233.18336739101 }, { "content": " pub fn is_active(&self) -> bool {\n\n self.active\n\n }\n\n\n\n pub fn update(&mut self, player_position: Vector2f, new_rotation: f32) -> () {\n\n self.player_pos = player_position;\n\n let borrowed_mini_map_view = &mut (*self.mini_map_view);\n\n borrowed_mini_map_view.rotate(new_rotation);\n\n borrowed_mini_map_view.set_center(Vector2f::new(\n\n self.player_pos.x * 80.,\n\n self.player_pos.y * 80.,\n\n ));\n\n self.rotation += new_rotation;\n\n }\n\n\n\n pub fn draw(&mut self, render_window: &mut RenderWindow, texture_loader: &TextureLoader) -> () {\n\n let mut block: i32;\n\n let map_size = self.map.get_map_size();\n\n let mut pos: Vector2i = Vector2i::new(0, 0);\n\n let mut rect = RectangleShape::with_size(Vector2f::new(80., 80.));\n", "file_path": "src/mini_map.rs", "rank": 10, "score": 23231.272465254857 }, { "content": " rect.set_fill_color(Color::rgba(255, 255, 255, 175));\n\n render_window.set_view(&self.mini_map_view);\n\n while pos.x < map_size.x {\n\n while pos.y < map_size.y {\n\n block = self\n\n .map\n\n .get_block(&pos)\n\n .expect(\"ERROR: Cannot get block in minimap.\");\n\n if block == 0 {\n\n rect.set_texture(texture_loader.get_texture(block), false);\n\n rect.set_position(Vector2f::new(pos.x as f32 * 80., pos.y as f32 * 80.));\n\n } else {\n\n rect.set_texture(texture_loader.get_texture(block), false);\n\n rect.set_position(Vector2f::new(pos.x as f32 * 80., pos.y as f32 * 80.));\n\n }\n\n render_window.draw(&mut 
rect);\n\n pos.y += 1;\n\n }\n\n pos.x += 1;\n\n pos.y = 0;\n", "file_path": "src/mini_map.rs", "rank": 11, "score": 23229.433016485462 }, { "content": " pub fn new(map: Map, window_size: &Vector2u) -> MiniMap {\n\n let mut tmp_view = View::new(Vector2f::default(), Vector2f::default());\n\n let borrowed_view = &mut (*tmp_view);\n\n borrowed_view.set_size(Vector2f::new(window_size.x as f32, window_size.y as f32));\n\n borrowed_view.set_viewport(&FloatRect::new(0.70, 0.05, 0.25, 0.25));\n\n borrowed_view.set_rotation(-90.);\n\n MiniMap {\n\n map,\n\n active: true,\n\n mini_map_view: tmp_view,\n\n player_pos: Vector2f { x: 0., y: 0. },\n\n rotation: 0.,\n\n }\n\n }\n\n\n\n pub fn toggle_active(&mut self) -> bool {\n\n self.active = !self.active;\n\n self.active\n\n }\n\n\n", "file_path": "src/mini_map.rs", "rank": 12, "score": 23226.653209869193 }, { "content": " }\n\n rect.set_fill_color(Color::rgba(255, 0, 0, 125));\n\n rect.set_origin(Vector2f::new(40., 40.));\n\n rect.set_position(Vector2f::new(\n\n self.player_pos.x as f32 * 80.,\n\n self.player_pos.y as f32 * 80.,\n\n ));\n\n render_window.draw(&mut rect);\n\n\n\n //TODO: Figure out how to restore view.\n\n let def_view = render_window.default_view();\n\n let new_view = View::new(def_view.center(), def_view.size());\n\n render_window.set_view(&new_view);\n\n }\n\n}\n", "file_path": "src/mini_map.rs", "rank": 13, "score": 23222.142863408913 }, { "content": "#[cfg(target_os = \"macos\")]\n\n#[start]\n\nfn start(argc: int, argv: *const *const u8) -> int {\n\n native::start(argc, argv, main)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 21700.392714356767 }, { "content": " sky: RectangleShape<'s>,\n\n ground: RectangleShape<'s>,\n\n}\n\n\n\nimpl<'s> GameMode<'s> {\n\n pub fn new(\n\n window_size: Vector2u,\n\n texture_loader: &'s TextureLoader,\n\n no_ground: bool,\n\n ) -> GameMode<'s> {\n\n let map = GameMode::get_map();\n\n let mut sky = RectangleShape::with_size(Vector2f::new(\n\n window_size.x 
as f32,\n\n window_size.y as f32 / 2. - 40.,\n\n ));\n\n sky.set_fill_color(Color::rgb(63, 48, 21));\n\n let mut ground = RectangleShape::with_size(Vector2f::new(\n\n window_size.x as f32,\n\n window_size.y as f32 / 2. - 40.,\n\n ));\n", "file_path": "src/game_mode.rs", "rank": 22, "score": 20.839985042346697 }, { "content": " ground,\n\n }\n\n }\n\n\n\n pub fn get_map() -> Map {\n\n let map_i32: Vec<i32> = vec![\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0, 0, 0, 3, 0, 3, 0, 3,\n\n 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,\n\n 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,\n\n 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 2, 2, 0, 2, 2,\n\n 0, 0, 0, 0, 3, 0, 3, 0, 3, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 1, 1, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", "file_path": "src/game_mode.rs", "rank": 30, "score": 13.894773024554842 }, { "content": " ground.set_fill_color(Color::rgb(109, 108, 112));\n\n ground.set_position(Vector2f::new(0., window_size.y as f32 / 2. 
- 40.));\n\n GameMode {\n\n window_size,\n\n mini_map: MiniMap::new(map.clone(), &window_size),\n\n r_engine: RaycastEngine::new(\n\n map,\n\n &Vector2f::new(window_size.x as f32, window_size.y as f32),\n\n no_ground,\n\n ),\n\n texture_loader,\n\n hud: HUD::new(\n\n &Vector2f::new(window_size.x as f32, window_size.y as f32),\n\n texture_loader,\n\n ),\n\n weapon: Weapon::new(\n\n &Vector2f::new(window_size.x as f32, window_size.y as f32),\n\n texture_loader,\n\n ),\n\n sky,\n", "file_path": "src/game_mode.rs", "rank": 32, "score": 13.522050009315816 }, { "content": "\n\n pub fn get_current_texture_id(&self) -> i32 {\n\n self.texture_ids[self.current_texture as usize]\n\n }\n\n\n\n pub fn set_loop_anim(&mut self, a: u32, b: u32) -> () {\n\n self.a = a;\n\n self.b = b;\n\n }\n\n\n\n pub fn set_need_anim_offset(&mut self, offset: u32) -> () {\n\n self.offset = offset\n\n }\n\n\n\n pub fn update(&mut self) -> () {\n\n if self.state != AnimationState::Play {\n\n return;\n\n }\n\n\n\n if self.clock.elapsed_time().as_seconds() < self.lag {\n", "file_path": "src/animation.rs", "rank": 34, "score": 12.51589471891231 }, { "content": "//! Module for configuring and displaying the Heads-Up Display (HUD)\n\n\n\nuse rsfml::{\n\n graphics::{\n\n Color, PrimitiveType, RectangleShape, RenderTarget, RenderWindow, Shape, Transformable,\n\n Vertex, VertexArray,\n\n },\n\n system::{Clock, Vector2f},\n\n};\n\n\n\nuse crate::{animation::*, texture_loader::TextureLoader};\n\n\n", "file_path": "src/hud.rs", "rank": 35, "score": 12.475695829500962 }, { "content": "//! 
Module for configuration of selected game mode\n\n\n\nuse rsfml::{\n\n graphics::{Color, RectangleShape, RenderTarget, RenderWindow, Shape, Transformable},\n\n system::{Vector2f, Vector2i, Vector2u},\n\n window::Key,\n\n};\n\n\n\nuse crate::{\n\n event_handler::*, hud::HUD, map::Map, mini_map::*, raycasting_engine::RaycastEngine,\n\n texture_loader::TextureLoader, weapon::Weapon,\n\n};\n\n\n\npub struct GameMode<'s> {\n\n r_engine: RaycastEngine,\n\n texture_loader: &'s TextureLoader,\n\n window_size: Vector2u,\n\n mini_map: MiniMap,\n\n hud: HUD<'s>,\n\n weapon: Weapon<'s>,\n", "file_path": "src/game_mode.rs", "rank": 36, "score": 12.325061019157994 }, { "content": "//! Module for drawing weapons\n\n\n\nuse std::ops::Range;\n\n\n\nuse rsfml::{\n\n graphics::{RectangleShape, RenderTarget, RenderWindow, Shape, Transformable},\n\n system::Vector2f,\n\n window::{mouse::Button as MouseButton, Key},\n\n};\n\n\n\nuse crate::{animation::*, event_handler::EventHandler, texture_loader::TextureLoader};\n\n\n\npub struct Weapon<'s> {\n\n weapons: RectangleShape<'s>,\n\n animations: Vec<Animation>,\n\n texture_loader: &'s TextureLoader,\n\n shadows: RectangleShape<'s>,\n\n shadows_id: Vec<i32>,\n\n current_weapon: i32,\n\n mouse_fire: bool,\n", "file_path": "src/weapon.rs", "rank": 37, "score": 12.2115002603146 }, { "content": "//! Basic class to display the games frames per seconds (FPS).\n\n//!\n\n//! 
This class displays the current FPS in the bottom-left of the window.\n\n#![allow(non_snake_case)]\n\n\n\nuse rsfml::{\n\n graphics::{Color, Font, RenderTarget, RenderWindow, Text, Transformable},\n\n system::{Clock, Vector2f},\n\n};\n\n\n\npub struct FPSHandler<'t> {\n\n fps_clock: Clock,\n\n text: Text<'t>,\n\n images: usize,\n\n}\n\n\n\nimpl<'t> FPSHandler<'t> {\n\n /// Constructs a new instance of FPSHandler\n\n ///\n\n /// # Arguments\n", "file_path": "src/fps.rs", "rank": 38, "score": 11.845449409848886 }, { "content": " .set_position(Vector2f::new(10., self.window_size.y - 70.));\n\n self.face_animation.update();\n\n self.face.set_texture(\n\n self.texture_loader\n\n .get_texture(self.face_animation.get_current_texture_id()),\n\n false,\n\n );\n\n if self.face_clock.elapsed_time().as_seconds() >= 7. {\n\n self.face_animation.set_state(AnimationState::Play);\n\n self.face_clock.restart();\n\n }\n\n }\n\n\n\n fn draw_line(&mut self, line_coords: Line2D, color: &Color, render_window: &mut RenderWindow) {\n\n self.hud_vertex_array.clear();\n\n self.hud_vertex_array.append(&Vertex::with_pos_color(\n\n Vector2f::new(line_coords[0], line_coords[1]),\n\n *color,\n\n ));\n\n self.hud_vertex_array.append(&Vertex::with_pos_color(\n", "file_path": "src/hud.rs", "rank": 39, "score": 11.752992839484051 }, { "content": "\n\n pub fn has_mouse_moved_event(&self) -> Option<(i32, i32)> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::MouseMoved { x, y } => Some((x, y)),\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn has_mouse_entered_event(&self) -> bool {\n\n self.events.iter().any(|ev| *ev == Event::MouseEntered)\n\n }\n\n\n\n pub fn has_mouse_left_event(&self) -> bool {\n\n self.events.iter().any(|ev| *ev == Event::MouseLeft)\n\n }\n\n\n\n // pub fn get_mouse_position(&self) -> Vector2i {\n\n // self.render_window.get_mouse_position()\n\n // }\n\n\n", "file_path": "src/event_handler.rs", "rank": 40, "score": 11.742245828919428 }, { "content": " }\n\n 
self.r_engine.update(event_handler);\n\n if self.mini_map.is_active() {\n\n self.mini_map\n\n .update(self.r_engine.get_player_pos(), rotation);\n\n }\n\n self.hud.update();\n\n self.weapon.update(event_handler);\n\n }\n\n\n\n pub fn draw(&mut self, render_window: &mut RenderWindow) {\n\n render_window.draw(&self.sky);\n\n render_window.draw(&self.ground);\n\n self.r_engine.draw(render_window, self.texture_loader);\n\n if self.mini_map.is_active() {\n\n self.mini_map.draw(render_window, self.texture_loader);\n\n }\n\n self.hud.draw(render_window);\n\n self.weapon.draw(render_window);\n\n render_window.set_mouse_cursor_visible(false);\n\n render_window.set_mouse_position(Vector2i::new(\n\n (self.window_size.x / 2) as i32,\n\n (self.window_size.y / 2) as i32,\n\n ));\n\n }\n\n}\n", "file_path": "src/game_mode.rs", "rank": 44, "score": 11.054426419734382 }, { "content": "//! Module for event handling\n\n\n\nuse rsfml::{\n\n graphics::RenderWindow,\n\n window::{mouse::Button as MouseButton, Event, Key},\n\n};\n\n\n\npub struct EventHandler {\n\n pub events: Vec<Event>,\n\n}\n\n\n\nimpl EventHandler {\n\n pub fn new() -> EventHandler {\n\n EventHandler { events: Vec::new() }\n\n }\n\n\n\n pub fn is_key_pressed(&self, key: Key) -> bool {\n\n Key::is_pressed(key)\n\n }\n\n\n", "file_path": "src/event_handler.rs", "rank": 45, "score": 10.939107780309445 }, { "content": " 0, 1, 1, 4, 0, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0,\n\n 0, 0, 0, 5, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 0, 0, 0, 0,\n\n 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 1, 1, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n\n ];\n\n Map::new(map_i32, &Vector2f::new(24., 24.))\n\n }\n\n\n\n pub fn 
update(&mut self, event_handler: &EventHandler) {\n\n let mut rotation: f32 = 0.;\n\n if event_handler.is_key_pressed(Key::Left) {\n\n rotation = -5.25;\n\n }\n\n if event_handler.is_key_pressed(Key::Right) {\n\n rotation = 5.25;\n\n }\n\n if let Some(_) = event_handler.has_key_pressed_event(Key::M) {\n\n self.mini_map.toggle_active();\n", "file_path": "src/game_mode.rs", "rank": 46, "score": 10.93697327944958 }, { "content": " Vector2f::new(line_coords[2], line_coords[3]),\n\n *color,\n\n ));\n\n render_window.draw(&self.hud_vertex_array);\n\n }\n\n\n\n fn draw_2line(&mut self, line_coords: Line3D, color: &Color, render_window: &mut RenderWindow) {\n\n self.hud_vertex_array.clear();\n\n self.hud_vertex_array.append(&Vertex::with_pos_color(\n\n Vector2f::new(line_coords[0], line_coords[1]),\n\n *color,\n\n ));\n\n self.hud_vertex_array.append(&Vertex::with_pos_color(\n\n Vector2f::new(line_coords[2], line_coords[3]),\n\n *color,\n\n ));\n\n self.hud_vertex_array.append(&Vertex::with_pos_color(\n\n Vector2f::new(line_coords[4], line_coords[5]),\n\n *color,\n\n ));\n", "file_path": "src/hud.rs", "rank": 48, "score": 10.622583198120287 }, { "content": "//! 
Module for game state management\n\n\n\n#![allow(non_snake_case)]\n\n\n\nuse rsfml::{\n\n graphics::{Color, Font, RenderTarget, RenderWindow},\n\n window::Key,\n\n};\n\n\n\nuse crate::{event_handler::*, fps::*, game_mode::*, texture_loader::TextureLoader};\n\n\n\npub struct GameLoop<'s> {\n\n render_window: RenderWindow,\n\n fps_handler: Option<FPSHandler<'s>>,\n\n event_handler: EventHandler,\n\n clear_color: Color,\n\n game_mode: GameMode<'s>,\n\n}\n\n\n\nimpl<'s> GameLoop<'s> {\n", "file_path": "src/game.rs", "rank": 49, "score": 10.209704097109205 }, { "content": " offset: u32,\n\n texture_ids: Vec<i32>,\n\n state: AnimationState,\n\n mode: AnimationPlayMode,\n\n lag: f32,\n\n current_texture: u32,\n\n clock: Clock,\n\n}\n\n\n\nimpl Animation {\n\n pub fn new(\n\n texture_ids: Vec<i32>,\n\n state: AnimationState,\n\n mode: AnimationPlayMode,\n\n lag: f32,\n\n offset: u32,\n\n ) -> Animation {\n\n Animation {\n\n a: 1,\n\n b: texture_ids.len() as u32 - 1u32,\n", "file_path": "src/animation.rs", "rank": 50, "score": 10.140477977035701 }, { "content": " ///\n\n /// # Arguments\n\n /// `render_window` - The window to draw the weapon onto\n\n pub fn draw<'r>(&'r mut self, render_window: &'r mut RenderWindow) -> () {\n\n self.weapons.set_texture(\n\n self.texture_loader.get_texture(\n\n self.animations[self.current_weapon as usize].get_current_texture_id(),\n\n ),\n\n false,\n\n );\n\n self.shadows.set_texture(\n\n self.texture_loader\n\n .get_texture(self.shadows_id[self.current_weapon as usize]),\n\n false,\n\n );\n\n render_window.draw(&self.weapons);\n\n render_window.draw(&self.shadows);\n\n }\n\n}\n", "file_path": "src/weapon.rs", "rank": 51, "score": 10.079044808418702 }, { "content": "//! 
Module for loading textures\n\n\n\nuse rsfml::{graphics::Texture, system::SfBox};\n\n\n\npub struct TextureLoader {\n\n textures: Vec<SfBox<Texture>>,\n\n}\n\n\n\nimpl TextureLoader {\n\n pub fn new() -> TextureLoader {\n\n TextureLoader {\n\n textures: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn load_texture(&mut self, texture_path: &str) -> Result<(), String> {\n\n let texture = Texture::from_file(texture_path);\n\n\n\n if let Some(tex) = texture {\n\n self.textures.push(tex);\n", "file_path": "src/texture_loader.rs", "rank": 52, "score": 9.867385948371863 }, { "content": " }\n\n _ => {}\n\n }\n\n }\n\n\n\n pub fn set_mode(&mut self, new_mode: AnimationPlayMode) -> () {\n\n self.mode = new_mode;\n\n }\n\n\n\n pub fn get_state(&self) -> AnimationState {\n\n self.state\n\n }\n\n\n\n pub fn get_mode(&self) -> AnimationPlayMode {\n\n self.mode\n\n }\n\n\n\n pub fn set_lag(&mut self, new_lag: f32) -> () {\n\n self.lag = new_lag\n\n }\n", "file_path": "src/animation.rs", "rank": 53, "score": 9.172141781463017 }, { "content": " self.fps_handler.as_mut().unwrap().update();\n\n }\n\n\n\n pub fn draw(&mut self) {\n\n self.render_window.clear(self.clear_color);\n\n self.game_mode.draw(&mut self.render_window);\n\n if let Some(_) = self.fps_handler {\n\n self.fps_handler\n\n .as_mut()\n\n .unwrap()\n\n .draw(&mut self.render_window)\n\n };\n\n self.render_window.display();\n\n }\n\n}\n", "file_path": "src/game.rs", "rank": 54, "score": 9.12664544648493 }, { "content": " }\n\n\n\n fn update_reload<'r>(&'r mut self, event_handler: &EventHandler) {\n\n if event_handler.is_key_pressed(Key::E) {\n\n self.animations\n\n .get_mut(self.current_weapon as usize)\n\n .unwrap()\n\n .set_state(AnimationState::Play);\n\n }\n\n }\n\n\n\n fn update_action<'r>(&'r mut self, event_handler: &EventHandler) {\n\n if !self.mouse_fire {\n\n if let Some(_) = event_handler.get_mouse_button_pressed_event(MouseButton::Left) {\n\n self.animations\n\n .get_mut(self.current_weapon as usize)\n\n .unwrap()\n\n 
.set_state(AnimationState::Play);\n\n self.mouse_fire = true\n\n };\n", "file_path": "src/weapon.rs", "rank": 55, "score": 9.03242991108431 }, { "content": " Ok(())\n\n } else {\n\n Err(format!(\"ERROR: Failed to load texture {}\", texture_path))\n\n }\n\n }\n\n\n\n pub fn get_texture(&self, index: i32) -> &Texture {\n\n &self.textures[index as usize]\n\n }\n\n}\n", "file_path": "src/texture_loader.rs", "rank": 58, "score": 8.72356279604574 }, { "content": " pub fn new(\n\n render_window: RenderWindow,\n\n texture_loader: &'s TextureLoader,\n\n no_ground: bool,\n\n ) -> GameLoop<'s> {\n\n let tmp_size = render_window.size();\n\n GameLoop {\n\n render_window,\n\n fps_handler: None,\n\n event_handler: EventHandler::new(),\n\n clear_color: Color::rgb(3, 64, 59),\n\n game_mode: GameMode::new(tmp_size, texture_loader, no_ground),\n\n }\n\n }\n\n\n\n pub fn activate_FPS(&mut self, font: &'s Font) {\n\n if let None = self.fps_handler {\n\n self.fps_handler = Some(FPSHandler::new(font))\n\n }\n\n }\n", "file_path": "src/game.rs", "rank": 59, "score": 8.490348060878041 }, { "content": " background: RectangleShape::new(),\n\n hud_vertex_array: array,\n\n face: tmp_face,\n\n face_animation: Animation::new(\n\n vec![40, 41, 42],\n\n AnimationState::Play,\n\n AnimationPlayMode::Once,\n\n 1.,\n\n 0,\n\n ),\n\n texture_loader,\n\n face_clock: Clock::start(),\n\n }\n\n }\n\n\n\n pub fn update(&mut self) {\n\n self.background\n\n .set_size(Vector2f::new(self.window_size.x - 21., 59.));\n\n self.background.set_fill_color(Color::rgb(6, 1, 162));\n\n self.background\n", "file_path": "src/hud.rs", "rank": 60, "score": 8.177154628902956 }, { "content": " } if code == key => Some((code, alt, ctrl, shift, system)),\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn has_mouse_wheel_moved_event(&self) -> Option<(i32, i32, i32)> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::MouseWheelScrolled {\n\n wheel: _,\n\n delta,\n\n x,\n\n y,\n\n } => Some((delta as i32, x, y)),\n\n _ => 
None,\n\n })\n\n }\n\n\n\n pub fn get_mouse_button_pressed_event(\n\n &self,\n\n mouse_button: MouseButton,\n", "file_path": "src/event_handler.rs", "rank": 62, "score": 8.01173817518543 }, { "content": "#![allow(non_snake_case)]\n\n\n\npub mod animation;\n\npub mod event_handler;\n\npub mod fps;\n\npub mod game;\n\npub mod game_mode;\n\npub mod hud;\n\npub mod map;\n\npub mod mini_map;\n\npub mod raycasting_engine;\n\npub mod texture_loader;\n\npub mod weapon;\n\n\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::Path;\n\n\n\nuse texture_loader::TextureLoader;\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 7.904062708463769 }, { "content": "#![allow(non_snake_case)]\n\n\n\nuse rsfml::{\n\n graphics::{Font, RenderWindow},\n\n system::Vector2i,\n\n window::{ContextSettings, Style, VideoMode},\n\n};\n\nuse rustenstein3D::game::GameLoop;\n\nuse rustenstein3D::{load_texture, parse_arguments, Arguments, ParsedResult, RESOURCES_BASE_PATH};\n\n\n\n#[cfg(target_os = \"macos\")]\n\n#[start]\n", "file_path": "src/main.rs", "rank": 64, "score": 7.786156778315804 }, { "content": "\n\n fn initialize_weapons(window_size: &Vector2f) -> RectangleShape<'s> {\n\n let mut tmp_weapon = RectangleShape::with_size(Vector2f { x: 400., y: 400. });\n\n tmp_weapon.set_position(Vector2f::new(\n\n window_size.x / 2. - 200.,\n\n window_size.y - 400. - 81.,\n\n ));\n\n tmp_weapon\n\n }\n\n\n\n fn initialize_shadows(window_size: &Vector2f) -> RectangleShape<'s> {\n\n let mut tmp_shadow = RectangleShape::with_size(Vector2f { x: 99., y: 48. 
});\n\n tmp_shadow.set_position(Vector2f::new(window_size.x - 115., window_size.y - 66.));\n\n tmp_shadow\n\n }\n\n\n\n fn create_animation_by_range(texture_id_range: Range<i32>) -> Animation {\n\n Animation::new(\n\n texture_id_range.collect(),\n\n AnimationState::Stop,\n", "file_path": "src/weapon.rs", "rank": 65, "score": 7.595845257013101 }, { "content": " ) -> Option<(MouseButton, i32, i32)> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::MouseButtonPressed { button, x, y } if mouse_button == button => {\n\n Some((button, x, y))\n\n }\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn get_mouse_button_released_event(\n\n &self,\n\n mouse_button: MouseButton,\n\n ) -> Option<(MouseButton, i32, i32)> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::MouseButtonReleased { button, x, y } if mouse_button == button => {\n\n Some((button, x, y))\n\n }\n\n _ => None,\n\n })\n\n }\n", "file_path": "src/event_handler.rs", "rank": 66, "score": 7.350396025380373 }, { "content": " render_window.draw(&self.hud_vertex_array);\n\n }\n\n\n\n pub fn draw(&mut self, render_window: &mut RenderWindow) {\n\n render_window.draw(&self.background);\n\n let window_x = self.window_size.x;\n\n let window_y = self.window_size.y;\n\n self.draw_2line(\n\n [\n\n window_x - 9.,\n\n window_x - 9.,\n\n 9.,\n\n window_y - 70.,\n\n window_y - 10.,\n\n window_y - 10.,\n\n ],\n\n &Color::rgba(255, 255, 255, 75),\n\n render_window,\n\n );\n\n self.draw_2line(\n", "file_path": "src/hud.rs", "rank": 67, "score": 7.221571335748709 }, { "content": "pub const RESOURCES_BASE_PATH: &'static str = \"resources\";\n\n\n\npub struct Arguments {\n\n pub window_dimensions: (u32, u32),\n\n pub no_ground: bool,\n\n pub framerate_limit: u32,\n\n}\n\n\n\npub enum ParsedResult {\n\n Success,\n\n Parsed(Arguments),\n\n Failure(String),\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 68, "score": 7.07240488556225 }, { "content": " pub fn has_closed_event(&self) -> bool {\n\n 
self.events.iter().any(|ev| *ev == Event::Closed)\n\n }\n\n\n\n pub fn has_gained_focus_event(&self) -> bool {\n\n self.events.iter().any(|ev| *ev == Event::GainedFocus)\n\n }\n\n\n\n pub fn has_lost_focus_event(&self) -> bool {\n\n self.events.iter().any(|ev| *ev == Event::LostFocus)\n\n }\n\n\n\n pub fn has_text_entered(&self) -> Option<char> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::TextEntered { unicode } => Some(unicode),\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn has_key_pressed_event(&self, key: Key) -> Option<(Key, bool, bool, bool, bool)> {\n", "file_path": "src/event_handler.rs", "rank": 69, "score": 7.049330236229626 }, { "content": " AnimationPlayMode::Once,\n\n 0.07,\n\n 3,\n\n )\n\n }\n\n\n\n fn initialize_animation() -> Vec<Animation> {\n\n let mut animations = Vec::new();\n\n animations.push(Weapon::create_animation_by_range(12..18));\n\n animations.push(Weapon::create_animation_by_range(19..25));\n\n animations.push(Weapon::create_animation_by_range(26..32));\n\n animations.push(Weapon::create_animation_by_range(33..39));\n\n animations\n\n }\n\n\n\n fn update_animations<'r>(&'r mut self) {\n\n self.animations\n\n .get_mut(self.current_weapon as usize)\n\n .unwrap()\n\n .update();\n", "file_path": "src/weapon.rs", "rank": 70, "score": 6.999712420256095 }, { "content": " /// `font` - The font to render the text on the window.\n\n pub fn new(font: &'t Font) -> FPSHandler<'t> {\n\n let mut t = Text::new(\"0\", font, 20);\n\n t.set_position(Vector2f::new(10., 10.));\n\n t.set_fill_color(Color::WHITE);\n\n FPSHandler {\n\n fps_clock: Clock::start(),\n\n text: t,\n\n images: 0,\n\n }\n\n }\n\n\n\n /// Update internal data of the FPSHandler\n\n ///\n\n /// Call this function at each end of the loop to update FPSHandler internal data.\n\n pub fn update(&mut self) {\n\n if self.fps_clock.elapsed_time().as_seconds() >= 0.33 {\n\n self.text.set_string((self.images * 3).to_string().as_str());\n\n self.images = 0;\n\n 
self.fps_clock.restart();\n", "file_path": "src/fps.rs", "rank": 71, "score": 6.76450981086831 }, { "content": " } else if let Some(_) = event_handler.get_mouse_button_released_event(MouseButton::Left) {\n\n self.mouse_fire = false\n\n } else {\n\n self.animations\n\n .get_mut(self.current_weapon as usize)\n\n .unwrap()\n\n .set_state(AnimationState::Play)\n\n };\n\n }\n\n\n\n fn update_selection<'r>(&'r mut self, event_handler: &EventHandler) {\n\n if let Some(_) = event_handler.has_key_pressed_event(Key::Num1) {\n\n self.current_weapon = 0\n\n };\n\n if let Some(_) = event_handler.has_key_pressed_event(Key::Num2) {\n\n self.current_weapon = 1\n\n };\n\n if let Some(_) = event_handler.has_key_pressed_event(Key::Num3) {\n\n self.current_weapon = 2\n\n };\n", "file_path": "src/weapon.rs", "rank": 72, "score": 6.571705155693365 }, { "content": " self.events.iter().find_map(|ev| match *ev {\n\n Event::KeyPressed {\n\n code,\n\n alt,\n\n ctrl,\n\n shift,\n\n system,\n\n } if code == key => Some((code, alt, ctrl, shift, system)),\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn has_key_released_event(&self, key: Key) -> Option<(Key, bool, bool, bool, bool)> {\n\n self.events.iter().find_map(|ev| match *ev {\n\n Event::KeyReleased {\n\n code,\n\n alt,\n\n ctrl,\n\n shift,\n\n system,\n", "file_path": "src/event_handler.rs", "rank": 73, "score": 6.328772935331427 }, { "content": "//! 
Module for animation management\n\n\n\nuse rsfml::system::Clock;\n\n\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub enum AnimationState {\n\n Play,\n\n Pause,\n\n Stop,\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub enum AnimationPlayMode {\n\n Once,\n\n Infinite,\n\n}\n\n\n\npub struct Animation {\n\n a: u32,\n\n b: u32,\n", "file_path": "src/animation.rs", "rank": 74, "score": 6.275553140676789 }, { "content": "}\n\n\n\nimpl<'s> Weapon<'s> {\n\n /// Instantiates a new Weapon.\n\n ///\n\n /// # Arguments\n\n /// `window_size` - The size of the window to draw the weapons onto\n\n ///\n\n /// `texture_loader` - A [TextureLoader] to obtain weapon textures from\n\n pub fn new(window_size: &Vector2f, texture_loader: &'s TextureLoader) -> Weapon<'s> {\n\n Weapon {\n\n weapons: Weapon::initialize_weapons(window_size),\n\n animations: Weapon::initialize_animation(),\n\n texture_loader,\n\n shadows: Weapon::initialize_shadows(window_size),\n\n shadows_id: vec![18, 25, 32, 39],\n\n current_weapon: 0,\n\n mouse_fire: false,\n\n }\n\n }\n", "file_path": "src/weapon.rs", "rank": 75, "score": 5.362393412825686 }, { "content": "\n\n pub fn deactivate_FPS(&mut self) {\n\n if let Some(_) = self.fps_handler {\n\n self.fps_handler = None\n\n }\n\n }\n\n\n\n pub fn run(&mut self) {\n\n while self.render_window.is_open() {\n\n self.update();\n\n self.draw();\n\n }\n\n }\n\n\n\n pub fn update(&mut self) {\n\n self.event_handler.update_events(&mut self.render_window);\n\n if self.event_handler.has_closed_event() || self.event_handler.is_key_pressed(Key::Escape) {\n\n self.render_window.close();\n\n }\n\n self.game_mode.update(&self.event_handler);\n", "file_path": "src/game.rs", "rank": 76, "score": 5.048388803094471 }, { "content": " pub fn get_events(&self) -> Vec<Event> {\n\n self.events.clone()\n\n }\n\n\n\n pub fn update_events(&mut self, render_window: &mut RenderWindow) -> () {\n\n self.events.clear();\n\n while let Some(ev) = render_window.poll_event() {\n\n 
self.events.push(ev)\n\n }\n\n }\n\n}\n\n\n\n// TODO IMPLEMENT FUNCTION FOR JOYSTICK HANDLE\n\n// JoystickButtonPressed { joystickid : int, button : int },\n\n// JoystickButtonReleased { joystickid : int, button : int },\n\n// JoystickMoved { joystickid : uint, axis : Axis, position : float },\n\n// JoystickConnected { joystickid : uint },\n\n// JoystickDisconnected { joystickid : uint },\n", "file_path": "src/event_handler.rs", "rank": 77, "score": 4.874394391417158 }, { "content": " render_window.set_mouse_cursor_visible(false);\n\n\n\n // set the mouse positon on the center of the window\n\n render_window.set_mouse_position(Vector2i::new(width as i32 / 2, height as i32 / 2));\n\n\n\n // Create the font for the FPS_handler.\n\n let font = Font::from_file(&format!(\"{}/sansation.ttf\", RESOURCES_BASE_PATH))\n\n .ok_or(\"ERROR: Cannot load font! Font (resources/sansation.ttf) does not exist!\")?;\n\n\n\n // Create the texture loader and load textures\n\n let texture_loader = match load_texture() {\n\n Ok(tl) => tl,\n\n Err(err) => return Err(err.to_string()),\n\n };\n\n\n\n // Create the game_loop and activate the fps handler.\n\n let mut game_loop = GameLoop::new(render_window, &texture_loader, no_ground);\n\n game_loop.activate_FPS(&font);\n\n\n\n game_loop.run();\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 78, "score": 4.63248895776503 }, { "content": " offset,\n\n texture_ids,\n\n state,\n\n mode,\n\n lag,\n\n current_texture: 0,\n\n clock: Clock::start(),\n\n }\n\n }\n\n\n\n pub fn set_state(&mut self, new_state: AnimationState) -> () {\n\n self.state = new_state;\n\n match new_state {\n\n AnimationState::Stop => {\n\n self.current_texture = 0;\n\n self.clock.restart();\n\n }\n\n AnimationState::Play if self.offset <= self.current_texture => {\n\n self.current_texture = self.a;\n\n self.clock.restart();\n", "file_path": "src/animation.rs", "rank": 79, "score": 4.542070270510915 }, { "content": " }\n\n self.images += 1;\n\n }\n\n\n\n /// Draw the 
current FPS on the left bottom of the window\n\n ///\n\n /// # Arguments\n\n /// `&mut render_window` - The window to draw onto\n\n pub fn draw(&self, render_window: &mut RenderWindow) {\n\n render_window.draw(&self.text)\n\n }\n\n}\n", "file_path": "src/fps.rs", "rank": 80, "score": 4.262456901776284 }, { "content": " &Color::BLACK,\n\n render_window,\n\n );\n\n self.draw_2line(\n\n [\n\n 11.,\n\n 11.,\n\n window_x - 11.,\n\n window_y - 11.,\n\n window_y - 69.,\n\n window_y - 69.,\n\n ],\n\n &Color::rgba(255, 255, 255, 75),\n\n render_window,\n\n );\n\n self.draw_line(\n\n [window_x, 0., window_y - 80., window_y - 80.],\n\n &Color::rgba(255, 255, 255, 50),\n\n render_window,\n\n );\n\n self.draw_line(\n\n [window_x, 0., window_y - 79., window_y - 79.],\n\n &Color::rgba(255, 255, 255, 75),\n\n render_window,\n\n );\n\n render_window.draw(&self.face);\n\n }\n\n}\n", "file_path": "src/hud.rs", "rank": 81, "score": 3.1036862283454063 }, { "content": " if let Some(_) = event_handler.has_key_pressed_event(Key::Num4) {\n\n self.current_weapon = 3\n\n };\n\n }\n\n\n\n /// Updates the weapon state\n\n ///\n\n /// # Arguments\n\n /// `event_handler` - The event handler to read input from to update state\n\n pub fn update<'r>(&'r mut self, event_handler: &'r EventHandler) -> () {\n\n self.update_selection(event_handler);\n\n\n\n self.update_action(event_handler);\n\n\n\n self.update_reload(event_handler);\n\n\n\n self.update_animations();\n\n }\n\n\n\n /// Updates the weapon animation on the next frame\n", "file_path": "src/weapon.rs", "rank": 82, "score": 2.740843130013035 }, { "content": " [\n\n window_x - 11.,\n\n window_x - 11.,\n\n 11.,\n\n window_y - 70.,\n\n window_y - 12.,\n\n window_y - 12.,\n\n ],\n\n &Color::BLACK,\n\n render_window,\n\n );\n\n self.draw_2line(\n\n [\n\n 9.,\n\n 9.,\n\n window_x - 9.,\n\n window_y - 12.,\n\n window_y - 71.,\n\n window_y - 71.,\n\n ],\n", "file_path": "src/hud.rs", "rank": 83, "score": 2.3829201002030223 }, { "content": " 
return;\n\n }\n\n\n\n if self.current_texture != self.texture_ids.len() as u32 - 1 {\n\n self.current_texture += 1;\n\n } else {\n\n self.current_texture = 0;\n\n if let AnimationPlayMode::Once = self.mode {\n\n self.state = AnimationState::Stop\n\n }\n\n }\n\n\n\n self.clock.restart();\n\n }\n\n}\n", "file_path": "src/animation.rs", "rank": 84, "score": 2.2157003342273014 }, { "content": "# Rustenstein3D\n\n\n\nSimple Wolfenstein3D engine in rust.\n\n\n\n## Build Instructions\n\n\n\n1. Clone the repository\n\n2. Run `cargo check` to pre-build the project\n\n3. Windows:\n\n 1. Download [SFML2](https://www.sfml-dev.org/download/sfml/2.5.1/) and [CSFML2](https://www.sfml-dev.org/download/csfml/)\n\n 2. Copy the `.lib` files from the `lib` folders in (C)SFML to your toolchain.\n\n - `C:\\Users\\[USERNAME]\\.rustup\\toolchains\\[RUST_TOOLCHAIN]\\lib\\rustlib\\[MSVC_TOOLCHAIN]\\lib`\n\n 3. Copy the `.dll` files from the `bin` folders in CSFML to the `target/[debug/release]` folders.\n\n4. Macintosh:\n\n 1. TODO\n\n5. Linux:\n\n 1. TODO\n\n6. Use `cargo run` to run Rustenstein3D.\n\n - Use `cargo run -- -w \"[WIDTH]\" \"[HEIGHT]\"` for a custom window size.\n\n\n\n## Screenshot\n\n\n\n![rustenstein screenshot](resources/screen.png \"rustenstein3D screenshot\")\n", "file_path": "README.md", "rank": 85, "score": 1.6736202865518233 } ]
Rust
datafusion/src/logical_plan/window_frames.rs
cube-js/arrow-datafusion
4e9d31e05b69017d972ae4c1ffbeeaab163fd654
use crate::error::{DataFusionError, Result}; use crate::execution::context::ExecutionContextState; use crate::logical_plan::Expr; use crate::scalar::ScalarValue; use crate::sql::planner::SqlToRel; use serde_derive::{Deserialize, Serialize}; use sqlparser::ast; use sqlparser::ast::DateTimeField; use std::cmp::Ordering; use std::convert::TryInto; use std::convert::{From, TryFrom}; use std::fmt; #[derive(Debug, Clone, PartialEq)] pub struct WindowFrame { pub units: WindowFrameUnits, pub start_bound: WindowFrameBound, pub end_bound: WindowFrameBound, } impl fmt::Display for WindowFrame { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{} BETWEEN {} AND {}", self.units, self.start_bound, self.end_bound )?; Ok(()) } } impl TryFrom<ast::WindowFrame> for WindowFrame { type Error = DataFusionError; fn try_from(value: ast::WindowFrame) -> Result<Self> { let start_bound = value.start_bound.try_into()?; let end_bound = value .end_bound .map(WindowFrameBound::try_from) .unwrap_or(Ok(WindowFrameBound::CurrentRow))?; check_window_bound_order(&start_bound, &end_bound)?; let is_allowed_range_bound = |s: &ScalarValue| match s { ScalarValue::Int64(Some(i)) => *i == 0, _ => false, }; let units = value.units.into(); if units == WindowFrameUnits::Range { for bound in &[&start_bound, &end_bound] { match bound { WindowFrameBound::Preceding(Some(v)) | WindowFrameBound::Following(Some(v)) if !is_allowed_range_bound(v) => { Err(DataFusionError::NotImplemented(format!( "With WindowFrameUnits={}, the bound cannot be {} PRECEDING or FOLLOWING at the moment", units, v ))) } _ => Ok(()), }?; } } Ok(Self { units, start_bound, end_bound, }) } } #[allow(missing_docs)] pub fn check_window_bound_order( start_bound: &WindowFrameBound, end_bound: &WindowFrameBound, ) -> Result<()> { if let WindowFrameBound::Following(None) = start_bound { Err(DataFusionError::Execution( "Invalid window frame: start bound cannot be unbounded following".to_owned(), )) } else if let 
WindowFrameBound::Preceding(None) = end_bound { Err(DataFusionError::Execution( "Invalid window frame: end bound cannot be unbounded preceding".to_owned(), )) } else { match start_bound.logical_cmp(&end_bound) { None => Err(DataFusionError::Execution(format!( "Invalid window frame: start bound ({}) is incompatble with the end bound ({})", start_bound, end_bound ))), Some(o) if o > Ordering::Equal => Err(DataFusionError::Execution(format!( "Invalid window frame: start bound ({}) cannot be larger than end bound ({})", start_bound, end_bound ))), Some(_) => Ok(()), } } } impl Default for WindowFrame { fn default() -> Self { WindowFrame { units: WindowFrameUnits::Range, start_bound: WindowFrameBound::Preceding(None), end_bound: WindowFrameBound::CurrentRow, } } } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum WindowFrameBound { Preceding(Option<ScalarValue>), CurrentRow, Following(Option<ScalarValue>), } impl TryFrom<ast::WindowFrameBound> for WindowFrameBound { type Error = DataFusionError; fn try_from(value: ast::WindowFrameBound) -> Result<Self> { let value_to_scalar = |v| -> Result<_> { match v { None => Ok(None), Some(ast::Value::Number(v, _)) => match v.parse() { Err(_) => Err(DataFusionError::Plan(format!("could not convert window frame bound '{}' to int64", v))), Ok(v) => Ok(Some(ScalarValue::Int64(Some(v)))), }, Some(ast::Value::Interval { value, leading_field, leading_precision, last_field, fractional_seconds_precision }) => Ok(Some(interval_to_scalar(&value, &leading_field, &leading_precision, &last_field, &fractional_seconds_precision)?)), Some(o) => Err(DataFusionError::Plan(format!("window frame bound must be a positive integer or an INTERVAL, got {}", o))), } }; match value { ast::WindowFrameBound::Preceding(v) => { Ok(Self::Preceding(value_to_scalar(v)?)) } ast::WindowFrameBound::Following(v) => { Ok(Self::Following(value_to_scalar(v)?)) } ast::WindowFrameBound::CurrentRow => Ok(Self::CurrentRow), } } } fn interval_to_scalar( value: 
&str, leading_field: &Option<DateTimeField>, leading_precision: &Option<u64>, last_field: &Option<DateTimeField>, fractional_seconds_precision: &Option<u64>, ) -> Result<ScalarValue> { match SqlToRel::<ExecutionContextState>::sql_interval_to_literal( value, leading_field, leading_precision, last_field, fractional_seconds_precision, )? { Expr::Literal(v) => Ok(v), o => panic!("unexpected result of interval_to_literal: {:?}", o), } } impl fmt::Display for WindowFrameBound { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { WindowFrameBound::CurrentRow => f.write_str("CURRENT ROW"), WindowFrameBound::Preceding(None) => f.write_str("UNBOUNDED PRECEDING"), WindowFrameBound::Following(None) => f.write_str("UNBOUNDED FOLLOWING"), WindowFrameBound::Preceding(Some(n)) => write!(f, "{} PRECEDING", n), WindowFrameBound::Following(Some(n)) => write!(f, "{} FOLLOWING", n), } } } impl WindowFrameBound { pub fn logical_cmp(&self, other: &Self) -> Option<Ordering> { use WindowFrameBound::{CurrentRow, Following, Preceding}; let ord = |v: &WindowFrameBound| match v { Preceding(_) => 0, CurrentRow => 1, Following(_) => 2, }; let lo = ord(self); let ro = ord(other); let o = lo.cmp(&ro); if o != Ordering::Equal { return Some(o); } let (l, r) = match (self, other) { (Preceding(Some(l)), Preceding(Some(r))) => (r, l), (Following(Some(l)), Following(Some(r))) => (l, r), (CurrentRow, CurrentRow) => return Some(Ordering::Equal), (Preceding(None), Preceding(None)) => return Some(Ordering::Equal), (Preceding(None), Preceding(Some(_))) => return Some(Ordering::Less), (Preceding(Some(_)), Preceding(None)) => return Some(Ordering::Greater), (Following(None), Following(None)) => return Some(Ordering::Equal), (Following(Some(_)), Following(None)) => return Some(Ordering::Less), (Following(None), Following(Some(_))) => return Some(Ordering::Greater), _ => panic!("unhandled bounds: {} and {}", self, other), }; match (l, r) { (ScalarValue::Int64(Some(l)), 
ScalarValue::Int64(Some(r))) => Some(l.cmp(r)), ( ScalarValue::IntervalDayTime(Some(l)), ScalarValue::IntervalDayTime(Some(r)), ) => Some(l.cmp(r)), ( ScalarValue::IntervalYearMonth(Some(l)), ScalarValue::IntervalYearMonth(Some(r)), ) => Some(l.cmp(r)), _ => None, } } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum WindowFrameUnits { Rows, Range, Groups, } impl fmt::Display for WindowFrameUnits { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match self { WindowFrameUnits::Rows => "ROWS", WindowFrameUnits::Range => "RANGE", WindowFrameUnits::Groups => "GROUPS", }) } } impl From<ast::WindowFrameUnits> for WindowFrameUnits { fn from(value: ast::WindowFrameUnits) -> Self { match value { ast::WindowFrameUnits::Range => Self::Range, ast::WindowFrameUnits::Groups => Self::Groups, ast::WindowFrameUnits::Rows => Self::Rows, } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_window_frame_creation() -> Result<()> { let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Following(None), end_bound: None, }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid window frame: start bound cannot be unbounded following".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(None), end_bound: Some(ast::WindowFrameBound::Preceding(None)), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid window frame: end bound cannot be unbounded preceding".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(Some(1)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(2))), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid 
window frame: start bound (1 PRECEDING) cannot be larger than end bound (2 PRECEDING)".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(Some(2)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(1))), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "This feature is not implemented: With WindowFrameUnits=RANGE, the bound cannot be 2 PRECEDING or FOLLOWING at the moment".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Rows, start_bound: ast::WindowFrameBound::Preceding(Some(2)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(1))), }; let result = WindowFrame::try_from(window_frame); assert!(result.is_ok()); Ok(()) } #[test] fn test_eq() { assert_eq!( WindowFrameBound::Preceding(Some(0)), WindowFrameBound::CurrentRow ); assert_eq!( WindowFrameBound::CurrentRow, WindowFrameBound::Following(Some(0)) ); assert_eq!( WindowFrameBound::Following(Some(2)), WindowFrameBound::Following(Some(2)) ); assert_eq!( WindowFrameBound::Following(None), WindowFrameBound::Following(None) ); assert_eq!( WindowFrameBound::Preceding(Some(2)), WindowFrameBound::Preceding(Some(2)) ); assert_eq!( WindowFrameBound::Preceding(None), WindowFrameBound::Preceding(None) ); } #[test] fn test_ord() { assert!(WindowFrameBound::Preceding(Some(1)) < WindowFrameBound::CurrentRow); assert!( WindowFrameBound::Preceding(Some(2)) < WindowFrameBound::Preceding(Some(1)) ); assert!( WindowFrameBound::Preceding(Some(u64::MAX)) < WindowFrameBound::Preceding(Some(u64::MAX - 1)) ); assert!( WindowFrameBound::Preceding(None) < WindowFrameBound::Preceding(Some(1000000)) ); assert!( WindowFrameBound::Preceding(None) < WindowFrameBound::Preceding(Some(u64::MAX)) ); assert!(WindowFrameBound::Preceding(None) < WindowFrameBound::Following(Some(0))); assert!( WindowFrameBound::Preceding(Some(1)) < WindowFrameBound::Following(Some(1)) ); 
assert!(WindowFrameBound::CurrentRow < WindowFrameBound::Following(Some(1))); assert!( WindowFrameBound::Following(Some(1)) < WindowFrameBound::Following(Some(2)) ); assert!(WindowFrameBound::Following(Some(2)) < WindowFrameBound::Following(None)); assert!( WindowFrameBound::Following(Some(u64::MAX)) < WindowFrameBound::Following(None) ); } }
use crate::error::{DataFusionError, Result}; use crate::execution::context::ExecutionContextState; use crate::logical_plan::Expr; use crate::scalar::ScalarValue; use crate::sql::planner::SqlToRel; use serde_derive::{Deserialize, Serialize}; use sqlparser::ast; use sqlparser::ast::DateTimeField; use std::cmp::Ordering; use std::convert::TryInto; use std::convert::{From, TryFrom}; use std::fmt; #[derive(Debug, Clone, PartialEq)] pub struct WindowFrame { pub units: WindowFrameUnits, pub start_bound: WindowFrameBound, pub end_bound: WindowFrameBound, } impl fmt::Display for WindowFrame { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{} BETWEEN {} AND {}", self.units, self.start_bound, self.end_bound )?; Ok(()) } } impl TryFrom<ast::WindowFrame> for WindowFrame { type Error = DataFusionError; fn try_from(value: ast::WindowFrame) -> Result<Self> { let start_bound = value.start_bound.try_into()?; let end_bound = value .end_bound .map(WindowFrameBound::try_from) .unwrap_or(Ok(WindowFrameBound::CurrentRow))?; check_window_bound_order(&start_bound, &end_bound)?; let is_allowed_range_bound = |s: &ScalarValue| match s { ScalarValue::Int64(Some(i)) => *i == 0, _ => false, }; let units = value.units.into(); if units == WindowFrameUnits::Range { for bound in &[&start_bound, &end_bound] { match bound { WindowFrameBound::Preceding(Some(v)) | WindowFrameBound::Following(Some(v)) if !is_allowed_range_bound(v) => { Err(DataFusionError::NotImplemented(format!( "With WindowFrameUnits={}, the bound cannot be {} PRECEDING or FOLLOWING at the moment", units, v ))) } _ => Ok(()), }?; } } Ok(Self { units, start_bound, end_bound, }) } } #[allow(missing_docs)] pub fn check_window_bound_order( start_bound: &WindowFrameBound, end_bound: &WindowFrameBound, ) -> Result<()> { if let WindowFrameBound::Following(None) = start_bound { Err(DataFusionError::Execution( "Invalid window frame: start bound cannot be unbounded following".to_owned(), )) } else if let 
WindowFrameBound::Preceding(None) = end_bound { Err(DataFusionError::Execution( "Invalid window frame: end bound cannot be unbounded preceding".to_owned(), )) } else { match start_bound.logical_cmp(&end_bound) { None => Err(DataFusionError::Execution(format!( "Invalid window frame: start bound ({}) is incompatble with the end bound ({})", start_bound, end_bound ))), Some(o) if o > Ordering::Equal => Err(DataFusionError::Execution(format!( "Invalid window frame: start bound ({}) cannot be larger than end bound ({})", start_bound, end_bound ))), Some(_) => Ok(()), } } } impl Default for WindowFrame { fn default() -> Self { WindowFrame { units: WindowFrameUnits::Range, start_bound: WindowFrameBound::Preceding(None), end_bound: WindowFrameBound::CurrentRow, } } } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum WindowFrameBound { Preceding(Option<ScalarValue>), CurrentRow, Following(Option<ScalarValue>), } impl TryFrom<ast::WindowFrameBound> for WindowFrameBound { type Error = DataFusionError; fn try_from(value: ast::WindowFrameBound) -> Result<Self> { let value_to_scalar = |v| -> Result<_> { match v { None => Ok(None), Some(ast::Value::Number(v, _)) => match v.parse() { Err(_) => Err(DataFusionError::Plan(format!("could not convert window frame bound '{}' to int64", v))), Ok(v) => Ok(Some(ScalarValue::Int64(Some(v)))), }, Some(ast::Value::Interval { value, leading_field, leading_precision, last_field, fractional_seconds_precision }) => Ok(Some(interval_to_scalar(&value, &leading_field, &leading_precision, &last_field, &fractional_seconds_precision)?)), Some(o) => Err(DataFusionError::Plan(format!("window frame bound must be a positive integer or an INTERVAL, got {}", o))), } }; match value { ast::WindowFrameBound::Preceding(v) => { Ok(Self::Preceding(value_to_scalar(v)?)) } ast::WindowFrameBound::Following(v) => { Ok(Self::Following(value_to_scalar(v)?)) } ast::WindowFrameBound::CurrentRow => Ok(Self::CurrentRow), } } } fn interval_to_scalar( value: 
&str, leading_field: &Option<DateTimeField>, leading_precision: &Option<u64>, last_field: &Option<DateTimeField>, fractional_seconds_precision: &Option<u64>, ) -> Result<ScalarValue> { match SqlToRel::<ExecutionContextState>::sql_interval_to_literal( value, leading_field, leading_precision, last_field, fractional_seconds_precision, )? { Expr::Literal(v) => Ok(v), o => panic!("unexpected result of interval_to_literal: {:?}", o), } } impl fmt::Display for WindowFrameBound { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { WindowFrameBound::CurrentRow => f.write_str("CURRENT ROW"), WindowFrameBound::Preceding(None) => f.write_str("UNBOUNDED PRECEDING"), WindowFrameBound::Following(None) => f.write_str("UNBOUNDED FOLLOWING"), WindowFrameBound::Preceding(Some(n)) => write!(f, "{} PRECEDING", n), WindowFrameBound::Following(Some(n)) => write!(f, "{} FOLLOWING", n), } } } impl WindowFrameBound { pub fn logical_cmp(&self, other: &Self) -> Option<Ordering> { use WindowFrameBound::{CurrentRow, Following, Preceding}; let ord = |v: &WindowFrameBound| match v { Preceding(_) => 0, CurrentRow => 1, Following(_) => 2, }; let lo = ord(self); let ro = ord(other); let o = lo.cmp(&ro); if o != Ordering::Equal { return Some(o); } let (l, r) = match (self, other) { (Preceding(Some(l)), Preceding(Some(r))) => (r, l), (Following(Some(l)), Following(Some(r))) => (l, r), (CurrentRow, CurrentRow) => return Some(Ordering::Equal), (Preceding(None), Preceding(None)) => return Some(Ordering::Equal), (Preceding(None), Preceding(Some(_))) => return Some(Ordering::Less), (Preceding(Some(_)), Preceding(None)) => return Some(Ordering::Greater), (Following(None), Following(None)) => return Some(Ordering::Equal), (Following(Some(_)), Following(None)) => return Some(Ordering::Less), (Following(None), Following(Some(_))) => return Some(Ordering::Greater), _ => panic!("unhandled bounds: {} and {}", self, other), }; match (l, r) { (ScalarValue::Int64(Some(l)), 
ScalarValue::Int64(Some(r))) => Some(l.cmp(r)), ( ScalarValue::IntervalDayTime(Some(l)), ScalarValue::IntervalDayTime(Some(r)), ) => Some(l.cmp(r)), ( ScalarValue::IntervalYearMonth(Some(l)), ScalarValue::IntervalYearMonth(Some(r)), ) => Some(l.cmp(r)), _ => None, } } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum WindowFrameUnits { Rows, Range, Groups, } impl fmt::Display for WindowFrameUnits { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(
) } } impl From<ast::WindowFrameUnits> for WindowFrameUnits { fn from(value: ast::WindowFrameUnits) -> Self { match value { ast::WindowFrameUnits::Range => Self::Range, ast::WindowFrameUnits::Groups => Self::Groups, ast::WindowFrameUnits::Rows => Self::Rows, } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_window_frame_creation() -> Result<()> { let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Following(None), end_bound: None, }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid window frame: start bound cannot be unbounded following".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(None), end_bound: Some(ast::WindowFrameBound::Preceding(None)), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid window frame: end bound cannot be unbounded preceding".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(Some(1)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(2))), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "Execution error: Invalid window frame: start bound (1 PRECEDING) cannot be larger than end bound (2 PRECEDING)".to_owned() ); let window_frame = ast::WindowFrame { units: ast::WindowFrameUnits::Range, start_bound: ast::WindowFrameBound::Preceding(Some(2)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(1))), }; let result = WindowFrame::try_from(window_frame); assert_eq!( result.err().unwrap().to_string(), "This feature is not implemented: With WindowFrameUnits=RANGE, the bound cannot be 2 PRECEDING or FOLLOWING at the moment".to_owned() ); let window_frame = ast::WindowFrame { units: 
ast::WindowFrameUnits::Rows, start_bound: ast::WindowFrameBound::Preceding(Some(2)), end_bound: Some(ast::WindowFrameBound::Preceding(Some(1))), }; let result = WindowFrame::try_from(window_frame); assert!(result.is_ok()); Ok(()) } #[test] fn test_eq() { assert_eq!( WindowFrameBound::Preceding(Some(0)), WindowFrameBound::CurrentRow ); assert_eq!( WindowFrameBound::CurrentRow, WindowFrameBound::Following(Some(0)) ); assert_eq!( WindowFrameBound::Following(Some(2)), WindowFrameBound::Following(Some(2)) ); assert_eq!( WindowFrameBound::Following(None), WindowFrameBound::Following(None) ); assert_eq!( WindowFrameBound::Preceding(Some(2)), WindowFrameBound::Preceding(Some(2)) ); assert_eq!( WindowFrameBound::Preceding(None), WindowFrameBound::Preceding(None) ); } #[test] fn test_ord() { assert!(WindowFrameBound::Preceding(Some(1)) < WindowFrameBound::CurrentRow); assert!( WindowFrameBound::Preceding(Some(2)) < WindowFrameBound::Preceding(Some(1)) ); assert!( WindowFrameBound::Preceding(Some(u64::MAX)) < WindowFrameBound::Preceding(Some(u64::MAX - 1)) ); assert!( WindowFrameBound::Preceding(None) < WindowFrameBound::Preceding(Some(1000000)) ); assert!( WindowFrameBound::Preceding(None) < WindowFrameBound::Preceding(Some(u64::MAX)) ); assert!(WindowFrameBound::Preceding(None) < WindowFrameBound::Following(Some(0))); assert!( WindowFrameBound::Preceding(Some(1)) < WindowFrameBound::Following(Some(1)) ); assert!(WindowFrameBound::CurrentRow < WindowFrameBound::Following(Some(1))); assert!( WindowFrameBound::Following(Some(1)) < WindowFrameBound::Following(Some(2)) ); assert!(WindowFrameBound::Following(Some(2)) < WindowFrameBound::Following(None)); assert!( WindowFrameBound::Following(Some(u64::MAX)) < WindowFrameBound::Following(None) ); } }
match self { WindowFrameUnits::Rows => "ROWS", WindowFrameUnits::Range => "RANGE", WindowFrameUnits::Groups => "GROUPS", }
if_condition
[ { "content": "fn handle<F, R>(args: &[ColumnarValue], op: F, name: &str) -> Result<ColumnarValue>\n\nwhere\n\n R: AsRef<[u8]>,\n\n F: Fn(&str) -> R,\n\n{\n\n match &args[0] {\n\n ColumnarValue::Array(a) => match a.data_type() {\n\n DataType::Utf8 => {\n\n Ok(ColumnarValue::Array(Arc::new(unary_binary_function::<\n\n i32,\n\n _,\n\n _,\n\n >(\n\n &[a.as_ref()], op, name\n\n )?)))\n\n }\n\n DataType::LargeUtf8 => {\n\n Ok(ColumnarValue::Array(Arc::new(unary_binary_function::<\n\n i64,\n\n _,\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 0, "score": 418480.6843065396 }, { "content": "fn handle<'a, F, R>(args: &'a [ColumnarValue], op: F, name: &str) -> Result<ColumnarValue>\n\nwhere\n\n R: AsRef<str>,\n\n F: Fn(&'a str) -> R,\n\n{\n\n match &args[0] {\n\n ColumnarValue::Array(a) => match a.data_type() {\n\n DataType::Utf8 => {\n\n Ok(ColumnarValue::Array(Arc::new(unary_string_function::<\n\n i32,\n\n i32,\n\n _,\n\n _,\n\n >(\n\n &[a.as_ref()], op, name\n\n )?)))\n\n }\n\n DataType::LargeUtf8 => {\n\n Ok(ColumnarValue::Array(Arc::new(unary_string_function::<\n\n i64,\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 1, "score": 409128.00970623724 }, { "content": "/// Returns the datatype of the window function\n\npub fn return_type(fun: &WindowFunction, arg_types: &[DataType]) -> Result<DataType> {\n\n match fun {\n\n WindowFunction::AggregateFunction(fun) => aggregates::return_type(fun, arg_types),\n\n WindowFunction::BuiltInWindowFunction(fun) => {\n\n return_type_for_built_in(fun, arg_types)\n\n }\n\n }\n\n}\n\n\n\n/// Returns the datatype of the built-in window function\n\npub(super) fn return_type_for_built_in(\n\n fun: &BuiltInWindowFunction,\n\n arg_types: &[DataType],\n\n) -> Result<DataType> {\n\n // Note that this function *must* return the same type that the respective physical expression returns\n\n // or the execution panics.\n\n\n\n // verify that this is a valid set of data types for this 
function\n\n data_types(arg_types, &signature_for_built_in(fun))?;\n\n\n", "file_path": "datafusion/src/physical_plan/window_functions.rs", "rank": 2, "score": 375360.38169924164 }, { "content": "/// Returns a cloned `Expr`, but any of the `Expr`'s in the tree may be\n\n/// replaced/customized by the replacement function.\n\n///\n\n/// The replacement function is called repeatedly with `Expr`, starting with\n\n/// the argument `expr`, then descending depth-first through its\n\n/// descendants. The function chooses to replace or keep (clone) each `Expr`.\n\n///\n\n/// The function's return type is `Result<Option<Expr>>>`, where:\n\n///\n\n/// * `Ok(Some(replacement_expr))`: A replacement `Expr` is provided; it is\n\n/// swapped in at the particular node in the tree. Any nested `Expr` are\n\n/// not subject to cloning/replacement.\n\n/// * `Ok(None)`: A replacement `Expr` is not provided. The `Expr` is\n\n/// recreated, with all of its nested `Expr`'s subject to\n\n/// cloning/replacement.\n\n/// * `Err(err)`: Any error returned by the function is returned as-is by\n\n/// `clone_with_replacement()`.\n\npub fn clone_with_replacement<F>(expr: &Expr, replacement_fn: &F) -> Result<Expr>\n\nwhere\n\n F: Fn(&Expr) -> Result<Option<Expr>>,\n\n{\n\n let replacement_opt = replacement_fn(expr)?;\n\n\n\n match replacement_opt {\n\n // If we were provided a replacement, use the replacement. 
Do not\n\n // descend further.\n\n Some(replacement) => Ok(replacement),\n\n // No replacement was provided, clone the node and recursively call\n\n // clone_with_replacement() on any nested expressions.\n\n None => match expr {\n\n Expr::AggregateFunction {\n\n fun,\n\n args,\n\n distinct,\n\n } => Ok(Expr::AggregateFunction {\n\n fun: fun.clone(),\n\n args: args\n", "file_path": "datafusion/src/sql/utils.rs", "rank": 3, "score": 371345.7518967459 }, { "content": "#[allow(missing_docs)]\n\npub fn write_group_result_row(\n\n mode: AggregateMode,\n\n group_by_values: &[GroupByScalar],\n\n accumulator_set: &AccumulatorSet,\n\n key_fields: &[Field],\n\n key_columns: &mut Vec<Box<dyn ArrayBuilder>>,\n\n value_columns: &mut Vec<Box<dyn ArrayBuilder>>,\n\n) -> Result<()> {\n\n let add_key_columns = key_columns.is_empty();\n\n for i in 0..group_by_values.len() {\n\n match &group_by_values[i] {\n\n // Optimization to avoid allocation on conversion to ScalarValue.\n\n GroupByScalar::Utf8(str) => {\n\n if add_key_columns {\n\n key_columns.push(Box::new(StringBuilder::new(0)));\n\n }\n\n key_columns[i]\n\n .as_any_mut()\n\n .downcast_mut::<StringBuilder>()\n\n .unwrap()\n", "file_path": "datafusion/src/physical_plan/hash_aggregate.rs", "rank": 4, "score": 366401.78927313944 }, { "content": "/// Propagates current span to blocking operation. 
See [spawn] for details.\n\npub fn spawn_blocking<F, R>(f: F) -> JoinHandle<R>\n\nwhere\n\n F: FnOnce() -> R + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n if let Some(s) = new_subtask_span() {\n\n tokio::task::spawn_blocking(move || {\n\n let _p = s.parent; // ensure parent stays alive.\n\n s.child.in_scope(f)\n\n })\n\n } else {\n\n tokio::task::spawn_blocking(f)\n\n }\n\n}\n\n\n", "file_path": "datafusion/src/cube_ext/spawn.rs", "rank": 5, "score": 351332.8371089396 }, { "content": "/// Convert SQL data type to relational representation of data type\n\npub fn convert_data_type(sql: &SQLDataType) -> Result<DataType> {\n\n match sql {\n\n SQLDataType::Boolean => Ok(DataType::Boolean),\n\n SQLDataType::SmallInt => Ok(DataType::Int16),\n\n SQLDataType::Int => Ok(DataType::Int32),\n\n SQLDataType::BigInt => Ok(DataType::Int64),\n\n SQLDataType::Float(_) | SQLDataType::Real => Ok(DataType::Float64),\n\n SQLDataType::Double => Ok(DataType::Float64),\n\n SQLDataType::Char(_) | SQLDataType::Varchar(_) => Ok(DataType::Utf8),\n\n SQLDataType::Timestamp => Ok(DataType::Timestamp(TimeUnit::Nanosecond, None)),\n\n SQLDataType::Date => Ok(DataType::Date32),\n\n other => Err(DataFusionError::NotImplemented(format!(\n\n \"Unsupported SQL type {:?}\",\n\n other\n\n ))),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "datafusion/src/sql/planner.rs", "rank": 6, "score": 336878.6714877247 }, { "content": "/// function return type of an average\n\npub fn avg_return_type(arg_type: &DataType) -> Result<DataType> {\n\n match arg_type {\n\n DataType::Int8\n\n | DataType::Int16\n\n | DataType::Int32\n\n | DataType::Int64\n\n | DataType::UInt8\n\n | DataType::UInt16\n\n | DataType::UInt32\n\n | DataType::UInt64\n\n | DataType::Float32\n\n | DataType::Float64 => Ok(DataType::Float64),\n\n other => Err(DataFusionError::Plan(format!(\n\n \"AVG does not support {:?}\",\n\n other\n\n ))),\n\n }\n\n}\n\n\n\nimpl Avg {\n", "file_path": 
"datafusion/src/physical_plan/expressions/average.rs", "rank": 7, "score": 333590.1468000982 }, { "content": "/// function return type of a sum\n\npub fn sum_return_type(arg_type: &DataType) -> Result<DataType> {\n\n match arg_type {\n\n DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 => {\n\n Ok(DataType::Int64)\n\n }\n\n DataType::Int64Decimal(scale) => Ok(DataType::Int64Decimal(*scale)),\n\n DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 => {\n\n Ok(DataType::UInt64)\n\n }\n\n DataType::Float32 => Ok(DataType::Float32),\n\n DataType::Float64 => Ok(DataType::Float64),\n\n other => Err(DataFusionError::Plan(format!(\n\n \"SUM does not support type \\\"{:?}\\\"\",\n\n other\n\n ))),\n\n }\n\n}\n\n\n\nimpl Sum {\n\n /// Create a new SUM aggregate function\n", "file_path": "datafusion/src/physical_plan/expressions/sum.rs", "rank": 8, "score": 333590.1468000982 }, { "content": "/// rewrite a `Vec` of `Expr`s with the rewriter\n\nfn rewrite_vec<R>(v: Vec<Expr>, rewriter: &mut R) -> Result<Vec<Expr>>\n\nwhere\n\n R: ExprRewriter,\n\n{\n\n v.into_iter().map(|expr| expr.rewrite(rewriter)).collect()\n\n}\n\n\n\n/// Controls how the visitor recursion should proceed.\n\npub enum Recursion<V: ExpressionVisitor> {\n\n /// Attempt to visit all the children, recursively, of this expression.\n\n Continue(V),\n\n /// Do not visit the children of this expression, though the walk\n\n /// of parents of this expression will not be affected\n\n Stop(V),\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 10, "score": 327806.6815817945 }, { "content": "fn tuple_err<T, R>(value: (Result<T>, Result<R>)) -> Result<(T, R)> {\n\n match value {\n\n (Ok(e), Ok(e1)) => Ok((e, e1)),\n\n (Err(e), Ok(_)) => Err(e),\n\n (Ok(_), Err(e1)) => Err(e1),\n\n (Err(e), Err(_)) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::logical_plan::{DFField, DFSchema, DFSchemaRef};\n\n use 
crate::physical_plan::{csv::CsvReadOptions, expressions, Partitioning};\n\n use crate::scalar::ScalarValue;\n\n use crate::{\n\n logical_plan::{col, lit, sum, LogicalPlanBuilder},\n\n physical_plan::SendableRecordBatchStream,\n\n };\n\n use arrow::datatypes::{DataType, Field, SchemaRef};\n", "file_path": "datafusion/src/physical_plan/planner.rs", "rank": 11, "score": 324233.01660493 }, { "content": "/// Returns the datatype of the scalar function\n\npub fn return_type(fun: &AggregateFunction, arg_types: &[DataType]) -> Result<DataType> {\n\n // Note that this function *must* return the same type that the respective physical expression returns\n\n // or the execution panics.\n\n\n\n // verify that this is a valid set of data types for this function\n\n data_types(arg_types, &signature(fun))?;\n\n\n\n match fun {\n\n AggregateFunction::Count => Ok(DataType::UInt64),\n\n AggregateFunction::Max | AggregateFunction::Min => Ok(arg_types[0].clone()),\n\n AggregateFunction::Sum => sum_return_type(&arg_types[0]),\n\n AggregateFunction::Avg => avg_return_type(&arg_types[0]),\n\n }\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/aggregates.rs", "rank": 12, "score": 321970.2322467006 }, { "content": "/// Recursively builds a list of files in a directory with a given extension\n\npub fn build_file_list(dir: &str, ext: &str) -> Result<Vec<String>> {\n\n let mut filenames: Vec<String> = Vec::new();\n\n build_file_list_recurse(dir, &mut filenames, ext)?;\n\n Ok(filenames)\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/common.rs", "rank": 13, "score": 308593.0411597548 }, { "content": "pub fn ballista_error(message: &str) -> BallistaError {\n\n BallistaError::General(message.to_owned())\n\n}\n\n\n\nimpl From<String> for BallistaError {\n\n fn from(e: String) -> Self {\n\n BallistaError::General(e)\n\n }\n\n}\n\n\n\nimpl From<ArrowError> for BallistaError {\n\n fn from(e: ArrowError) -> Self {\n\n BallistaError::ArrowError(e)\n\n }\n\n}\n\n\n\nimpl 
From<parser::ParserError> for BallistaError {\n\n fn from(e: parser::ParserError) -> Self {\n\n BallistaError::SqlError(e)\n\n }\n", "file_path": "ballista/rust/core/src/error.rs", "rank": 14, "score": 306164.1734483945 }, { "content": "/// some tests share a common table with different names\n\npub fn test_table_scan_with_name(name: &str) -> Result<LogicalPlan> {\n\n let schema = Schema::new(vec![\n\n Field::new(\"a\", DataType::UInt32, false),\n\n Field::new(\"b\", DataType::UInt32, false),\n\n Field::new(\"c\", DataType::UInt32, false),\n\n ]);\n\n LogicalPlanBuilder::scan_empty(Some(name), &schema, None)?.build()\n\n}\n\n\n", "file_path": "datafusion/src/test/mod.rs", "rank": 15, "score": 306080.56563583063 }, { "content": "/// all tests share a common table\n\npub fn test_table_scan_with_alias(alias: &str) -> Result<LogicalPlan> {\n\n let schema = Schema::new(vec![\n\n Field::new(\"a\", DataType::UInt32, false),\n\n Field::new(\"b\", DataType::UInt32, false),\n\n Field::new(\"c\", DataType::UInt32, false),\n\n ]);\n\n LogicalPlanBuilder::scan_empty(Some(alias), &schema, None)?.build()\n\n}\n\n\n", "file_path": "datafusion/src/test/mod.rs", "rank": 16, "score": 306080.56563583063 }, { "content": "fn data_type_id(id: &i32) -> Result<DataType, errors::DataFusionError> {\n\n // see https://github.com/apache/arrow/blob/3694794bdfd0677b95b8c95681e392512f1c9237/python/pyarrow/includes/libarrow.pxd\n\n // this is not ideal as it does not generalize for non-basic types\n\n // Find a way to get a unique name from the pyarrow.DataType\n\n Ok(match id {\n\n 1 => DataType::Boolean,\n\n 2 => DataType::UInt8,\n\n 3 => DataType::Int8,\n\n 4 => DataType::UInt16,\n\n 5 => DataType::Int16,\n\n 6 => DataType::UInt32,\n\n 7 => DataType::Int32,\n\n 8 => DataType::UInt64,\n\n 9 => DataType::Int64,\n\n 10 => DataType::Float16,\n\n 11 => DataType::Float32,\n\n 12 => DataType::Float64,\n\n 13 => DataType::Utf8,\n\n 14 => DataType::Binary,\n\n 34 => DataType::LargeUtf8,\n\n 35 => 
DataType::LargeBinary,\n\n other => {\n\n return Err(errors::DataFusionError::Common(format!(\n\n \"The type {} is not valid\",\n\n other\n\n )))\n\n }\n\n })\n\n}\n", "file_path": "python/src/types.rs", "rank": 17, "score": 302793.0540222621 }, { "content": "pub fn datafusion_test_context(path: &str) -> Result<ExecutionContext> {\n\n let default_shuffle_partitions = 2;\n\n let config = ExecutionConfig::new().with_concurrency(default_shuffle_partitions);\n\n let mut ctx = ExecutionContext::with_config(config);\n\n for table in TPCH_TABLES {\n\n let schema = get_tpch_schema(table);\n\n let options = CsvReadOptions::new()\n\n .schema(&schema)\n\n .delimiter(b'|')\n\n .has_header(false)\n\n .file_extension(\".tbl\");\n\n let dir = format!(\"{}/{}\", path, table);\n\n ctx.register_csv(table, &dir, options)?;\n\n }\n\n Ok(ctx)\n\n}\n\n\n", "file_path": "ballista/rust/scheduler/src/test_utils.rs", "rank": 18, "score": 302784.7438203158 }, { "content": "/// Returns a directory path for finding test data.\n\n///\n\n/// udf_env: name of an environment variable\n\n///\n\n/// submodule_dir: fallback path (relative to CARGO_MANIFEST_DIR)\n\n///\n\n/// Returns either:\n\n/// The path referred to in `udf_env` if that variable is set and refers to a directory\n\n/// The submodule_data directory relative to CARGO_MANIFEST_PATH\n\nfn get_data_dir(udf_env: &str, submodule_data: &str) -> Result<PathBuf, Box<dyn Error>> {\n\n // Try user defined env.\n\n if let Ok(dir) = env::var(udf_env) {\n\n let trimmed = dir.trim().to_string();\n\n if !trimmed.is_empty() {\n\n let pb = PathBuf::from(trimmed);\n\n if pb.is_dir() {\n\n return Ok(pb);\n\n } else {\n\n return Err(format!(\n\n \"the data dir `{}` defined by env {} not found\",\n\n pb.display().to_string(),\n\n udf_env\n\n )\n\n .into());\n\n }\n\n }\n\n }\n\n\n\n // The env is undefined or its value is trimmed to empty, let's try default dir.\n", "file_path": "datafusion/src/test_util.rs", "rank": 19, "score": 297438.9154651019 }, { 
"content": "#[allow(clippy::boxed_local)]\n\nfn rewrite_boxed<R>(boxed_expr: Box<Expr>, rewriter: &mut R) -> Result<Box<Expr>>\n\nwhere\n\n R: ExprRewriter,\n\n{\n\n // TODO: It might be possible to avoid an allocation (the\n\n // Box::new) below by reusing the box.\n\n let expr: Expr = *boxed_expr;\n\n let rewritten_expr = expr.rewrite(rewriter)?;\n\n Ok(Box::new(rewritten_expr))\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 20, "score": 296234.9473511569 }, { "content": "/// Generated partitioned copy of a CSV file\n\npub fn create_partitioned_csv(filename: &str, partitions: usize) -> Result<String> {\n\n let testdata = crate::test_util::arrow_test_data();\n\n let path = format!(\"{}/csv/{}\", testdata, filename);\n\n\n\n let tmp_dir = TempDir::new()?;\n\n\n\n let mut writers = vec![];\n\n for i in 0..partitions {\n\n let filename = format!(\"partition-{}.csv\", i);\n\n let filename = tmp_dir.path().join(&filename);\n\n\n\n let writer = BufWriter::new(File::create(&filename).unwrap());\n\n writers.push(writer);\n\n }\n\n\n\n let f = File::open(&path)?;\n\n let f = BufReader::new(f);\n\n for (i, line) in f.lines().enumerate() {\n\n let line = line.unwrap();\n\n\n", "file_path": "datafusion/src/test/mod.rs", "rank": 21, "score": 294952.8930606079 }, { "content": "/// put values in an array.\n\npub fn array(values: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n let arrays: Vec<&dyn Array> = values\n\n .iter()\n\n .map(|value| {\n\n if let ColumnarValue::Array(value) = value {\n\n Ok(value.as_ref())\n\n } else {\n\n Err(DataFusionError::NotImplemented(\n\n \"Array is not implemented for scalar values.\".to_string(),\n\n ))\n\n }\n\n })\n\n .collect::<Result<_>>()?;\n\n\n\n Ok(ColumnarValue::Array(array_array(&arrays)?))\n\n}\n\n\n\n/// Currently supported types by the array function.\n\n/// The order of these types correspond to the order on which coercion applies\n\n/// This should thus be from least informative to most informative\n", 
"file_path": "datafusion/src/physical_plan/array_expressions.rs", "rank": 22, "score": 294208.8706321672 }, { "content": "pub fn produce_diagram(filename: &str, stages: &[Arc<ShuffleWriterExec>]) -> Result<()> {\n\n let write_file = File::create(filename)?;\n\n let mut w = BufWriter::new(&write_file);\n\n writeln!(w, \"digraph G {{\")?;\n\n\n\n // draw stages and entities\n\n for stage in stages {\n\n writeln!(w, \"\\tsubgraph cluster{} {{\", stage.stage_id())?;\n\n writeln!(w, \"\\t\\tlabel = \\\"Stage {}\\\";\", stage.stage_id())?;\n\n let mut id = AtomicUsize::new(0);\n\n build_exec_plan_diagram(\n\n &mut w,\n\n stage.children()[0].as_ref(),\n\n stage.stage_id(),\n\n &mut id,\n\n true,\n\n )?;\n\n writeln!(w, \"\\t}}\")?;\n\n }\n\n\n", "file_path": "ballista/rust/core/src/utils.rs", "rank": 23, "score": 288470.1884757816 }, { "content": "/// crypto function that accepts Utf8 or LargeUtf8 and returns a [`ColumnarValue`]\n\npub fn md5(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n match &args[0] {\n\n ColumnarValue::Array(a) => match a.data_type() {\n\n DataType::Utf8 => Ok(ColumnarValue::Array(Arc::new(md5_array::<i32>(&[\n\n a.as_ref()\n\n ])?))),\n\n DataType::LargeUtf8 => {\n\n Ok(ColumnarValue::Array(Arc::new(md5_array::<i64>(&[\n\n a.as_ref()\n\n ])?)))\n\n }\n\n other => Err(DataFusionError::Internal(format!(\n\n \"Unsupported data type {:?} for function md5\",\n\n other,\n\n ))),\n\n },\n\n ColumnarValue::Scalar(scalar) => match scalar {\n\n ScalarValue::Utf8(a) => {\n\n let result = a.as_ref().map(|x| md5_process(x));\n\n Ok(ColumnarValue::Scalar(ScalarValue::Utf8(result)))\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 25, "score": 287697.8275211335 }, { "content": "/// crypto function that accepts Utf8 or LargeUtf8 and returns a [`ColumnarValue`]\n\npub fn sha512(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, sha_process::<Sha512>, \"sha512\")\n\n}\n", "file_path": 
"datafusion/src/physical_plan/crypto_expressions.rs", "rank": 26, "score": 287697.8275211335 }, { "content": "/// crypto function that accepts Utf8 or LargeUtf8 and returns a [`ColumnarValue`]\n\npub fn sha384(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, sha_process::<Sha384>, \"sha384\")\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 27, "score": 287697.8275211335 }, { "content": "/// crypto function that accepts Utf8 or LargeUtf8 and returns a [`ColumnarValue`]\n\npub fn sha256(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, sha_process::<Sha256>, \"sha256\")\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 28, "score": 287697.8275211335 }, { "content": "/// crypto function that accepts Utf8 or LargeUtf8 and returns a [`ColumnarValue`]\n\npub fn sha224(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, sha_process::<Sha224>, \"ssh224\")\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 29, "score": 287697.8275211335 }, { "content": "/// Converts the string to all upper case.\n\n/// upper('tom') = 'TOM'\n\npub fn upper(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, |string| string.to_ascii_uppercase(), \"upper\")\n\n}\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 30, "score": 287691.8817770411 }, { "content": "/// Converts the string to all lower case.\n\n/// lower('TOM') = 'tom'\n\npub fn lower(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle(args, |string| string.to_ascii_lowercase(), \"lower\")\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 31, "score": 287691.8817770411 }, { "content": "/// Concatenates the text representations of all the arguments. 
NULL arguments are ignored.\n\n/// concat('abcde', 2, NULL, 22) = 'abcde222'\n\npub fn concat(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n // do not accept 0 arguments.\n\n if args.is_empty() {\n\n return Err(DataFusionError::Internal(format!(\n\n \"concat was called with {} arguments. It requires at least 1.\",\n\n args.len()\n\n )));\n\n }\n\n\n\n // first, decide whether to return a scalar or a vector.\n\n let mut return_array = args.iter().filter_map(|x| match x {\n\n ColumnarValue::Array(array) => Some(array.len()),\n\n _ => None,\n\n });\n\n if let Some(size) = return_array.next() {\n\n let result = (0..size)\n\n .map(|index| {\n\n let mut owned_string: String = \"\".to_owned();\n\n for arg in args {\n\n match arg {\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 32, "score": 287685.6398648054 }, { "content": "/// to_timestamp SQL function\n\npub fn to_timestamp(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle::<TimestampNanosecondType, _, TimestampNanosecondType>(\n\n args,\n\n string_to_timestamp_nanos_shim,\n\n \"to_timestamp\",\n\n )\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 33, "score": 287685.6398648054 }, { "content": "/// random SQL function\n\npub fn random(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n let len: usize = match &args[0] {\n\n ColumnarValue::Array(array) => array.len(),\n\n _ => {\n\n return Err(DataFusionError::Internal(\n\n \"Expect random function to take no param\".to_string(),\n\n ))\n\n }\n\n };\n\n let mut rng = thread_rng();\n\n let values = iter::repeat_with(|| rng.gen_range(0.0..1.0)).take(len);\n\n let array = Float64Array::from_iter_values(values);\n\n Ok(ColumnarValue::Array(Arc::new(array)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use arrow::array::{Float64Array, NullArray};\n", "file_path": "datafusion/src/physical_plan/math_expressions.rs", "rank": 34, "score": 287685.6398648054 }, { 
"content": "/// Recursively walk a list of expression trees, collecting the unique set of columns\n\n/// referenced in the expression\n\npub fn exprlist_to_columns(expr: &[Expr], accum: &mut HashSet<Column>) -> Result<()> {\n\n for e in expr {\n\n expr_to_columns(e, accum)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/src/optimizer/utils.rs", "rank": 35, "score": 286960.2248272653 }, { "content": "/// Recursively walk an expression tree, collecting the unique set of columns\n\n/// referenced in the expression\n\npub fn expr_to_columns(expr: &Expr, accum: &mut HashSet<Column>) -> Result<()> {\n\n expr.accept(ColumnNameVisitor { accum })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/src/optimizer/utils.rs", "rank": 36, "score": 286960.2248272653 }, { "content": "/// Implements NULLIF(expr1, expr2)\n\n/// Args: 0 - left expr is any array\n\n/// 1 - if the left is equal to this expr2, then the result is NULL, otherwise left value is passed.\n\n///\n\npub fn nullif_func(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n if args.len() != 2 {\n\n return Err(DataFusionError::Internal(format!(\n\n \"{:?} args were supplied but NULLIF takes exactly two args\",\n\n args.len(),\n\n )));\n\n }\n\n\n\n let (lhs, rhs) = (&args[0], &args[1]);\n\n\n\n match (lhs, rhs) {\n\n (ColumnarValue::Array(lhs), ColumnarValue::Scalar(rhs)) => {\n\n let cond_array = binary_array_op_scalar!(lhs, rhs.clone(), eq).unwrap()?;\n\n\n\n let array = primitive_bool_array_op!(lhs, *cond_array, nullif)?;\n\n\n\n Ok(ColumnarValue::Array(array))\n\n }\n\n (ColumnarValue::Array(lhs), ColumnarValue::Array(rhs)) => {\n\n // Get args0 == args1 evaluated and produce a boolean array\n", "file_path": "datafusion/src/physical_plan/expressions/nullif.rs", "rank": 37, "score": 284532.514620138 }, { "content": "/// to_timestamp_millis SQL function\n\npub fn to_timestamp_millis(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle::<TimestampMillisecondType, _, 
TimestampMillisecondType>(\n\n args,\n\n |s| string_to_timestamp_nanos_shim(s).map(|n| n / 1_000_000),\n\n \"to_timestamp_millis\",\n\n )\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 38, "score": 284521.10630423814 }, { "content": "/// to_timestamp_seconds SQL function\n\npub fn to_timestamp_seconds(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle::<TimestampSecondType, _, TimestampSecondType>(\n\n args,\n\n |s| string_to_timestamp_nanos_shim(s).map(|n| n / 1_000_000_000),\n\n \"to_timestamp_seconds\",\n\n )\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 39, "score": 284521.10630423814 }, { "content": "/// date_trunc SQL function\n\npub fn date_trunc(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n let (granularity, array) = (&args[0], &args[1]);\n\n\n\n let granularity =\n\n if let ColumnarValue::Scalar(ScalarValue::Utf8(Some(v))) = granularity {\n\n v\n\n } else {\n\n return Err(DataFusionError::Execution(\n\n \"Granularity of `date_trunc` must be non-null scalar Utf8\".to_string(),\n\n ));\n\n };\n\n\n\n let f = |x: Option<i64>| x.map(|x| date_trunc_single(granularity, x)).transpose();\n\n\n\n Ok(match array {\n\n ColumnarValue::Scalar(scalar) => {\n\n if let ScalarValue::TimestampNanosecond(v) = scalar {\n\n ColumnarValue::Scalar(ScalarValue::TimestampNanosecond((f)(*v)?))\n\n } else {\n\n return Err(DataFusionError::Execution(\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 40, "score": 284521.10630423814 }, { "content": "/// DATE_PART SQL function\n\npub fn date_part(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n if args.len() != 2 {\n\n return Err(DataFusionError::Execution(\n\n \"Expected two arguments in DATE_PART\".to_string(),\n\n ));\n\n }\n\n let (date_part, array) = (&args[0], &args[1]);\n\n\n\n let date_part = if let ColumnarValue::Scalar(ScalarValue::Utf8(Some(v))) = date_part {\n\n v\n\n } else {\n\n 
return Err(DataFusionError::Execution(\n\n \"First argument of `DATE_PART` must be non-null scalar Utf8\".to_string(),\n\n ));\n\n };\n\n\n\n let is_scalar = matches!(array, ColumnarValue::Scalar(_));\n\n\n\n let array = match array {\n\n ColumnarValue::Array(array) => array.clone(),\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 41, "score": 284521.10630423814 }, { "content": "/// to_timestamp_micros SQL function\n\npub fn to_timestamp_micros(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n handle::<TimestampMicrosecondType, _, TimestampMicrosecondType>(\n\n args,\n\n |s| string_to_timestamp_nanos_shim(s).map(|n| n / 1_000),\n\n \"to_timestamp_micros\",\n\n )\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 42, "score": 284521.10630423814 }, { "content": "/// Returns the datatype of the scalar function\n\npub fn return_type(\n\n fun: &BuiltinScalarFunction,\n\n arg_types: &[DataType],\n\n) -> Result<DataType> {\n\n // Note that this function *must* return the same type that the respective physical expression returns\n\n // or the execution panics.\n\n\n\n // verify that this is a valid set of data types for this function\n\n data_types(arg_types, &signature(fun))?;\n\n\n\n // the return type of the built in function.\n\n // Some built-in functions' return type depends on the incoming type.\n\n match fun {\n\n BuiltinScalarFunction::Array => Ok(DataType::FixedSizeList(\n\n Box::new(Field::new(\"item\", arg_types[0].clone(), true)),\n\n arg_types.len() as i32,\n\n )),\n\n BuiltinScalarFunction::Ascii => Ok(DataType::Int32),\n\n BuiltinScalarFunction::BitLength => utf8_to_int_type(&arg_types[0], \"bit_length\"),\n\n BuiltinScalarFunction::Btrim => utf8_to_str_type(&arg_types[0], \"btrim\"),\n", "file_path": "datafusion/src/physical_plan/functions.rs", "rank": 43, "score": 280040.382574402 }, { "content": "/// Appends a sequence of [u8] bytes for the value in `col[row]` to\n\n/// `vec` to 
be used as a key into the hash map\n\nfn create_key_for_col(col: &ArrayRef, row: usize, vec: &mut KeyVec) -> Result<()> {\n\n match col.data_type() {\n\n DataType::Boolean => {\n\n let array = col.as_any().downcast_ref::<BooleanArray>().unwrap();\n\n vec.extend_from_slice(&[array.value(row) as u8]);\n\n }\n\n DataType::Float32 => {\n\n let array = col.as_any().downcast_ref::<Float32Array>().unwrap();\n\n vec.extend_from_slice(&array.value(row).to_le_bytes());\n\n }\n\n DataType::Float64 => {\n\n let array = col.as_any().downcast_ref::<Float64Array>().unwrap();\n\n vec.extend_from_slice(&array.value(row).to_le_bytes());\n\n }\n\n DataType::UInt8 => {\n\n let array = col.as_any().downcast_ref::<UInt8Array>().unwrap();\n\n vec.extend_from_slice(&array.value(row).to_le_bytes());\n\n }\n\n DataType::UInt16 => {\n\n let array = col.as_any().downcast_ref::<UInt16Array>().unwrap();\n", "file_path": "datafusion/src/physical_plan/hash_aggregate.rs", "rank": 44, "score": 276860.986116309 }, { "content": "/// Create a column expression\n\npub fn col(name: &str, schema: &Schema) -> Result<Arc<dyn PhysicalExpr>> {\n\n Ok(Arc::new(Column::new_with_schema(name, schema)?))\n\n}\n", "file_path": "datafusion/src/physical_plan/expressions/column.rs", "rank": 45, "score": 276143.66452859604 }, { "content": "/// Extract the values in `group_by_keys` arrow arrays into the target vector\n\n/// as GroupByScalar values\n\npub fn create_group_by_values(\n\n group_by_keys: &[ArrayRef],\n\n row: usize,\n\n vec: &mut SmallVec<[GroupByScalar; 2]>,\n\n) -> Result<()> {\n\n for (i, col) in group_by_keys.iter().enumerate() {\n\n vec[i] = create_group_by_value(col, row)?\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn compute_grouped_sorted_aggregate(\n\n mode: AggregateMode,\n\n schema: SchemaRef,\n\n group_expr: Vec<Arc<dyn PhysicalExpr>>,\n\n aggr_expr: Vec<Arc<dyn AggregateExpr>>,\n\n mut input: SendableRecordBatchStream,\n\n) -> ArrowResult<RecordBatch> {\n\n // the expressions to evaluate the batch, one 
vec of expressions per aggregation\n\n let aggregate_expressions =\n", "file_path": "datafusion/src/physical_plan/hash_aggregate.rs", "rank": 46, "score": 271915.3162133056 }, { "content": "/// Panics if arrays are of different types. Comparison is ascending, null first.\n\npub fn cmp_array_row_same_types(\n\n l: &ArrayRef,\n\n l_row: usize,\n\n r: &ArrayRef,\n\n r_row: usize,\n\n) -> Ordering {\n\n let l_null = l.is_null(l_row);\n\n let r_null = r.is_null(r_row);\n\n if l_null && r_null {\n\n return Ordering::Equal;\n\n }\n\n if l_null && !r_null {\n\n return Ordering::Less;\n\n }\n\n if !l_null && r_null {\n\n return Ordering::Greater;\n\n }\n\n\n\n macro_rules! cmp_row {\n\n ($l: expr, Float32Array, $($rest: tt)*) => {{\n", "file_path": "datafusion/src/cube_ext/util.rs", "rank": 47, "score": 271897.83156633005 }, { "content": "/// convert_tz SQL function\n\npub fn convert_tz(args: &[ArrayRef]) -> Result<ArrayRef> {\n\n let timestamps = &args[0]\n\n .as_any()\n\n .downcast_ref::<TimestampNanosecondArray>()\n\n .ok_or_else(|| {\n\n DataFusionError::Execution(\n\n \"Could not cast convert_tz timestamp input to TimestampNanosecondArray\"\n\n .to_string(),\n\n )\n\n })?;\n\n\n\n let shift = &args[1]\n\n .as_any()\n\n .downcast_ref::<StringArray>()\n\n .ok_or_else(|| {\n\n DataFusionError::Execution(\n\n \"Could not cast convert_tz shift input to StringArray\".to_string(),\n\n )\n\n })?;\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 48, "score": 270821.7754734948 }, { "content": "pub fn assert_fields_eq(plan: &LogicalPlan, expected: Vec<&str>) {\n\n let actual: Vec<String> = plan\n\n .schema()\n\n .fields()\n\n .iter()\n\n .map(|f| f.name().clone())\n\n .collect();\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "datafusion/src/test/mod.rs", "rank": 49, "score": 270522.3567157234 }, { "content": "fn date_trunc_single(granularity: &str, value: i64) -> Result<i64> {\n\n let value = 
timestamp_ns_to_datetime(value).with_nanosecond(0);\n\n let value = match granularity {\n\n \"second\" => value,\n\n \"minute\" => value.and_then(|d| d.with_second(0)),\n\n \"hour\" => value\n\n .and_then(|d| d.with_second(0))\n\n .and_then(|d| d.with_minute(0)),\n\n \"day\" => value\n\n .and_then(|d| d.with_second(0))\n\n .and_then(|d| d.with_minute(0))\n\n .and_then(|d| d.with_hour(0)),\n\n \"week\" => value\n\n .and_then(|d| d.with_second(0))\n\n .and_then(|d| d.with_minute(0))\n\n .and_then(|d| d.with_hour(0))\n\n .map(|d| d - Duration::seconds(60 * 60 * 24 * d.weekday() as i64)),\n\n \"month\" => value\n\n .and_then(|d| d.with_second(0))\n\n .and_then(|d| d.with_minute(0))\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 50, "score": 268348.96001933055 }, { "content": "pub fn decode_protobuf(bytes: &[u8]) -> Result<BallistaAction, BallistaError> {\n\n let mut buf = Cursor::new(bytes);\n\n\n\n protobuf::Action::decode(&mut buf)\n\n .map_err(|e| BallistaError::Internal(format!(\"{:?}\", e)))\n\n .and_then(|node| node.try_into())\n\n}\n\n\n\npub(crate) fn proto_error<S: Into<String>>(message: S) -> BallistaError {\n\n BallistaError::General(message.into())\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
convert_required {\n\n ($PB:expr) => {{\n\n if let Some(field) = $PB.as_ref() {\n\n field.try_into()\n\n } else {\n\n Err(proto_error(\"Missing required field in protobuf\"))\n\n }\n", "file_path": "ballista/rust/core/src/serde/mod.rs", "rank": 51, "score": 263317.66957165493 }, { "content": "/// Concatenates all but the first argument, with separators.\n\n/// The first argument is used as the separator string, and should not be NULL.\n\n/// Other NULL arguments are ignored.\n\npub fn concat_ws(sep: impl Into<String>, values: &[Expr]) -> Expr {\n\n let mut args = vec![lit(sep.into())];\n\n args.extend_from_slice(values);\n\n Expr::ScalarFunction {\n\n fun: functions::BuiltinScalarFunction::ConcatWithSeparator,\n\n args,\n\n }\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 52, "score": 263200.44447907753 }, { "content": "fn extract_job_id_from_task_key(job_key: &str) -> Result<&str> {\n\n job_key.split('/').nth(4).ok_or_else(|| {\n\n BallistaError::Internal(format!(\"Unexpected task key: {}\", job_key))\n\n })\n\n}\n\n\n", "file_path": "ballista/rust/scheduler/src/state/mod.rs", "rank": 53, "score": 253998.8575662212 }, { "content": "/// Returns starting index of specified substring within string, or zero if it's not present. 
(Same as position(substring in string), but note the reversed argument order.)\n\n/// strpos('high', 'ig') = 2\n\npub fn strpos<T: ArrowPrimitiveType>(args: &[ArrayRef]) -> Result<ArrayRef>\n\nwhere\n\n T::Native: StringOffsetSizeTrait,\n\n{\n\n let string_array: &GenericStringArray<T::Native> = args[0]\n\n .as_any()\n\n .downcast_ref::<GenericStringArray<T::Native>>()\n\n .ok_or_else(|| {\n\n DataFusionError::Internal(\"could not cast string to StringArray\".to_string())\n\n })?;\n\n\n\n let substring_array: &GenericStringArray<T::Native> = args[1]\n\n .as_any()\n\n .downcast_ref::<GenericStringArray<T::Native>>()\n\n .ok_or_else(|| {\n\n DataFusionError::Internal(\n\n \"could not cast substring to StringArray\".to_string(),\n\n )\n\n })?;\n\n\n", "file_path": "datafusion/src/physical_plan/unicode_expressions.rs", "rank": 54, "score": 253360.44246469944 }, { "content": "/// Converts the number to its equivalent hexadecimal representation.\n\n/// to_hex(2147483647) = '7fffffff'\n\npub fn to_hex<T: ArrowPrimitiveType>(args: &[ArrayRef]) -> Result<ArrayRef>\n\nwhere\n\n T::Native: StringOffsetSizeTrait,\n\n{\n\n let integer_array = downcast_primitive_array_arg!(args[0], \"integer\", T);\n\n\n\n let result = integer_array\n\n .iter()\n\n .map(|integer| {\n\n integer.map(|integer| format!(\"{:x}\", integer.to_usize().unwrap()))\n\n })\n\n .collect::<GenericStringArray<i32>>();\n\n\n\n Ok(Arc::new(result) as ArrayRef)\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 55, "score": 253348.96254012128 }, { "content": "/// Print the schema in a compact representation to `buf`\n\n///\n\n/// For example: `foo:Utf8` if `foo` can not be null, and\n\n/// `foo:Utf8;N` if `foo` is nullable.\n\n///\n\n/// ```\n\n/// use arrow::datatypes::{Field, Schema, DataType};\n\n/// # use datafusion::logical_plan::display_schema;\n\n/// let schema = Schema::new(vec![\n\n/// Field::new(\"id\", DataType::Int32, false),\n\n/// Field::new(\"first_name\", 
DataType::Utf8, true),\n\n/// ]);\n\n///\n\n/// assert_eq!(\n\n/// \"[id:Int32, first_name:Utf8;N]\",\n\n/// format!(\"{}\", display_schema(&schema))\n\n/// );\n\n/// ```\n\npub fn display_schema(schema: &Schema) -> impl fmt::Display + '_ {\n\n struct Wrapper<'a>(&'a Schema);\n\n\n\n impl<'a> fmt::Display for Wrapper<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"[\")?;\n\n for (idx, field) in self.0.fields().iter().enumerate() {\n\n if idx > 0 {\n\n write!(f, \", \")?;\n\n }\n\n let nullable_str = if field.is_nullable() { \";N\" } else { \"\" };\n\n write!(\n\n f,\n\n \"{}:{:?}{}\",\n\n field.name(),\n\n field.data_type(),\n\n nullable_str\n\n )?;\n\n }\n\n write!(f, \"]\")\n\n }\n\n }\n\n Wrapper(schema)\n\n}\n\n\n\n/// Logic related to creating DOT language graphs.\n", "file_path": "datafusion/src/logical_plan/display.rs", "rank": 56, "score": 252195.0010547214 }, { "content": "/// decorates a function to handle [`ScalarValue`]s by converting them to arrays before calling the function\n\n/// and vice-versa after evaluation.\n\npub fn make_scalar_function<F>(inner: F) -> ScalarFunctionImplementation\n\nwhere\n\n F: Fn(&[ArrayRef]) -> Result<ArrayRef> + Sync + Send + 'static,\n\n{\n\n Arc::new(move |args: &[ColumnarValue]| {\n\n // first, identify if any of the arguments is an Array. 
If yes, store its `len`,\n\n // as any scalar will need to be converted to an array of len `len`.\n\n let len = args\n\n .iter()\n\n .fold(Option::<usize>::None, |acc, arg| match arg {\n\n ColumnarValue::Scalar(_) => acc,\n\n ColumnarValue::Array(a) => Some(a.len()),\n\n });\n\n\n\n // to array\n\n let args = if let Some(len) = len {\n\n args.iter()\n\n .map(|arg| arg.clone().into_array(len))\n\n .collect::<Vec<ArrayRef>>()\n\n } else {\n", "file_path": "datafusion/src/physical_plan/functions.rs", "rank": 57, "score": 250720.13883627256 }, { "content": "/// Returns true if string starts with prefix.\n\n/// starts_with('alphabet', 'alph') = 't'\n\npub fn starts_with<T: StringOffsetSizeTrait>(args: &[ArrayRef]) -> Result<ArrayRef> {\n\n let string_array = downcast_string_arg!(args[0], \"string\", T);\n\n let prefix_array = downcast_string_arg!(args[1], \"prefix\", T);\n\n\n\n let result = string_array\n\n .iter()\n\n .zip(prefix_array.iter())\n\n .map(|(string, prefix)| match (string, prefix) {\n\n (Some(string), Some(prefix)) => Some(string.starts_with(prefix)),\n\n _ => None,\n\n })\n\n .collect::<BooleanArray>();\n\n\n\n Ok(Arc::new(result) as ArrayRef)\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 58, "score": 250553.17486994766 }, { "content": "/// Returns number of characters in the string.\n\n/// character_length('josé') = 4\n\npub fn character_length<T: ArrowPrimitiveType>(args: &[ArrayRef]) -> Result<ArrayRef>\n\nwhere\n\n T::Native: StringOffsetSizeTrait,\n\n{\n\n let string_array: &GenericStringArray<T::Native> = args[0]\n\n .as_any()\n\n .downcast_ref::<GenericStringArray<T::Native>>()\n\n .ok_or_else(|| {\n\n DataFusionError::Internal(\"could not cast string to StringArray\".to_string())\n\n })?;\n\n\n\n let result = string_array\n\n .iter()\n\n .map(|string| {\n\n string.map(|string: &str| {\n\n T::Native::from_usize(string.graphemes(true).count()).expect(\n\n \"should not fail as graphemes.count will always 
return integer\",\n\n )\n\n })\n\n })\n\n .collect::<PrimitiveArray<T>>();\n\n\n\n Ok(Arc::new(result) as ArrayRef)\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/unicode_expressions.rs", "rank": 59, "score": 250480.46137234906 }, { "content": "/// converts a pyarrow Scalar into a Rust Scalar\n\npub fn to_rust_scalar(ob: &PyAny) -> PyResult<ScalarValue> {\n\n let t = ob\n\n .getattr(\"__class__\")?\n\n .getattr(\"__name__\")?\n\n .extract::<&str>()?;\n\n\n\n let p = ob.call_method0(\"as_py\")?;\n\n\n\n Ok(match t {\n\n \"Int8Scalar\" => ScalarValue::Int8(Some(p.extract::<i8>()?)),\n\n \"Int16Scalar\" => ScalarValue::Int16(Some(p.extract::<i16>()?)),\n\n \"Int32Scalar\" => ScalarValue::Int32(Some(p.extract::<i32>()?)),\n\n \"Int64Scalar\" => ScalarValue::Int64(Some(p.extract::<i64>()?)),\n\n \"UInt8Scalar\" => ScalarValue::UInt8(Some(p.extract::<u8>()?)),\n\n \"UInt16Scalar\" => ScalarValue::UInt16(Some(p.extract::<u16>()?)),\n\n \"UInt32Scalar\" => ScalarValue::UInt32(Some(p.extract::<u32>()?)),\n\n \"UInt64Scalar\" => ScalarValue::UInt64(Some(p.extract::<u64>()?)),\n\n \"FloatScalar\" => ScalarValue::Float32(Some(p.extract::<f32>()?)),\n\n \"DoubleScalar\" => ScalarValue::Float64(Some(p.extract::<f64>()?)),\n\n \"BooleanScalar\" => ScalarValue::Boolean(Some(p.extract::<bool>()?)),\n", "file_path": "python/src/to_rust.rs", "rank": 60, "score": 249640.9891187648 }, { "content": "// coercion rules for equality operations. 
This is a superset of all numerical coercion rules.\n\npub fn eq_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Option<DataType> {\n\n if lhs_type == rhs_type {\n\n // same type => equality is possible\n\n return Some(lhs_type.clone());\n\n }\n\n numerical_coercion(lhs_type, rhs_type)\n\n .or_else(|| eq_bool_coercion(lhs_type, rhs_type))\n\n .or_else(|| dictionary_coercion(lhs_type, rhs_type))\n\n .or_else(|| temporal_coercion(lhs_type, rhs_type))\n\n .or_else(|| string_implicit_cast(lhs_type, rhs_type))\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/expressions/coercion.rs", "rank": 61, "score": 247965.47024009447 }, { "content": "/// extract a specific group from a string column, using a regular expression\n\npub fn regexp_match<T: StringOffsetSizeTrait>(args: &[ArrayRef]) -> Result<ArrayRef> {\n\n match args.len() {\n\n 2 => compute::regexp_match(downcast_string_arg!(args[0], \"string\", T), downcast_string_arg!(args[1], \"pattern\", T), None)\n\n .map_err(DataFusionError::ArrowError),\n\n 3 => compute::regexp_match(downcast_string_arg!(args[0], \"string\", T), downcast_string_arg!(args[1], \"pattern\", T), Some(downcast_string_arg!(args[1], \"flags\", T)))\n\n .map_err(DataFusionError::ArrowError),\n\n other => Err(DataFusionError::Internal(format!(\n\n \"regexp_match was called with {} arguments. 
It requires at least 2 and at most 3.\",\n\n other\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/regex_expressions.rs", "rank": 62, "score": 247776.18988604023 }, { "content": "/// Visit all children of this plan, according to the order defined on `ExecutionPlanVisitor`.\n\n// Note that this would be really nice if it were a method on\n\n// ExecutionPlan, but it can not be because it takes a generic\n\n// parameter and `ExecutionPlan` is a trait\n\npub fn accept<V: ExecutionPlanVisitor>(\n\n plan: &dyn ExecutionPlan,\n\n visitor: &mut V,\n\n) -> std::result::Result<(), V::Error> {\n\n visitor.pre_visit(plan)?;\n\n for child in plan.children() {\n\n visit_execution_plan(child.as_ref(), visitor)?;\n\n }\n\n visitor.post_visit(plan)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/mod.rs", "rank": 63, "score": 246700.46372810518 }, { "content": "/// # Errors\n\n/// This function errors when:\n\n/// * the number of arguments is not 1\n\n/// * the first argument is not castable to a `GenericStringArray`\n\nfn unary_binary_function<T, R, F>(\n\n args: &[&dyn Array],\n\n op: F,\n\n name: &str,\n\n) -> Result<BinaryArray>\n\nwhere\n\n R: AsRef<[u8]>,\n\n T: StringOffsetSizeTrait,\n\n F: Fn(&str) -> R,\n\n{\n\n if args.len() != 1 {\n\n return Err(DataFusionError::Internal(format!(\n\n \"{:?} args were supplied but {} takes exactly one argument\",\n\n args.len(),\n\n name,\n\n )));\n\n }\n\n\n\n let array = args[0]\n\n .as_any()\n\n .downcast_ref::<GenericStringArray<T>>()\n\n .ok_or_else(|| {\n\n DataFusionError::Internal(\"failed to downcast to string\".to_string())\n\n })?;\n\n\n\n // first map is the iterator, second is for the `Option<_>`\n\n Ok(array.iter().map(|x| x.map(|x| op(x))).collect())\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 64, "score": 246306.5754166705 }, { "content": "/// some tests share a common table\n\npub fn test_table_scan() -> Result<LogicalPlan> {\n\n 
test_table_scan_with_name(\"test\")\n\n}\n\n\n", "file_path": "datafusion/src/test/mod.rs", "rank": 65, "score": 245576.21199123075 }, { "content": "/// returns the name of the state\n\npub fn format_state_name(name: &str, state_name: &str) -> String {\n\n format!(\"{}[{}]\", name, state_name)\n\n}\n\n\n\n/// Represents Sort operation for a column in a RecordBatch\n\n#[derive(Clone, Debug)]\n\npub struct PhysicalSortExpr {\n\n /// Physical expression representing the column to sort\n\n pub expr: Arc<dyn PhysicalExpr>,\n\n /// Option to specify how the given column should be sorted\n\n pub options: SortOptions,\n\n}\n\n\n\nimpl std::fmt::Display for PhysicalSortExpr {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let opts_string = match (self.options.descending, self.options.nulls_first) {\n\n (true, true) => \"DESC\",\n\n (true, false) => \"DESC NULLS LAST\",\n\n (false, true) => \"ASC\",\n\n (false, false) => \"ASC NULLS LAST\",\n", "file_path": "datafusion/src/physical_plan/expressions/mod.rs", "rank": 66, "score": 243954.21990812384 }, { "content": "/// Create a column expression based on a qualified or unqualified column name\n\npub fn col(ident: &str) -> Expr {\n\n Expr::Column(ident.into())\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 67, "score": 243931.1962003408 }, { "content": "fn register_aggregate_csv(ctx: &mut ExecutionContext) -> Result<()> {\n\n let testdata = datafusion::test_util::arrow_test_data();\n\n let schema = aggr_test_schema();\n\n ctx.register_csv(\n\n \"aggregate_test_100\",\n\n &format!(\"{}/csv/aggregate_test_100.csv\", testdata),\n\n CsvReadOptions::new().schema(&schema),\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/tests/sql.rs", "rank": 68, "score": 242540.83017196198 }, { "content": "fn eq_bool_coercion(l: &DataType, r: &DataType) -> Option<DataType> {\n\n if l == &DataType::Boolean || r == &DataType::Boolean {\n\n Some(DataType::Boolean)\n\n } else {\n\n 
None\n\n }\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/expressions/coercion.rs", "rank": 69, "score": 242065.8453518648 }, { "content": "/// Recursively calls `pre_visit` and `post_visit` for this node and\n\n/// all of its children, as described on [`ExecutionPlanVisitor`]\n\npub fn visit_execution_plan<V: ExecutionPlanVisitor>(\n\n plan: &dyn ExecutionPlan,\n\n visitor: &mut V,\n\n) -> std::result::Result<(), V::Error> {\n\n visitor.pre_visit(plan)?;\n\n for child in plan.children() {\n\n visit_execution_plan(child.as_ref(), visitor)?;\n\n }\n\n visitor.post_visit(plan)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/mod.rs", "rank": 70, "score": 241281.71695120173 }, { "content": "fn register_aggregate_simple_csv(ctx: &mut ExecutionContext) -> Result<()> {\n\n // It's not possible to use aggregate_test_100, not enought similar values to test grouping on floats\n\n let schema = Arc::new(Schema::new(vec![\n\n Field::new(\"c1\", DataType::Float32, false),\n\n Field::new(\"c2\", DataType::Float64, false),\n\n Field::new(\"c3\", DataType::Boolean, false),\n\n ]));\n\n\n\n ctx.register_csv(\n\n \"aggregate_simple\",\n\n \"tests/aggregate_simple.csv\",\n\n CsvReadOptions::new().schema(&schema),\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "datafusion/tests/sql.rs", "rank": 71, "score": 239778.70793450915 }, { "content": "fn create_batch(value: i32, num_rows: usize) -> Result<RecordBatch> {\n\n let mut builder = Int32Builder::new(num_rows);\n\n for _ in 0..num_rows {\n\n builder.append_value(value)?;\n\n }\n\n\n\n Ok(RecordBatch::try_new(\n\n Arc::new(Schema::new(vec![Field::new(\n\n \"flag\",\n\n DataType::Int32,\n\n false,\n\n )])),\n\n vec![Arc::new(builder.finish())],\n\n )?)\n\n}\n\n\n", "file_path": "datafusion/tests/provider_filter_pushdown.rs", "rank": 72, "score": 237903.9210339261 }, { "content": "/// Returns the data types that each argument must be coerced to match\n\n/// `signature`.\n\n///\n\n/// See the module level 
documentation for more detail on coercion.\n\npub fn data_types(\n\n current_types: &[DataType],\n\n signature: &Signature,\n\n) -> Result<Vec<DataType>> {\n\n if current_types.is_empty() {\n\n return Ok(vec![]);\n\n }\n\n let valid_types = get_valid_types(signature, current_types)?;\n\n\n\n if valid_types\n\n .iter()\n\n .any(|data_type| data_type == current_types)\n\n {\n\n return Ok(current_types.to_vec());\n\n }\n\n\n\n for valid_types in valid_types {\n\n if let Some(types) = maybe_data_types(&valid_types, current_types) {\n\n return Ok(types);\n\n }\n\n }\n\n\n\n // none possible -> Error\n\n Err(DataFusionError::Plan(format!(\n\n \"Coercion from {:?} to the signature {:?} failed.\",\n\n current_types, signature\n\n )))\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/type_coercion.rs", "rank": 73, "score": 237489.2244775246 }, { "content": "/// Calls string_to_timestamp_nanos and converts the error type\n\nfn string_to_timestamp_nanos_shim(s: &str) -> Result<i64> {\n\n string_to_timestamp_nanos(s).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/datetime_expressions.rs", "rank": 74, "score": 236969.44991153682 }, { "content": "pub fn get_tpch_schema(table: &str) -> Schema {\n\n // note that the schema intentionally uses signed integers so that any generated Parquet\n\n // files can also be used to benchmark tools that only support signed integers, such as\n\n // Apache Spark\n\n\n\n match table {\n\n \"part\" => Schema::new(vec![\n\n Field::new(\"p_partkey\", DataType::Int32, false),\n\n Field::new(\"p_name\", DataType::Utf8, false),\n\n Field::new(\"p_mfgr\", DataType::Utf8, false),\n\n Field::new(\"p_brand\", DataType::Utf8, false),\n\n Field::new(\"p_type\", DataType::Utf8, false),\n\n Field::new(\"p_size\", DataType::Int32, false),\n\n Field::new(\"p_container\", DataType::Utf8, false),\n\n Field::new(\"p_retailprice\", DataType::Float64, false),\n\n Field::new(\"p_comment\", DataType::Utf8, false),\n\n ]),\n\n\n\n 
\"supplier\" => Schema::new(vec![\n\n Field::new(\"s_suppkey\", DataType::Int32, false),\n", "file_path": "ballista/rust/scheduler/src/test_utils.rs", "rank": 75, "score": 235646.70777623507 }, { "content": "/// Create a physical expression for window function\n\npub fn create_window_expr(\n\n fun: &WindowFunction,\n\n name: String,\n\n args: &[Arc<dyn PhysicalExpr>],\n\n partition_by: &[Arc<dyn PhysicalExpr>],\n\n order_by: &[PhysicalSortExpr],\n\n window_frame: Option<WindowFrame>,\n\n input_schema: &Schema,\n\n) -> Result<Arc<dyn WindowExpr>> {\n\n Ok(match fun {\n\n WindowFunction::AggregateFunction(fun) => Arc::new(AggregateWindowExpr::new(\n\n aggregates::create_aggregate_expr(fun, false, args, input_schema, name)?,\n\n partition_by,\n\n order_by,\n\n window_frame,\n\n )),\n\n WindowFunction::BuiltInWindowFunction(fun) => Arc::new(BuiltInWindowExpr::new(\n\n fun.clone(),\n\n create_built_in_window_expr(fun, args, input_schema, name)?,\n\n partition_by,\n\n order_by,\n\n window_frame,\n\n )),\n\n })\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/windows/mod.rs", "rank": 76, "score": 234624.19010937697 }, { "content": "/// Return record batch with i32 sequence\n\n///\n\n/// Columns are named\n\n/// \"i\" -> Int32Array\n\nfn make_int32_batch(start: i32, end: i32) -> RecordBatch {\n\n let schema = Arc::new(Schema::new(vec![Field::new(\"i\", DataType::Int32, true)]));\n\n let v: Vec<i32> = (start..end).collect();\n\n let array = Arc::new(Int32Array::from(v)) as ArrayRef;\n\n RecordBatch::try_new(schema, vec![array.clone()]).unwrap()\n\n}\n\n\n", "file_path": "datafusion/tests/parquet_pruning.rs", "rank": 77, "score": 233483.00525581356 }, { "content": "/// Returns `expressions` coerced to types compatible with\n\n/// `signature`, if possible.\n\n///\n\n/// See the module level documentation for more detail on coercion.\n\npub fn coerce(\n\n expressions: &[Arc<dyn PhysicalExpr>],\n\n schema: &Schema,\n\n signature: &Signature,\n\n) -> Result<Vec<Arc<dyn 
PhysicalExpr>>> {\n\n if expressions.is_empty() {\n\n return Ok(vec![]);\n\n }\n\n\n\n let current_types = expressions\n\n .iter()\n\n .map(|e| e.data_type(schema))\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n let new_types = data_types(&current_types, signature)?;\n\n\n\n expressions\n\n .iter()\n\n .enumerate()\n\n .map(|(i, expr)| try_cast(expr.clone(), schema, new_types[i].clone()))\n\n .collect::<Result<Vec<_>>>()\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/type_coercion.rs", "rank": 78, "score": 227832.43287421213 }, { "content": "/// Panics if scalars are of different types.\n\npub fn cmp_same_types(\n\n l: &ScalarValue,\n\n r: &ScalarValue,\n\n nulls_first: bool,\n\n asc: bool,\n\n) -> Ordering {\n\n match (l.is_null(), r.is_null()) {\n\n (true, true) => return Ordering::Equal,\n\n (true, false) => {\n\n return if nulls_first {\n\n Ordering::Less\n\n } else {\n\n Ordering::Greater\n\n }\n\n }\n\n (false, true) => {\n\n return if nulls_first {\n\n Ordering::Greater\n\n } else {\n\n Ordering::Less\n", "file_path": "datafusion/src/cube_ext/util.rs", "rank": 79, "score": 227826.64140918016 }, { "content": "/// Returns the character with the given code. 
chr(0) is disallowed because text data types cannot store that character.\n\n/// chr(65) = 'A'\n\npub fn chr(args: &[ArrayRef]) -> Result<ArrayRef> {\n\n let integer_array = downcast_arg!(args[0], \"integer\", Int64Array);\n\n\n\n // first map is the iterator, second is for the `Option<_>`\n\n let result = integer_array\n\n .iter()\n\n .map(|integer: Option<i64>| {\n\n integer\n\n .map(|integer| {\n\n if integer == 0 {\n\n Err(DataFusionError::Execution(\n\n \"null character not permitted.\".to_string(),\n\n ))\n\n } else {\n\n match core::char::from_u32(integer as u32) {\n\n Some(integer) => Ok(integer.to_string()),\n\n None => Err(DataFusionError::Execution(\n\n \"requested character too large for encoding.\".to_string(),\n\n )),\n\n }\n\n }\n\n })\n\n .transpose()\n\n })\n\n .collect::<Result<StringArray>>()?;\n\n\n\n Ok(Arc::new(result) as ArrayRef)\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 80, "score": 227581.65310581296 }, { "content": "/// Returns all direct children `Expression`s of `expr`.\n\n/// E.g. if the expression is \"(a + 1) + 1\", it returns [\"a + 1\", \"1\"] (as Expr objects)\n\npub fn expr_sub_expressions(expr: &Expr) -> Result<Vec<Expr>> {\n\n match expr {\n\n Expr::BinaryExpr { left, right, .. } => {\n\n Ok(vec![left.as_ref().to_owned(), right.as_ref().to_owned()])\n\n }\n\n Expr::IsNull(e) => Ok(vec![e.as_ref().to_owned()]),\n\n Expr::IsNotNull(e) => Ok(vec![e.as_ref().to_owned()]),\n\n Expr::ScalarFunction { args, .. } => Ok(args.clone()),\n\n Expr::ScalarUDF { args, .. 
} => Ok(args.clone()),\n\n Expr::WindowFunction {\n\n args,\n\n partition_by,\n\n order_by,\n\n ..\n\n } => {\n\n let mut expr_list: Vec<Expr> = vec![];\n\n expr_list.extend(args.clone());\n\n expr_list.push(lit(WINDOW_PARTITION_MARKER));\n\n expr_list.extend(partition_by.clone());\n\n expr_list.push(lit(WINDOW_SORT_MARKER));\n", "file_path": "datafusion/src/optimizer/utils.rs", "rank": 81, "score": 225846.41804674314 }, { "content": "/// Concatenates all but the first argument, with separators. The first argument is used as the separator string, and should not be NULL. Other NULL arguments are ignored.\n\n/// concat_ws(',', 'abcde', 2, NULL, 22) = 'abcde,2,22'\n\npub fn concat_ws(args: &[ArrayRef]) -> Result<ArrayRef> {\n\n // downcast all arguments to strings\n\n let args = downcast_vec!(args, StringArray).collect::<Result<Vec<&StringArray>>>()?;\n\n\n\n // do not accept 0 or 1 arguments.\n\n if args.len() < 2 {\n\n return Err(DataFusionError::Internal(format!(\n\n \"concat_ws was called with {} arguments. 
It requires at least 2.\",\n\n args.len()\n\n )));\n\n }\n\n\n\n // first map is the iterator, second is for the `Option<_>`\n\n let result = args[0]\n\n .iter()\n\n .enumerate()\n\n .map(|(index, x)| {\n\n x.map(|sep: &str| {\n\n let mut owned_string: String = \"\".to_owned();\n\n for arg_index in 1..args.len() {\n", "file_path": "datafusion/src/physical_plan/string_expressions.rs", "rank": 82, "score": 225106.97911664023 }, { "content": "fn build_statistics_expr(expr_builder: &mut PruningExpressionBuilder) -> Result<Expr> {\n\n let statistics_expr =\n\n match expr_builder.op() {\n\n Operator::NotEq => {\n\n // column != literal => (min, max) = literal =>\n\n // !(min != literal && max != literal) ==>\n\n // min != literal || literal != max\n\n let min_column_expr = expr_builder.min_column_expr()?;\n\n let max_column_expr = expr_builder.max_column_expr()?;\n\n min_column_expr\n\n .not_eq(expr_builder.scalar_expr().clone())\n\n .or(expr_builder.scalar_expr().clone().not_eq(max_column_expr))\n\n }\n\n Operator::Eq => {\n\n // column = literal => (min, max) = literal => min <= literal && literal <= max\n\n // (column / 2) = 4 => (column_min / 2) <= 4 && 4 <= (column_max / 2)\n\n let min_column_expr = expr_builder.min_column_expr()?;\n\n let max_column_expr = expr_builder.max_column_expr()?;\n\n min_column_expr\n\n .lt_eq(expr_builder.scalar_expr().clone())\n", "file_path": "datafusion/src/physical_optimizer/pruning.rs", "rank": 83, "score": 224845.54705271753 }, { "content": "/// returns a new expression where the expressions in `expr` are replaced by the ones in\n\n/// `expressions`.\n\n/// This is used in conjunction with ``expr_expressions`` to re-write expressions.\n\npub fn rewrite_expression(expr: &Expr, expressions: &[Expr]) -> Result<Expr> {\n\n match expr {\n\n Expr::BinaryExpr { op, .. 
} => Ok(Expr::BinaryExpr {\n\n left: Box::new(expressions[0].clone()),\n\n op: *op,\n\n right: Box::new(expressions[1].clone()),\n\n }),\n\n Expr::IsNull(_) => Ok(Expr::IsNull(Box::new(expressions[0].clone()))),\n\n Expr::IsNotNull(_) => Ok(Expr::IsNotNull(Box::new(expressions[0].clone()))),\n\n Expr::ScalarFunction { fun, .. } => Ok(Expr::ScalarFunction {\n\n fun: fun.clone(),\n\n args: expressions.to_vec(),\n\n }),\n\n Expr::ScalarUDF { fun, .. } => Ok(Expr::ScalarUDF {\n\n fun: fun.clone(),\n\n args: expressions.to_vec(),\n\n }),\n\n Expr::WindowFunction {\n\n fun, window_frame, ..\n\n } => {\n", "file_path": "datafusion/src/optimizer/utils.rs", "rank": 84, "score": 221962.45165952452 }, { "content": "fn custom_sqrt(args: &[ColumnarValue]) -> Result<ColumnarValue> {\n\n let arg = &args[0];\n\n if let ColumnarValue::Array(v) = arg {\n\n let input = v\n\n .as_any()\n\n .downcast_ref::<Float64Array>()\n\n .expect(\"cast failed\");\n\n\n\n let array: Float64Array = input.iter().map(|v| v.map(|x| x.sqrt())).collect();\n\n Ok(ColumnarValue::Array(Arc::new(array)))\n\n } else {\n\n unimplemented!()\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn csv_query_avg() -> Result<()> {\n\n let mut ctx = ExecutionContext::new();\n\n register_aggregate_csv(&mut ctx)?;\n\n let sql = \"SELECT avg(c12) FROM aggregate_test_100\";\n", "file_path": "datafusion/tests/sql.rs", "rank": 85, "score": 221783.90597410663 }, { "content": "pub fn lexcmp_array_rows<'a>(\n\n cols: impl Iterator<Item = &'a ArrayRef>,\n\n l_row: usize,\n\n r_row: usize,\n\n) -> Ordering {\n\n for c in cols {\n\n let o = cmp_array_row_same_types(c, l_row, c, r_row);\n\n if o != Ordering::Equal {\n\n return o;\n\n }\n\n }\n\n Ordering::Equal\n\n}\n", "file_path": "datafusion/src/cube_ext/util.rs", "rank": 86, "score": 220458.1776858169 }, { "content": "fn decode_protobuf<T: Message + Default>(bytes: &[u8]) -> Result<T> {\n\n T::decode(bytes).map_err(|e| {\n\n BallistaError::Internal(format!(\n\n \"Could not deserialize 
{}: {}\",\n\n type_name::<T>(),\n\n e\n\n ))\n\n })\n\n}\n\n\n", "file_path": "ballista/rust/scheduler/src/state/mod.rs", "rank": 87, "score": 218996.7187746701 }, { "content": "/// Returns the return type of a binary operator or an error when the binary operator cannot\n\n/// perform the computation between the argument's types, even after type coercion.\n\n///\n\n/// This function makes some assumptions about the underlying available computations.\n\npub fn binary_operator_data_type(\n\n lhs_type: &DataType,\n\n op: &Operator,\n\n rhs_type: &DataType,\n\n) -> Result<DataType> {\n\n // validate that it is possible to perform the operation on incoming types.\n\n // (or the return datatype cannot be infered)\n\n let common_type = common_binary_type(lhs_type, op, rhs_type)?;\n\n\n\n match op {\n\n // operators that return a boolean\n\n Operator::Eq\n\n | Operator::NotEq\n\n | Operator::And\n\n | Operator::Or\n\n | Operator::Like\n\n | Operator::NotLike\n\n | Operator::ILike\n\n | Operator::NotILike\n\n | Operator::Lt\n", "file_path": "datafusion/src/physical_plan/expressions/binary.rs", "rank": 88, "score": 218890.7397180245 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let partitions_len = 8;\n\n let array_len = 1024 * 1024;\n\n let batch_size = 8 * 1024;\n\n let ctx = create_context(partitions_len, array_len, batch_size).unwrap();\n\n\n\n c.bench_function(\"window empty over, aggregate functions\", |b| {\n\n b.iter(|| {\n\n query(\n\n ctx.clone(),\n\n \"SELECT \\\n\n MAX(f64) OVER (), \\\n\n MIN(f32) OVER (), \\\n\n SUM(u64_narrow) OVER () \\\n\n FROM t\",\n\n )\n\n })\n\n });\n\n\n\n c.bench_function(\"window empty over, built-in functions\", |b| {\n", "file_path": "datafusion/benches/window_query_sql.rs", "rank": 89, "score": 218884.29236605688 }, { "content": "fn optimize(plan: &LogicalPlan, mut state: State) -> Result<LogicalPlan> {\n\n match plan {\n\n LogicalPlan::Explain { .. 
} => {\n\n // push the optimization to the plan of this explain\n\n push_down(&state, plan)\n\n }\n\n LogicalPlan::Filter { input, predicate } => {\n\n let mut predicates = vec![];\n\n split_members(predicate, &mut predicates);\n\n\n\n // Predicates without referencing columns (WHERE FALSE, WHERE 1=1, etc.)\n\n let mut no_col_predicates = vec![];\n\n\n\n predicates\n\n .into_iter()\n\n .try_for_each::<_, Result<()>>(|predicate| {\n\n let mut columns: HashSet<Column> = HashSet::new();\n\n utils::expr_to_columns(predicate, &mut columns)?;\n\n if columns.is_empty() {\n\n no_col_predicates.push(predicate)\n", "file_path": "datafusion/src/optimizer/filter_push_down.rs", "rank": 90, "score": 218871.84976712946 }, { "content": "/// the signatures supported by the function `fun`.\n\npub fn signature(fun: &WindowFunction) -> Signature {\n\n match fun {\n\n WindowFunction::AggregateFunction(fun) => aggregates::signature(fun),\n\n WindowFunction::BuiltInWindowFunction(fun) => signature_for_built_in(fun),\n\n }\n\n}\n\n\n\n/// the signatures supported by the built-in window function `fun`.\n\npub(super) fn signature_for_built_in(fun: &BuiltInWindowFunction) -> Signature {\n\n // note: the physical expression must accept the type returned by this function or the execution panics.\n\n match fun {\n\n BuiltInWindowFunction::RowNumber\n\n | BuiltInWindowFunction::Rank\n\n | BuiltInWindowFunction::DenseRank\n\n | BuiltInWindowFunction::PercentRank\n\n | BuiltInWindowFunction::CumeDist => Signature::Any(0),\n\n BuiltInWindowFunction::Lag | BuiltInWindowFunction::Lead => {\n\n Signature::OneOf(vec![\n\n Signature::Any(1),\n\n Signature::Any(2),\n", "file_path": "datafusion/src/physical_plan/window_functions.rs", "rank": 91, "score": 217804.32525229102 }, { "content": "pub fn get_routes(scheduler_server: SchedulerServer) -> BoxedFilter<(impl Reply,)> {\n\n let routes = warp::path(\"state\")\n\n .and(with_data_server(scheduler_server))\n\n .and_then(handlers::scheduler_state);\n\n 
routes.boxed()\n\n}\n", "file_path": "ballista/rust/scheduler/src/api/mod.rs", "rank": 92, "score": 217079.55052341387 }, { "content": "/// Used for column names in schemas\n\npub fn physical_name(e: &Expr, input_schema: &DFSchema) -> Result<String> {\n\n match e {\n\n Expr::Column(c) => Ok(c.name.clone()),\n\n Expr::Alias(_, name) => Ok(name.clone()),\n\n Expr::ScalarVariable(variable_names) => Ok(variable_names.join(\".\")),\n\n Expr::Literal(value) => Ok(format!(\"{:?}\", value)),\n\n Expr::BinaryExpr { left, op, right } => {\n\n let left = physical_name(left, input_schema)?;\n\n let right = physical_name(right, input_schema)?;\n\n Ok(format!(\"{} {:?} {}\", left, op, right))\n\n }\n\n Expr::Case {\n\n expr,\n\n when_then_expr,\n\n else_expr,\n\n } => {\n\n let mut name = \"CASE \".to_string();\n\n if let Some(e) = expr {\n\n name += &format!(\"{:?} \", e);\n\n }\n", "file_path": "datafusion/src/physical_plan/planner.rs", "rank": 93, "score": 216922.79505555204 }, { "content": "/// Recursively call [`Column::normalize`] on all Column expressions\n\n/// in the `expr` expression tree.\n\npub fn normalize_col(expr: Expr, plan: &LogicalPlan) -> Result<Expr> {\n\n normalize_col_with_schemas(expr, &plan.all_schemas(), &plan.using_columns()?)\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 94, "score": 216916.64891109645 }, { "content": "/// Return true if a value of type `type_from` can be coerced\n\n/// (losslessly converted) into a value of `type_to`\n\n///\n\n/// See the module level documentation for more detail on coercion.\n\npub fn can_coerce_from(type_into: &DataType, type_from: &DataType) -> bool {\n\n use self::DataType::*;\n\n match type_into {\n\n Int8 => matches!(type_from, Int8),\n\n Int16 => matches!(type_from, Int8 | Int16 | UInt8),\n\n Int32 => matches!(type_from, Int8 | Int16 | Int32 | UInt8 | UInt16),\n\n Int64 => matches!(\n\n type_from,\n\n Int8 | Int16 | Int32 | Int64 | UInt8 | UInt16 | UInt32\n\n ),\n\n UInt8 => 
matches!(type_from, UInt8),\n\n UInt16 => matches!(type_from, UInt8 | UInt16),\n\n UInt32 => matches!(type_from, UInt8 | UInt16 | UInt32),\n\n UInt64 => matches!(type_from, UInt8 | UInt16 | UInt32 | UInt64),\n\n Float32 => matches!(\n\n type_from,\n\n Int8 | Int16 | Int32 | Int64 | UInt8 | UInt16 | UInt32 | UInt64 | Float32\n\n ),\n\n Float64 => matches!(\n\n type_from,\n", "file_path": "datafusion/src/physical_plan/type_coercion.rs", "rank": 95, "score": 215942.85762053804 }, { "content": "/// Extract the value in `col[row]` from a dictionary a GroupByScalar\n\nfn dictionary_create_group_by_value<K: ArrowDictionaryKeyType>(\n\n col: &ArrayRef,\n\n row: usize,\n\n) -> Result<GroupByScalar> {\n\n let dict_col = col.as_any().downcast_ref::<DictionaryArray<K>>().unwrap();\n\n\n\n // look up the index in the values dictionary\n\n let keys_col = dict_col.keys();\n\n let values_index = keys_col.value(row).to_usize().ok_or_else(|| {\n\n DataFusionError::Internal(format!(\n\n \"Can not convert index to usize in dictionary of type creating group by value {:?}\",\n\n keys_col.data_type()\n\n ))\n\n })?;\n\n\n\n create_group_by_value(dict_col.values(), values_index)\n\n}\n\n\n\n/// Extract the value in `col[row]` as a GroupByScalar\n\npub(crate) fn create_group_by_value(col: &ArrayRef, row: usize) -> Result<GroupByScalar> {\n", "file_path": "datafusion/src/physical_plan/hash_aggregate.rs", "rank": 96, "score": 214329.75468675574 }, { "content": "#[inline]\n\npub fn unnormalize_cols(exprs: impl IntoIterator<Item = Expr>) -> Vec<Expr> {\n\n exprs.into_iter().map(unnormalize_col).collect()\n\n}\n\n\n", "file_path": "datafusion/src/logical_plan/expr.rs", "rank": 97, "score": 214245.57899277064 }, { "content": "fn encode_protobuf<T: Message + Default>(msg: &T) -> Result<Vec<u8>> {\n\n let mut value: Vec<u8> = Vec::with_capacity(msg.encoded_len());\n\n msg.encode(&mut value).map_err(|e| {\n\n BallistaError::Internal(format!(\n\n \"Could not serialize {}: {}\",\n\n 
type_name::<T>(),\n\n e\n\n ))\n\n })?;\n\n Ok(value)\n\n}\n\n\n\n#[cfg(all(test, feature = \"sled\"))]\n\nmod test {\n\n use std::sync::Arc;\n\n\n\n use ballista_core::serde::protobuf::{\n\n job_status, task_status, CompletedTask, FailedTask, JobStatus, PartitionId,\n\n QueuedJob, RunningJob, RunningTask, TaskStatus,\n\n };\n", "file_path": "ballista/rust/scheduler/src/state/mod.rs", "rank": 98, "score": 212977.80955505196 }, { "content": "// It's not possible to return &[u8], because trait in trait without short lifetime\n\nfn sha_process<D: SHA2Digest + Default>(input: &str) -> SHA2DigestOutput<D> {\n\n let mut digest = D::default();\n\n digest.update(&input);\n\n\n\n digest.finalize()\n\n}\n\n\n", "file_path": "datafusion/src/physical_plan/crypto_expressions.rs", "rank": 99, "score": 212753.53361634506 } ]
Rust
src/runtime/thread/continuation.rs
jamesbornholt/shuttle
24ffc0aa2a2c30059c263ea7e5516f90af1c828b
#![allow(deprecated)] use crate::runtime::execution::ExecutionState; use generator::{Generator, Gn}; use scoped_tls::scoped_thread_local; use std::cell::{Cell, RefCell}; use std::collections::VecDeque; use std::ops::Deref; use std::ops::DerefMut; use std::rc::Rc; scoped_thread_local! { pub(crate) static CONTINUATION_POOL: ContinuationPool } pub(crate) struct Continuation { generator: Generator<'static, ContinuationInput, ContinuationOutput>, function: ContinuationFunction, state: ContinuationState, } #[allow(clippy::type_complexity)] #[derive(Clone)] struct ContinuationFunction(Rc<Cell<Option<Box<dyn FnOnce() + Send>>>>); unsafe impl Send for ContinuationFunction {} #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum ContinuationInput { Resume, Exit, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum ContinuationOutput { Yielded, Finished, Exited, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum ContinuationState { NotReady, Ready, Running, } impl Continuation { pub fn new(stack_size: usize) -> Self { let function = ContinuationFunction(Rc::new(Cell::new(None))); let mut gen = { let function = function.clone(); Gn::new_opt(stack_size, move || { loop { match generator::yield_(ContinuationOutput::Finished) { None | Some(ContinuationInput::Exit) => break, _ => (), } let f = function.0.take().expect("must have a function to run"); f(); } ContinuationOutput::Exited }) }; let ret = gen.resume().unwrap(); debug_assert_eq!(ret, ContinuationOutput::Finished); Self { generator: gen, function, state: ContinuationState::NotReady, } } pub fn initialize(&mut self, fun: Box<dyn FnOnce() + Send>) { debug_assert_eq!( self.state, ContinuationState::NotReady, "shouldn't replace a function before it runs" ); let old = self.function.0.replace(Some(fun)); debug_assert!(old.is_none(), "shouldn't replace a function before it runs"); self.state = ContinuationState::Ready; } pub fn resume(&mut self) -> bool { debug_assert!(self.state == ContinuationState::Ready || self.state == 
ContinuationState::Running); let ret = self.resume_with_input(ContinuationInput::Resume); debug_assert_ne!( ret, ContinuationOutput::Exited, "continuation should not exit if resumed from user code" ); ret == ContinuationOutput::Finished } fn resume_with_input(&mut self, input: ContinuationInput) -> ContinuationOutput { self.generator.set_para(input); let ret = self.generator.resume().unwrap(); if ret == ContinuationOutput::Finished { self.state = ContinuationState::NotReady; } ret } fn reusable(&self) -> bool { self.state == ContinuationState::NotReady } } impl Drop for Continuation { fn drop(&mut self) { if self.reusable() { let ret = self.resume_with_input(ContinuationInput::Exit); debug_assert_eq!(ret, ContinuationOutput::Exited); } } } pub(crate) struct ContinuationPool { continuations: Rc<RefCell<VecDeque<Continuation>>>, } impl ContinuationPool { pub fn new() -> Self { Self { continuations: Rc::new(RefCell::new(VecDeque::new())), } } pub fn acquire(stack_size: usize) -> PooledContinuation { if CONTINUATION_POOL.is_set() { CONTINUATION_POOL.with(|p| p.acquire_inner(stack_size)) } else { let p = Self::new(); p.acquire_inner(stack_size) } } fn acquire_inner(&self, stack_size: usize) -> PooledContinuation { let continuation = self .continuations .borrow_mut() .pop_front() .unwrap_or_else(move || Continuation::new(stack_size)); PooledContinuation { continuation: Some(continuation), queue: self.continuations.clone(), } } } impl Drop for ContinuationPool { fn drop(&mut self) { for c in self.continuations.borrow_mut().iter_mut() { c.state = ContinuationState::Running; } } } pub(crate) struct PooledContinuation { continuation: Option<Continuation>, queue: Rc<RefCell<VecDeque<Continuation>>>, } impl Drop for PooledContinuation { fn drop(&mut self) { let c = self.continuation.take().unwrap(); if c.reusable() { self.queue.borrow_mut().push_back(c); } } } impl Deref for PooledContinuation { type Target = Continuation; fn deref(&self) -> &Self::Target { 
self.continuation.as_ref().unwrap() } } impl DerefMut for PooledContinuation { fn deref_mut(&mut self) -> &mut Self::Target { self.continuation.as_mut().unwrap() } } impl std::fmt::Debug for PooledContinuation { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.debug_struct("PooledContinuation").finish() } } unsafe impl Send for PooledContinuation {} pub(crate) fn switch() { if ExecutionState::maybe_yield() { let r = generator::yield_(ContinuationOutput::Yielded).unwrap(); assert!(matches!(r, ContinuationInput::Resume)); } } #[cfg(test)] mod tests { use super::*; use crate::Config; #[test] fn reusable_continuation_drop() { let pool = ContinuationPool::new(); let config: Config = Default::default(); let mut c = pool.acquire_inner(config.stack_size); c.initialize(Box::new(|| { let _ = 1 + 1; })); let r = c.resume(); assert!(r, "continuation only has one step"); drop(c); assert_eq!( pool.continuations.borrow().len(), 1, "continuation should be reusable because the function finished" ); let mut c = pool.acquire_inner(config.stack_size); c.initialize(Box::new(|| { generator::yield_with(ContinuationOutput::Yielded); let _ = 1 + 1; })); let r = c.resume(); assert!(!r, "continuation yields once, shouldn't be finished yet"); drop(c); assert_eq!( pool.continuations.borrow().len(), 0, "continuation should not be reusable because the function wasn't finished" ); let c = pool.acquire_inner(config.stack_size); drop(pool); drop(c); } }
#![allow(deprecated)]
use crate::runtime::execution::ExecutionState;
use generator::{Generator, Gn};
use scoped_tls::scoped_thread_local;
use std::cell::{Cell, RefCell};
use std::collections::VecDeque;
use std::ops::Deref;
use std::ops::DerefMut;
use std::rc::Rc;

scoped_thread_local! {
    // Thread-local pool of reusable continuations; set up by the test runner so
    // `ContinuationPool::acquire` can recycle generator stacks across executions.
    pub(crate) static CONTINUATION_POOL: ContinuationPool
}

/// A resumable unit of execution backed by a stackful generator.
///
/// The generator runs a loop that repeatedly yields `Finished`, waits to be
/// handed a closure via `function`, runs it, and yields `Finished` again. The
/// `state` field tracks whether a closure is currently installed/running.
pub(crate) struct Continuation {
    // The underlying generator. Input tells it to resume or exit; output
    // reports whether it yielded mid-closure, finished a closure, or exited.
    generator: Generator<'static, ContinuationInput, ContinuationOutput>,
    // Shared cell through which a closure is handed to the generator body.
    function: ContinuationFunction,
    state: ContinuationState,
}

#[allow(clippy::type_complexity)]
#[derive(Clone)]
struct ContinuationFunction(Rc<Cell<Option<Box<dyn FnOnce() + Send>>>>);

// SAFETY-NOTE(review): `Rc<Cell<...>>` is not `Send`; this impl presumably
// relies on a continuation (and its function cell) only ever being touched by
// one thread at a time — TODO confirm against the runtime's threading model.
unsafe impl Send for ContinuationFunction {}

/// Messages passed *into* the generator when it is resumed.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum ContinuationInput {
    Resume,
    Exit,
}

/// Messages yielded *out of* the generator.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum ContinuationOutput {
    // The installed closure yielded control partway through.
    Yielded,
    // The installed closure ran to completion (also the initial parked state).
    Finished,
    // The generator's loop terminated in response to `Exit`.
    Exited,
}

/// Lifecycle of a `Continuation`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum ContinuationState {
    // No closure installed; parked and eligible for reuse.
    NotReady,
    // A closure has been installed but not yet resumed.
    Ready,
    // A closure is executing (or the state was forced here; see
    // `Drop for ContinuationPool`).
    Running,
}

impl Continuation {
    /// Create a continuation with a fresh generator of the given stack size.
    ///
    /// The generator is resumed once here so that it parks at its initial
    /// `yield_(Finished)`, ready to receive a closure via `initialize`.
    pub fn new(stack_size: usize) -> Self {
        let function = ContinuationFunction(Rc::new(Cell::new(None)));

        let mut gen = {
            let function = function.clone();
            Gn::new_opt(stack_size, move || {
                loop {
                    // Park until the owner resumes us; `None` or `Exit` means
                    // the continuation is being torn down.
                    match generator::yield_(ContinuationOutput::Finished) {
                        None | Some(ContinuationInput::Exit) => break,
                        _ => (),
                    }

                    // Take the closure installed by `initialize` and run it.
                    let f = function.0.take().expect("must have a function to run");
                    f();
                }

                ContinuationOutput::Exited
            })
        };

        // Prime the generator: run it up to its first `yield_(Finished)`.
        let ret = gen.resume().unwrap();
        debug_assert_eq!(ret, ContinuationOutput::Finished);

        Self {
            generator: gen,
            function,
            state: ContinuationState::NotReady,
        }
    }

    /// Resume the continuation. Returns `true` if the installed closure ran to
    /// completion, `false` if it yielded partway through.
    pub fn resume(&mut self) -> bool {
        debug_assert!(self.state == ContinuationState::Ready || self.state == ContinuationState::Running);

        let ret = self.resume_with_input(ContinuationInput::Resume);

        debug_assert_ne!(
            ret,
            ContinuationOutput::Exited,
            "continuation should not exit if resumed from user code"
        );

        ret == ContinuationOutput::Finished
    }

    // Drive the generator one step with the given input; on `Finished` the
    // continuation returns to the reusable `NotReady` state.
    fn resume_with_input(&mut self, input: ContinuationInput) -> ContinuationOutput {
        self.generator.set_para(input);

        let ret = self.generator.resume().unwrap();

        if ret == ContinuationOutput::Finished {
            self.state = ContinuationState::NotReady;
        }

        ret
    }

    // A continuation can be returned to the pool only when it is parked with
    // no closure installed.
    fn reusable(&self) -> bool {
        self.state == ContinuationState::NotReady
    }
}

impl Drop for Continuation {
    fn drop(&mut self) {
        // If the generator is parked cleanly, ask it to exit so its stack
        // unwinds through the normal `Exited` path. A non-reusable
        // continuation (mid-closure, or force-marked `Running` by the pool's
        // Drop) is dropped without this handshake.
        if self.reusable() {
            let ret = self.resume_with_input(ContinuationInput::Exit);
            debug_assert_eq!(ret, ContinuationOutput::Exited);
        }
    }
}

/// A pool of parked continuations, recycled to avoid repeatedly allocating
/// generator stacks.
pub(crate) struct ContinuationPool {
    continuations: Rc<RefCell<VecDeque<Continuation>>>,
}

impl ContinuationPool {
    pub fn new() -> Self {
        Self {
            continuations: Rc::new(RefCell::new(VecDeque::new())),
        }
    }

    /// Acquire a continuation from the thread-local pool if one is installed,
    /// or from a throwaway pool otherwise.
    pub fn acquire(stack_size: usize) -> PooledContinuation {
        if CONTINUATION_POOL.is_set() {
            CONTINUATION_POOL.with(|p| p.acquire_inner(stack_size))
        } else {
            let p = Self::new();
            p.acquire_inner(stack_size)
        }
    }

    // Pop a parked continuation, or build a fresh one if the pool is empty.
    // NOTE(review): a recycled continuation keeps its original stack size;
    // `stack_size` only applies when a new one is created — TODO confirm this
    // is intended.
    fn acquire_inner(&self, stack_size: usize) -> PooledContinuation {
        let continuation = self
            .continuations
            .borrow_mut()
            .pop_front()
            .unwrap_or_else(move || Continuation::new(stack_size));

        PooledContinuation {
            continuation: Some(continuation),
            queue: self.continuations.clone(),
        }
    }
}

impl Drop for ContinuationPool {
    fn drop(&mut self) {
        // Force every pooled continuation out of the reusable state so that
        // `Drop for Continuation` skips the exit handshake (it would otherwise
        // resume each generator during pool teardown).
        for c in self.continuations.borrow_mut().iter_mut() {
            c.state = ContinuationState::Running;
        }
    }
}

/// RAII handle to a pooled `Continuation`; returns it to the pool on drop if
/// it is still reusable.
pub(crate) struct PooledContinuation {
    continuation: Option<Continuation>,
    queue: Rc<RefCell<VecDeque<Continuation>>>,
}

impl Drop for PooledContinuation {
    fn drop(&mut self) {
        let c = self.continuation.take().unwrap();
        // Only cleanly-finished continuations go back; a yielded-but-unfinished
        // one is discarded (its own Drop handles teardown).
        if c.reusable() {
            self.queue.borrow_mut().push_back(c);
        }
    }
}

impl Deref for PooledContinuation {
    type Target = Continuation;

    fn deref(&self) -> &Self::Target {
        self.continuation.as_ref().unwrap()
    }
}

impl DerefMut for PooledContinuation {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.continuation.as_mut().unwrap()
    }
}

impl std::fmt::Debug for PooledContinuation {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("PooledContinuation").finish()
    }
}

// SAFETY-NOTE(review): contains `Rc<RefCell<...>>`; as with
// `ContinuationFunction`, presumably sound only under single-threaded access
// at any given time — TODO confirm.
unsafe impl Send for PooledContinuation {}

/// Possible context-switch point: if the scheduler wants to switch away from
/// the current task, yield back to the executor and block until resumed.
pub(crate) fn switch() {
    if ExecutionState::maybe_yield() {
        let r = generator::yield_(ContinuationOutput::Yielded).unwrap();
        assert!(matches!(r, ContinuationInput::Resume));
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::Config;

    // A continuation whose closure runs to completion should be returned to
    // the pool; one that is dropped mid-yield should not.
    #[test]
    fn reusable_continuation_drop() {
        let pool = ContinuationPool::new();
        let config: Config = Default::default();

        let mut c = pool.acquire_inner(config.stack_size);
        c.initialize(Box::new(|| {
            let _ = 1 + 1;
        }));

        let r = c.resume();
        assert!(r, "continuation only has one step");

        drop(c);
        assert_eq!(
            pool.continuations.borrow().len(),
            1,
            "continuation should be reusable because the function finished"
        );

        let mut c = pool.acquire_inner(config.stack_size);
        c.initialize(Box::new(|| {
            generator::yield_with(ContinuationOutput::Yielded);
            let _ = 1 + 1;
        }));

        let r = c.resume();
        assert!(!r, "continuation yields once, shouldn't be finished yet");

        drop(c);
        assert_eq!(
            pool.continuations.borrow().len(),
            0,
            "continuation should not be reusable because the function wasn't finished"
        );

        let c = pool.acquire_inner(config.stack_size);
        drop(pool);
        drop(c);
    }
}
/// Arm this continuation with the closure it will run on its next `resume`.
///
/// The closure is handed to the parked generator through the shared
/// `function` cell, and the continuation transitions to `Ready`.
pub fn initialize(&mut self, fun: Box<dyn FnOnce() + Send>) {
    // A continuation may only be armed while parked with no closure pending.
    debug_assert_eq!(
        self.state,
        ContinuationState::NotReady,
        "shouldn't replace a function before it runs"
    );

    // Stash the closure where the generator body will `take()` it; the cell
    // must be empty, otherwise we would clobber an unexecuted closure.
    let previous = self.function.0.replace(Some(fun));
    debug_assert!(previous.is_none(), "shouldn't replace a function before it runs");

    self.state = ContinuationState::Ready;
}
function_block-function_prefix_line
[ { "content": "/// Run the given function under a randomized concurrency scheduler for some number of iterations.\n\n/// Each iteration will run a (potentially) different randomized schedule.\n\npub fn check_random<F>(f: F, iterations: usize)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n{\n\n use crate::scheduler::RandomScheduler;\n\n\n\n let scheduler = RandomScheduler::new(iterations);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(f);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 216016.19799572162 }, { "content": "#[track_caller]\n\npub fn check_clock(f: impl Fn(usize, u32) -> bool) {\n\n for (i, &c) in current::clock().iter().enumerate() {\n\n assert!(\n\n f(i, c),\n\n \"clock {:?} doesn't satisfy predicate at {}\",\n\n current::clock(),\n\n i\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 1, "score": 215107.49940371205 }, { "content": "/// Run the given function under a PCT concurrency scheduler for some number of iterations at the\n\n/// given depth. 
Each iteration will run a (potentially) different randomized schedule.\n\npub fn check_pct<F>(f: F, iterations: usize, depth: usize)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n{\n\n use crate::scheduler::PctScheduler;\n\n\n\n let scheduler = PctScheduler::new(depth, iterations);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(f);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 213199.6153677763 }, { "content": "/// Run the given function under a depth-first-search scheduler until all interleavings have been\n\n/// explored (but if the max_iterations bound is provided, stop after that many iterations).\n\npub fn check_dfs<F>(f: F, max_iterations: Option<usize>)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n{\n\n use crate::scheduler::DfsScheduler;\n\n\n\n let scheduler = DfsScheduler::new(max_iterations, false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(f);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 205098.37431829562 }, { "content": "#[doc(hidden)]\n\npub fn check<F>(f: F)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n{\n\n use crate::scheduler::RoundRobinScheduler;\n\n\n\n let runner = Runner::new(RoundRobinScheduler::new(), Default::default());\n\n runner.run(f);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 199701.09882888995 }, { "content": "// Same as `basic`, but with a static Once. 
Static synchronization primitives should be reset across\n\n// executions, so this test should work exactly the same way.\n\nfn basic_static<F>(num_threads: usize, checker: F)\n\nwhere\n\n F: FnOnce(Box<dyn Fn() + Send + Sync + 'static>),\n\n{\n\n static O: Once = Once::new();\n\n\n\n let initializer = Arc::new(std::sync::Mutex::new(HashSet::new()));\n\n let initializer_clone = Arc::clone(&initializer);\n\n\n\n checker(Box::new(move || {\n\n let counter = Arc::new(AtomicUsize::new(0));\n\n\n\n assert!(!O.is_completed());\n\n\n\n let threads = (0..num_threads)\n\n .map(|_| {\n\n let counter = Arc::clone(&counter);\n\n let initializer = Arc::clone(&initializer);\n\n thread::spawn(move || {\n\n O.call_once(|| {\n", "file_path": "tests/basic/once.rs", "rank": 6, "score": 187867.63988153174 }, { "content": "#[derive(Default)]\n\nstruct State {\n\n value: u64,\n\n}\n\n\n\nimpl State {\n\n fn foo(&self) -> bool {\n\n self.value > 0\n\n }\n\n\n\n fn bar(&self) -> u64 {\n\n self.value\n\n }\n\n\n\n fn update(&mut self) {\n\n self.value += 1;\n\n }\n\n}\n\n\n\n// #[tokio::main(worker_threads = 2)]\n\nasync fn main() {\n", "file_path": "tests/demo/async_match_deadlock.rs", "rank": 7, "score": 183223.8223299997 }, { "content": "/// Run the given function according to a given encoded schedule, usually produced as the output of\n\n/// a failing Shuttle test case.\n\n///\n\n/// This function allows deterministic replay of a failing schedule, as long as `f` contains no\n\n/// non-determinism other than that introduced by scheduling.\n\n///\n\n/// This is a convenience function for constructing a [`Runner`] that uses\n\n/// [`ReplayScheduler::new_from_encoded`](scheduler::ReplayScheduler::new_from_encoded).\n\npub fn replay<F>(f: F, encoded_schedule: &str)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n{\n\n use crate::scheduler::ReplayScheduler;\n\n\n\n let scheduler = ReplayScheduler::new_from_encoded(encoded_schedule);\n\n let runner = Runner::new(scheduler, 
Default::default());\n\n runner.run(f);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 182047.56347491243 }, { "content": "/// Run a future to completion on the current thread.\n\npub fn block_on<F: Future>(future: F) -> F::Output {\n\n let mut future = Box::pin(future);\n\n let waker = ExecutionState::with(|state| state.current_mut().waker());\n\n let cx = &mut Context::from_waker(&waker);\n\n\n\n thread::switch();\n\n\n\n loop {\n\n match future.as_mut().poll(cx) {\n\n Poll::Ready(result) => break result,\n\n Poll::Pending => {\n\n ExecutionState::with(|state| state.current_mut().sleep_unless_woken());\n\n }\n\n }\n\n\n\n thread::switch();\n\n }\n\n}\n\n\n\n/// Yields execution back to the scheduler.\n", "file_path": "src/future/mod.rs", "rank": 9, "score": 178798.82577829357 }, { "content": "/// Run the given function according to a schedule saved in the given file, usually produced as the\n\n/// output of a failing Shuttle test case.\n\n///\n\n/// This function allows deterministic replay of a failing schedule, as long as `f` contains no\n\n/// non-determinism other than that introduced by scheduling.\n\n///\n\n/// This is a convenience function for constructing a [`Runner`] that uses\n\n/// [`ReplayScheduler::new_from_file`](scheduler::ReplayScheduler::new_from_file).\n\npub fn replay_from_file<F, P>(f: F, path: P)\n\nwhere\n\n F: Fn() + Send + Sync + 'static,\n\n P: AsRef<std::path::Path>,\n\n{\n\n use crate::scheduler::ReplayScheduler;\n\n\n\n let scheduler = ReplayScheduler::new_from_file(path).expect(\"could not load schedule from file\");\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(f);\n\n}\n\n\n\n/// Declare a new thread local storage key of type [`LocalKey`](crate::thread::LocalKey).\n\n#[macro_export]\n\nmacro_rules! 
thread_local {\n\n // empty (base case for the recursion)\n\n () => {};\n\n\n\n // process multiple declarations with a const initializer\n\n ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = const { $init:expr }; $($rest:tt)*) => (\n", "file_path": "src/lib.rs", "rank": 10, "score": 175772.37021943793 }, { "content": "/// Spawn a new thread, returning a JoinHandle for it.\n\n///\n\n/// The join handle can be used (via the `join` method) to block until the child thread has\n\n/// finished.\n\npub fn spawn<F, T>(f: F) -> JoinHandle<T>\n\nwhere\n\n F: FnOnce() -> T,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n{\n\n spawn_named(f, None, None)\n\n}\n\n\n", "file_path": "src/thread.rs", "rank": 11, "score": 175768.94993804678 }, { "content": "/// A toy benchmark that runs a bunch of threads that just increment a counter\n\nfn counter_sync(scheduler: impl Scheduler + 'static) {\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(|| {\n\n let counter = Arc::new(AtomicUsize::new(0usize));\n\n\n\n let tasks: Vec<_> = (0..NUM_TASKS)\n\n .map(|_| {\n\n let counter = Arc::clone(&counter);\n\n thread::spawn(move || {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n })\n\n })\n\n .collect();\n\n\n\n for t in tasks {\n\n t.join().unwrap();\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/counter.rs", "rank": 12, "score": 167782.976498603 }, { "content": "/// A toy benchmark that runs a bunch of tasks that just increment a counter\n\nfn counter_async(scheduler: impl Scheduler + 'static) {\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(|| {\n\n let counter = Arc::new(AtomicUsize::new(0usize));\n\n\n\n let tasks: Vec<_> = (0..NUM_TASKS)\n\n .map(|_| {\n\n let counter = Arc::clone(&counter);\n\n future::spawn(async move {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n })\n\n })\n\n .collect();\n\n\n\n future::block_on(async move {\n\n for t in tasks {\n\n t.await.unwrap();\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": 
"benches/counter.rs", "rank": 13, "score": 167782.976498603 }, { "content": "pub fn bounded_buffer_benchmark(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"buffer\");\n\n g.throughput(Throughput::Elements(ITERATIONS as u64));\n\n\n\n g.bench_function(\"pct\", |b| {\n\n b.iter(|| {\n\n let scheduler = PctScheduler::new_from_seed(0x12345678, 2, ITERATIONS);\n\n bounded_buffer_check(scheduler);\n\n });\n\n });\n\n\n\n g.bench_function(\"random\", |b| {\n\n b.iter(|| {\n\n let scheduler = RandomScheduler::new_from_seed(0x12345678, ITERATIONS);\n\n bounded_buffer_check(scheduler);\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bounded_buffer_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/buffer.rs", "rank": 14, "score": 167219.9786317374 }, { "content": "pub fn basic_lock_benchmark(c: &mut Criterion) {\n\n const ITERATIONS: usize = 1000;\n\n\n\n let mut g = c.benchmark_group(\"lock\");\n\n g.throughput(Throughput::Elements(ITERATIONS as u64));\n\n\n\n g.bench_function(\"pct\", |b| {\n\n b.iter(|| {\n\n let scheduler = PctScheduler::new_from_seed(0x12345678, 2, ITERATIONS);\n\n basic_lock_check(scheduler);\n\n });\n\n });\n\n\n\n g.bench_function(\"random\", |b| {\n\n b.iter(|| {\n\n let scheduler = RandomScheduler::new_from_seed(0x12345678, ITERATIONS);\n\n basic_lock_check(scheduler);\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, basic_lock_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/lock.rs", "rank": 15, "score": 167219.9786317374 }, { "content": "pub fn counter_async_benchmark(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"counter async\");\n\n g.throughput(Throughput::Elements(ITERATIONS as u64));\n\n\n\n g.bench_function(\"pct\", |b| {\n\n b.iter(|| {\n\n let scheduler = PctScheduler::new_from_seed(0x12345678, 2, ITERATIONS);\n\n counter_async(scheduler);\n\n });\n\n });\n\n\n\n g.bench_function(\"random\", |b| {\n\n b.iter(|| {\n\n let scheduler = 
RandomScheduler::new_from_seed(0x12345678, ITERATIONS);\n\n counter_async(scheduler);\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/counter.rs", "rank": 16, "score": 167219.9786317374 }, { "content": "pub fn counter_sync_benchmark(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"counter sync\");\n\n g.throughput(Throughput::Elements(ITERATIONS as u64));\n\n\n\n g.bench_function(\"pct\", |b| {\n\n b.iter(|| {\n\n let scheduler = PctScheduler::new_from_seed(0x12345678, 2, ITERATIONS);\n\n counter_sync(scheduler);\n\n });\n\n });\n\n\n\n g.bench_function(\"random\", |b| {\n\n b.iter(|| {\n\n let scheduler = RandomScheduler::new_from_seed(0x12345678, ITERATIONS);\n\n counter_sync(scheduler);\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, counter_async_benchmark, counter_sync_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/counter.rs", "rank": 17, "score": 167219.9786317374 }, { "content": "/// Spawn a new async task that the executor will run to completion.\n\npub fn spawn<T, F>(fut: F) -> JoinHandle<T>\n\nwhere\n\n F: Future<Output = T> + Send + 'static,\n\n T: Send + 'static,\n\n{\n\n let result = std::sync::Arc::new(std::sync::Mutex::new(None));\n\n let stack_size = ExecutionState::with(|s| s.config.stack_size);\n\n let task_id = ExecutionState::spawn_future(Wrapper::new(fut, std::sync::Arc::clone(&result)), stack_size, None);\n\n\n\n thread::switch();\n\n\n\n JoinHandle { task_id, result }\n\n}\n\n\n\n/// An owned permission to join on an async task (await its termination).\n\n#[derive(Debug)]\n\npub struct JoinHandle<T> {\n\n task_id: TaskId,\n\n result: std::sync::Arc<std::sync::Mutex<Option<Result<T, JoinError>>>>,\n\n}\n", "file_path": "src/future/mod.rs", "rank": 18, "score": 164752.05945511174 }, { "content": "/// A simple benchmark that just runs 3 threads incrementing a lock a bunch of times. 
This is a\n\n/// stress test of our `Execution` logic, since the threads spend basically all their time taking\n\n/// and dropping the lock.\n\nfn basic_lock_check(scheduler: impl Scheduler + 'static) {\n\n const INNER_ITERATIONS: usize = 200;\n\n\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(|| {\n\n let lock = Arc::new(Mutex::new(0usize));\n\n\n\n let thds: Vec<_> = (0..3)\n\n .map(|_| {\n\n let lock = Arc::clone(&lock);\n\n thread::spawn(move || {\n\n for _ in 0..INNER_ITERATIONS {\n\n *lock.lock().unwrap() += 1;\n\n }\n\n })\n\n })\n\n .collect();\n\n\n\n for thd in thds {\n\n thd.join().unwrap();\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/lock.rs", "rank": 19, "score": 164639.88805100537 }, { "content": "/// An implementation of a bounded concurrent queue, minus the actual queue part. Producers wait\n\n/// until there's space in the queue, and then put their object in. Consumers wait until the queue\n\n/// is non-empty, and then consume something from the queue.\n\nfn bounded_buffer_check(scheduler: impl Scheduler + 'static) {\n\n let runner = Runner::new(scheduler, Default::default());\n\n\n\n runner.run(move || {\n\n let lock = Arc::new(Mutex::new(()));\n\n let has_space = Arc::new(Condvar::new()); // count < MAX_QUEUE_SIZE\n\n let has_elements = Arc::new(Condvar::new()); // count > 0\n\n let count = Arc::new(AtomicUsize::new(0));\n\n\n\n let consumers = (0..NUM_CONSUMERS)\n\n .map(|_| {\n\n let lock = Arc::clone(&lock);\n\n let has_space = Arc::clone(&has_space);\n\n let has_elements = Arc::clone(&has_elements);\n\n let count = Arc::clone(&count);\n\n thread::spawn(move || {\n\n let events = NUM_EVENTS / NUM_CONSUMERS;\n\n for _ in 0..events {\n\n let mut guard = lock.lock().unwrap();\n\n while count.load(Ordering::SeqCst) == 0 {\n", "file_path": "benches/buffer.rs", "rank": 20, "score": 164635.8705409352 }, { "content": "/// Emits a machine instruction to signal the processor that it is running in a busy-wait 
spin-loop\n\n/// (“spin lock”).\n\npub fn spin_loop() {\n\n // Probably not necessary, but let's emit it just in case\n\n std::hint::spin_loop();\n\n\n\n thread::yield_now();\n\n}\n", "file_path": "src/hint.rs", "rank": 21, "score": 164511.2509518337 }, { "content": "/// Cooperatively gives up a timeslice to the Shuttle scheduler.\n\n///\n\n/// Some Shuttle schedulers use this as a hint to deprioritize the current thread in order for other\n\n/// threads to make progress (e.g., in a spin loop).\n\npub fn yield_now() {\n\n let waker = ExecutionState::with(|state| state.current().waker());\n\n waker.wake_by_ref();\n\n ExecutionState::request_yield();\n\n thread::switch();\n\n}\n\n\n", "file_path": "src/thread.rs", "rank": 22, "score": 164466.11647148358 }, { "content": "// Check that PCT correctly deprioritizes a yielding thread. If it wasn't, there would be some\n\n// iteration of this test where the yielding thread has the highest priority and so the others\n\n// never make progress.\n\nfn yield_spin_loop(use_yield: bool) {\n\n const NUM_THREADS: usize = 4;\n\n\n\n let scheduler = PctScheduler::new(1, 100);\n\n let mut config = Config::new();\n\n config.max_steps = MaxSteps::FailAfter(50);\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || {\n\n let count = Arc::new(AtomicUsize::new(0usize));\n\n\n\n let _thds = (0..NUM_THREADS)\n\n .map(|_| {\n\n let count = count.clone();\n\n thread::spawn(move || {\n\n count.fetch_add(1, Ordering::SeqCst);\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n while count.load(Ordering::SeqCst) < NUM_THREADS {\n\n if use_yield {\n\n thread::yield_now();\n\n } else {\n\n thread::sleep(Duration::from_millis(1));\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "tests/basic/pct.rs", "rank": 23, "score": 162676.62419345533 }, { "content": "// Check that PCT correctly deprioritizes a yielding task. 
If it wasn't, there would be some\n\n// iteration of this test where the yielding task has the highest priority and so the others\n\n// never make progress.\n\nfn yield_spin_loop(use_yield: bool) {\n\n const NUM_TASKS: usize = 4;\n\n\n\n let scheduler = PctScheduler::new(1, 100);\n\n let mut config = Config::new();\n\n config.max_steps = MaxSteps::FailAfter(50);\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || {\n\n let count = Arc::new(AtomicUsize::new(0usize));\n\n\n\n let _thds = (0..NUM_TASKS)\n\n .map(|_| {\n\n let count = count.clone();\n\n future::spawn(async move {\n\n count.fetch_add(1, Ordering::SeqCst);\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n future::block_on(async move {\n", "file_path": "tests/future/pct.rs", "rank": 24, "score": 162676.62419345533 }, { "content": "pub fn me() -> usize {\n\n usize::from(thread::current().id())\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 25, "score": 159896.0817271629 }, { "content": "fn basic<F>(num_threads: usize, checker: F)\n\nwhere\n\n F: FnOnce(Box<dyn Fn() + Send + Sync + 'static>),\n\n{\n\n let initializer = Arc::new(std::sync::Mutex::new(HashSet::new()));\n\n let initializer_clone = Arc::clone(&initializer);\n\n\n\n checker(Box::new(move || {\n\n let once = Arc::new(Once::new());\n\n let counter = Arc::new(AtomicUsize::new(0));\n\n\n\n assert!(!once.is_completed());\n\n\n\n let threads = (0..num_threads)\n\n .map(|_| {\n\n let once = Arc::clone(&once);\n\n let counter = Arc::clone(&counter);\n\n let initializer = Arc::clone(&initializer);\n\n thread::spawn(move || {\n\n once.call_once(|| {\n", "file_path": "tests/basic/once.rs", "rank": 27, "score": 157611.60702462928 }, { "content": "/// The number of context switches that happened so far in the current Shuttle execution.\n\n///\n\n/// Note that this is the number of *possible* context switches, i.e., including times when the\n\n/// scheduler decided to continue with the same task. 
This means the result can be used as a\n\n/// timestamp for atomic actions during an execution.\n\n///\n\n/// Panics if called outside of a Shuttle execution.\n\npub fn context_switches() -> usize {\n\n ExecutionState::context_switches()\n\n}\n\n\n", "file_path": "src/current.rs", "rank": 30, "score": 156095.12946452567 }, { "content": "/// Validate that we see every permutation of message orderings at the receiver of the mpsc channel\n\nfn mpsc_stream_permutations<Tx, Rx, F>(num_tasks: usize, creator: F)\n\nwhere\n\n F: Fn() -> (Tx, Rx) + Send + Sync + 'static,\n\n Tx: Sink<usize> + Clone + Unpin + Send + 'static,\n\n Tx::Error: std::fmt::Debug,\n\n Rx: Stream<Item = usize> + Send + 'static,\n\n{\n\n let permutations = std::sync::Arc::new(std::sync::Mutex::new(HashSet::new()));\n\n let permutations_clone = permutations.clone();\n\n\n\n check_dfs(\n\n move || {\n\n let (tx, rx) = creator();\n\n\n\n for i in 0..num_tasks {\n\n let mut tx = tx.clone();\n\n future::spawn(async move {\n\n tx.send(i).await.expect(\"send should succeed\");\n\n });\n\n }\n", "file_path": "tests/future/channel.rs", "rank": 31, "score": 140084.93028915502 }, { "content": "fn mpsc_stream_sum<Tx, Rx, F>(num_tasks: usize, creator: F)\n\nwhere\n\n F: Fn() -> (Tx, Rx),\n\n Tx: Sink<usize> + Clone + Unpin + Send + 'static,\n\n Tx::Error: std::fmt::Debug,\n\n Rx: Stream<Item = usize> + Send + 'static,\n\n{\n\n let (tx, rx) = creator();\n\n\n\n for i in 0..num_tasks {\n\n let mut tx = tx.clone();\n\n future::spawn(async move {\n\n tx.send(i).await.expect(\"send should succeed\");\n\n });\n\n }\n\n\n\n // A channel's stream terminates when all senders are dropped. 
Every task has its own clone of\n\n // the sender, so we need to drop the original one.\n\n drop(tx);\n\n\n\n future::block_on(async move {\n\n let stream = rx.fold(0, |acc, x| async move { acc + x });\n\n let result = stream.await;\n\n assert_eq!(result, num_tasks * (num_tasks - 1) / 2);\n\n })\n\n}\n\n\n", "file_path": "tests/future/channel.rs", "rank": 32, "score": 140084.93028915502 }, { "content": "// We wrap a task returning a value inside a wrapper task that returns (). The wrapper\n\n// contains a mutex-wrapped field that stores the value where it can be passed to a task\n\n// waiting on the join handle.\n\nstruct Wrapper<T, F> {\n\n future: Pin<Box<F>>,\n\n result: std::sync::Arc<std::sync::Mutex<Option<Result<T, JoinError>>>>,\n\n}\n\n\n\nimpl<T, F> Wrapper<T, F>\n\nwhere\n\n F: Future<Output = T> + Send + 'static,\n\n{\n\n fn new(future: F, result: std::sync::Arc<std::sync::Mutex<Option<Result<T, JoinError>>>>) -> Self {\n\n Self {\n\n future: Box::pin(future),\n\n result,\n\n }\n\n }\n\n}\n\n\n\nimpl<T, F> Future for Wrapper<T, F>\n\nwhere\n\n F: Future<Output = T> + Send + 'static,\n", "file_path": "src/future/mod.rs", "rank": 33, "score": 132908.5325031607 }, { "content": "fn spawn_named<F, T>(f: F, name: Option<String>, stack_size: Option<usize>) -> JoinHandle<T>\n\nwhere\n\n F: FnOnce() -> T,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n{\n\n // TODO Check if it's worth avoiding the call to `ExecutionState::config()` if we're going\n\n // TODO to use an existing continuation from the pool.\n\n let stack_size = stack_size.unwrap_or_else(|| ExecutionState::with(|s| s.config.stack_size));\n\n let result = std::sync::Arc::new(std::sync::Mutex::new(None));\n\n let task_id = {\n\n let result = std::sync::Arc::clone(&result);\n\n let f = move || thread_fn(f, result);\n\n ExecutionState::spawn_thread(f, stack_size, name.clone(), None)\n\n };\n\n\n\n thread::switch();\n\n\n\n let thread = Thread {\n\n id: ThreadId { task_id },\n", "file_path": 
"src/thread.rs", "rank": 34, "score": 131180.93124127015 }, { "content": "fn two_workers<F>(signal_thread: F)\n\nwhere\n\n F: Fn(Arc<Condvar>),\n\n{\n\n let lock = Arc::new(Mutex::new(false));\n\n let cond = Arc::new(Condvar::new());\n\n\n\n for _ in 0..2 {\n\n let lock = Arc::clone(&lock);\n\n let cond = Arc::clone(&cond);\n\n thread::spawn(move || {\n\n let mut guard = lock.lock().unwrap();\n\n while !*guard {\n\n guard = cond.wait(guard).unwrap();\n\n }\n\n });\n\n }\n\n\n\n *lock.lock().unwrap() = true;\n\n signal_thread(cond);\n\n}\n\n\n", "file_path": "tests/basic/condvar.rs", "rank": 35, "score": 129572.96240833172 }, { "content": "/// Blocks unless or until the current thread's token is made available.\n\npub fn park() {\n\n let switch = ExecutionState::with(|s| s.current_mut().park());\n\n\n\n // We only need to context switch if the park token was unavailable. If it was available, then\n\n // any execution reachable by context switching here would also be reachable by having not\n\n // chosen this thread at the last context switch, because the park state of a thread is only\n\n // observable by the thread itself.\n\n if switch {\n\n thread::switch();\n\n }\n\n}\n\n\n", "file_path": "src/thread.rs", "rank": 36, "score": 128380.01295140595 }, { "content": "#[test]\n\nfn yield_spin_loop_fair() {\n\n yield_spin_loop(true);\n\n}\n\n\n", "file_path": "tests/basic/pct.rs", "rank": 37, "score": 127813.61471537196 }, { "content": "#[test]\n\n#[should_panic(expected = \"exceeded max_steps bound\")]\n\nfn yield_spin_loop_unfair() {\n\n yield_spin_loop(false);\n\n}\n\n\n\n#[test]\n\n#[should_panic(expected = \"null dereference\")]\n", "file_path": "tests/basic/pct.rs", "rank": 38, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_spin_loop_fair() {\n\n yield_spin_loop(true);\n\n}\n\n\n", "file_path": "tests/future/pct.rs", "rank": 39, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_loop_three_threads() {\n\n let iterations = 
Arc::new(AtomicUsize::new(0));\n\n\n\n {\n\n let counter = Arc::clone(&iterations);\n\n let scheduler = DfsScheduler::new(None, false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(move || {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n\n\n thread::spawn(|| {\n\n for _ in 0..3 {\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n thread::spawn(|| {\n\n for _ in 0..3 {\n\n thread::yield_now();\n\n }\n", "file_path": "tests/basic/dfs.rs", "rank": 40, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_loop_one_thread() {\n\n let iterations = Arc::new(AtomicUsize::new(0));\n\n\n\n {\n\n let counter = Arc::clone(&iterations);\n\n let scheduler = DfsScheduler::new(None, false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(move || {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n\n\n thread::spawn(|| {\n\n for _ in 0..4 {\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n // no-op\n\n });\n\n }\n\n\n\n // 6 places we can run thread 0: before thread 1 starts, before each of the 4 yields, or last\n\n assert_eq!(iterations.load(Ordering::SeqCst), 6);\n\n}\n\n\n", "file_path": "tests/basic/dfs.rs", "rank": 41, "score": 127813.61471537196 }, { "content": "#[test]\n\n#[should_panic(expected = \"exceeded max_steps bound\")]\n\nfn yield_spin_loop_unfair() {\n\n yield_spin_loop(false);\n\n}\n", "file_path": "tests/future/pct.rs", "rank": 42, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_loop_two_threads() {\n\n let iterations = Arc::new(AtomicUsize::new(0));\n\n\n\n {\n\n let counter = Arc::clone(&iterations);\n\n let scheduler = DfsScheduler::new(None, false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(move || {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n\n\n thread::spawn(|| {\n\n for _ in 0..4 {\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n for _ in 0..4 {\n\n thread::yield_now();\n\n }\n\n });\n\n }\n\n\n\n // 2 threads, 5 operations each (thread start 
+ 4 yields)\n\n // 2*5 choose 5 = 252\n\n assert_eq!(iterations.load(Ordering::SeqCst), 252);\n\n}\n\n\n", "file_path": "tests/basic/dfs.rs", "rank": 43, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_loop_max_depth() {\n\n let iterations = Arc::new(AtomicUsize::new(0));\n\n\n\n {\n\n let counter = Arc::clone(&iterations);\n\n let scheduler = DfsScheduler::new(None, false);\n\n let runner = Runner::new(scheduler, max_steps(20));\n\n runner.run(move || {\n\n for _ in 0..100 {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n thread::yield_now();\n\n }\n\n });\n\n }\n\n\n\n assert_eq!(iterations.load(Ordering::SeqCst), 20);\n\n}\n", "file_path": "tests/basic/dfs.rs", "rank": 44, "score": 127813.61471537196 }, { "content": "#[test]\n\nfn yield_loop_two_threads_bounded() {\n\n let iterations = Arc::new(AtomicUsize::new(0));\n\n\n\n {\n\n let counter = Arc::clone(&iterations);\n\n let scheduler = DfsScheduler::new(Some(100), false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(move || {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n\n\n thread::spawn(|| {\n\n for _ in 0..4 {\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n for _ in 0..4 {\n\n thread::yield_now();\n\n }\n\n });\n\n }\n\n\n\n assert_eq!(iterations.load(Ordering::SeqCst), 100);\n\n}\n\n\n", "file_path": "tests/basic/dfs.rs", "rank": 45, "score": 124470.36923469327 }, { "content": "/// Get a handle to the thread that invokes it\n\npub fn current() -> Thread {\n\n let (task_id, name) = ExecutionState::with(|s| {\n\n let me = s.current();\n\n (me.id(), me.name())\n\n });\n\n\n\n Thread {\n\n id: ThreadId { task_id },\n\n name,\n\n }\n\n}\n\n\n", "file_path": "src/thread.rs", "rank": 46, "score": 121197.74968958407 }, { "content": "// Note: `panic_iteration` is 1-indexed because \"iterations\" is a count\n\nfn iterations_test(run_iterations: usize, panic_iteration: usize) {\n\n let metrics = MetricsSubscriber::new();\n\n let _guard = 
tracing::subscriber::set_default(metrics.clone());\n\n\n\n let iterations = Arc::new(AtomicUsize::new(0));\n\n\n\n let result = catch_unwind(AssertUnwindSafe(|| {\n\n check_random(\n\n move || {\n\n iterations.fetch_add(1, Ordering::SeqCst);\n\n if iterations.load(Ordering::SeqCst) >= panic_iteration {\n\n panic!(\"expected panic\");\n\n }\n\n\n\n thread::spawn(move || {\n\n thread::yield_now();\n\n });\n\n },\n\n run_iterations,\n\n );\n\n }));\n\n\n\n assert_eq!(result.is_err(), panic_iteration <= run_iterations);\n\n assert_eq!(\n\n metrics.iterations.load(Ordering::SeqCst),\n\n run_iterations.min(panic_iteration)\n\n );\n\n}\n\n\n", "file_path": "tests/basic/metrics.rs", "rank": 47, "score": 120322.91002671179 }, { "content": "/// Get the current thread's vector clock\n\npub fn clock() -> VectorClock {\n\n crate::runtime::execution::ExecutionState::with(|state| {\n\n let me = state.current();\n\n state.get_clock(me.id()).clone()\n\n })\n\n}\n\n\n", "file_path": "src/current.rs", "rank": 48, "score": 118690.54312345074 }, { "content": "/// Create a bounded channel\n\npub fn sync_channel<T>(bound: usize) -> (SyncSender<T>, Receiver<T>) {\n\n let channel = Arc::new(Channel::new(Some(bound)));\n\n let sender = SyncSender {\n\n inner: Arc::clone(&channel),\n\n };\n\n let receiver = Receiver {\n\n inner: Arc::clone(&channel),\n\n };\n\n (sender, receiver)\n\n}\n\n\n", "file_path": "src/sync/mpsc.rs", "rank": 49, "score": 116785.33456698933 }, { "content": "/// Validates that schedule replay works by running a test, expecting it to fail, and then parsing\n\n/// and replaying the failing schedule from its output.\n\nfn check_replay_roundtrip<F, S>(test_func: F, scheduler: S)\n\nwhere\n\n F: Fn() + Send + Sync + RefUnwindSafe + 'static,\n\n S: Scheduler + UnwindSafe + 'static,\n\n{\n\n let test_func = Arc::new(test_func);\n\n\n\n // Run the test that should fail and capture the schedule it prints\n\n let result = {\n\n let test_func = test_func.clone();\n\n 
panic::catch_unwind(move || {\n\n let mut config = Config::new();\n\n config.failure_persistence = FailurePersistence::Print;\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || test_func())\n\n })\n\n .expect_err(\"test should panic\")\n\n };\n\n let output = result.downcast::<String>().unwrap();\n\n let schedule = parse_schedule::from_stdout(&output).expect(\"output should contain a schedule\");\n", "file_path": "tests/mod.rs", "rank": 50, "score": 115938.28720515197 }, { "content": "/// A `Once` cell can either be `Running`, in which case a `Mutex` mediates racing threads trying to\n\n/// invoke `call_once`, or `Complete` once an initializer has completed, in which case the `Mutex`\n\n/// is no longer necessary.\n\nenum OnceInitState {\n\n Running(Rc<Mutex<bool>>),\n\n Complete(VectorClock),\n\n}\n\n\n\nimpl std::fmt::Debug for OnceInitState {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Self::Running(_) => write!(f, \"Running\"),\n\n Self::Complete(_) => write!(f, \"Complete\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Once {\n\n /// Creates a new `Once` value.\n\n pub const fn new() -> Self {\n\n Self { _dummy: 0 }\n\n }\n\n\n", "file_path": "src/sync/once.rs", "rank": 51, "score": 115803.5958937145 }, { "content": "/// Puts the current thread to sleep for at least the specified amount of time.\n\n// Note that Shuttle does not model time, so this behaves just like a context switch.\n\npub fn sleep(_dur: Duration) {\n\n thread::switch();\n\n}\n\n\n", "file_path": "src/thread.rs", "rank": 52, "score": 114922.65814816294 }, { "content": "/// Validates that schedule replay works by running a test, expecting it to fail, and then parsing\n\n/// and replaying the failing schedule from its output.\n\nfn check_replay_roundtrip_file<F, S>(test_func: F, scheduler: S)\n\nwhere\n\n F: Fn() + Send + Sync + RefUnwindSafe + 'static,\n\n S: Scheduler + UnwindSafe + 'static,\n\n{\n\n let tempdir = 
tempfile::tempdir().expect(\"could not create tempdir\");\n\n let test_func = Arc::new(test_func);\n\n\n\n // Run the test that should fail and capture the schedule it prints\n\n let result = {\n\n let test_func = test_func.clone();\n\n let tempdir_path = tempdir.path().to_path_buf();\n\n panic::catch_unwind(move || {\n\n let mut config = Config::new();\n\n config.failure_persistence = FailurePersistence::File(Some(tempdir_path));\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || test_func())\n\n })\n\n .expect_err(\"test should panic\")\n\n };\n", "file_path": "tests/mod.rs", "rank": 53, "score": 114357.45825928399 }, { "content": "#[derive(Debug)]\n\nstruct BarrierState {\n\n bound: usize,\n\n leader: Option<TaskId>,\n\n waiters: HashSet<TaskId>,\n\n clock: VectorClock,\n\n}\n\n\n\n#[derive(Debug)]\n\n/// A barrier enables multiple threads to synchronize the beginning of some computation.\n\npub struct Barrier {\n\n state: Rc<RefCell<BarrierState>>,\n\n}\n\n\n\nimpl Barrier {\n\n /// Creates a new barrier that can block a given number of threads.\n\n /// A barrier will block n-1 threads which call `wait()` and then wake up all threads\n\n /// at once when the nth thread calls `wait()`.\n\n pub fn new(n: usize) -> Self {\n\n let state = BarrierState {\n\n bound: n,\n", "file_path": "src/sync/barrier.rs", "rank": 54, "score": 112772.83493415231 }, { "content": "#[derive(Debug)]\n\nstruct CondvarState {\n\n waiters: HashMap<TaskId, CondvarWaitStatus>,\n\n next_epoch: usize,\n\n}\n\n\n\n// For tracking causal dependencies, we record the clock C of the thread that does the notify.\n\n// When a thread is unblocked, its clock is updated by C.\n", "file_path": "src/sync/condvar.rs", "rank": 55, "score": 112772.83493415231 }, { "content": "#[derive(Debug)]\n\nstruct MutexState {\n\n holder: Option<TaskId>,\n\n waiters: TaskSet,\n\n clock: VectorClock,\n\n}\n\n\n\nimpl<T> Mutex<T> {\n\n /// Creates a new mutex in an unlocked state ready for use.\n\n 
pub fn new(value: T) -> Self {\n\n let state = MutexState {\n\n holder: None,\n\n waiters: TaskSet::new(),\n\n clock: VectorClock::new(),\n\n };\n\n\n\n Self {\n\n inner: std::sync::Mutex::new(value),\n\n state: Rc::new(RefCell::new(state)),\n\n }\n\n }\n", "file_path": "src/sync/mutex.rs", "rank": 56, "score": 112772.83493415231 }, { "content": "/// Blocks unless or until the current thread's token is made available or the specified duration\n\n/// has been reached (may wake spuriously).\n\n///\n\n/// Note that Shuttle does not module time, so this behaves identically to `park`. It cannot\n\n/// spuriously wake.\n\npub fn park_timeout(_dur: Duration) {\n\n park();\n\n}\n\n\n\n/// Thread factory, which can be used in order to configure the properties of a new thread.\n\n#[derive(Debug, Default)]\n\npub struct Builder {\n\n name: Option<String>,\n\n stack_size: Option<usize>,\n\n}\n\n\n\nimpl Builder {\n\n /// Generates the base configuration for spawning a thread, from which configuration methods can be chained.\n\n pub fn new() -> Self {\n\n Self {\n\n name: None,\n\n stack_size: None,\n\n }\n\n }\n\n\n", "file_path": "src/thread.rs", "rank": 57, "score": 112578.55161610793 }, { "content": "/// Retrieve the thread-local random number generator, seeded by the system. Intended to be used in\n\n/// method chaining style, e.g. `thread_rng().gen::<i32>()`, or cached locally, e.g.\n\n/// `let mut rng = thread_rng();`.\n\npub fn thread_rng() -> rngs::ThreadRng {\n\n rngs::ThreadRng\n\n}\n\n\n\npub use rand::{Rng, RngCore};\n", "file_path": "src/rand.rs", "rank": 58, "score": 110397.7609842945 }, { "content": "#[derive(Debug, Clone)]\n\nenum PanicHookState {\n\n Disarmed,\n\n Armed(Config),\n\n Persisted(String),\n\n}\n\n\n\nthread_local! 
{\n\n static PANIC_HOOK: Mutex<PanicHookState> = Mutex::new(PanicHookState::Disarmed);\n\n}\n\n\n\n/// A guard that disarms the panic hook when dropped\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct PanicHookGuard;\n\n\n\nimpl Drop for PanicHookGuard {\n\n fn drop(&mut self) {\n\n PANIC_HOOK.with(|lock| *lock.lock().unwrap() = PanicHookState::Disarmed);\n\n }\n\n}\n\n\n\n/// Set up a panic hook that will try to print the current schedule to stderr so that the failure\n\n/// can be replayed. Returns a guard that will disarm the panic hook when dropped.\n\n///\n\n/// See the module documentation for more details on how this method fits into the failure reporting\n\n/// story.\n", "file_path": "src/runtime/failure.rs", "rank": 59, "score": 110394.43633523706 }, { "content": "/// An atomic fence, like the standard library's [std::sync::atomic::fence].\n\npub fn fence(order: Ordering) {\n\n if order == Ordering::Relaxed {\n\n panic!(\"there is no such thing as a relaxed fence\");\n\n }\n\n\n\n maybe_warn_about_ordering(order);\n\n\n\n // SeqCst fences are no-ops in our execution model\n\n}\n\n\n\n// We can just reuse the standard library's compiler fence, as they have no visible run-time\n\n// behavior and so we need neither insert yieldpoints nor warn about non-SeqCst orderings.\n\npub use std::sync::atomic::compiler_fence;\n\n\n\n/// Base type for atomic implementations. This type handles generating the right interleavings for\n\n/// all atomics. 
The interesting stuff is in `load`, `store`, `swap`, and `fetch_update`; all other\n\n/// atomic operations are implemented in terms of those four primitives.\n", "file_path": "src/sync/atomic/mod.rs", "rank": 60, "score": 110382.13137396771 }, { "content": "#[derive(Debug)]\n\nstruct RwLockState {\n\n holder: RwLockHolder,\n\n waiting_readers: TaskSet,\n\n waiting_writers: TaskSet,\n\n clock: VectorClock,\n\n}\n\n\n", "file_path": "src/sync/rwlock.rs", "rank": 61, "score": 110164.2490269967 }, { "content": "/// Like [`shuttle::future::yield_now`] but doesn't request a yield from the scheduler\n\nstruct UnfairYieldNow {\n\n yielded: bool,\n\n}\n\n\n\nimpl Future for UnfairYieldNow {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {\n\n if self.yielded {\n\n return Poll::Ready(());\n\n }\n\n\n\n self.yielded = true;\n\n cx.waker().wake_by_ref();\n\n Poll::Pending\n\n }\n\n}\n\n\n", "file_path": "tests/future/pct.rs", "rank": 62, "score": 110105.11889129721 }, { "content": "// Note: The channels in std::sync::mpsc only support a single Receiver (which cannot be\n\n// cloned). 
The state below admits a more general use case, where multiple Senders\n\n// and Receivers can share a single channel.\n\nstruct ChannelState<T> {\n\n messages: SmallVec<[TimestampedValue<T>; MAX_INLINE_MESSAGES]>, // messages in the channel\n\n receiver_clock: Option<SmallVec<[VectorClock; MAX_INLINE_MESSAGES]>>, // receiver vector clocks for bounded case\n\n known_senders: usize, // number of senders referencing this channel\n\n known_receivers: usize, // number or receivers referencing this channel\n\n waiting_senders: SmallVec<[TaskId; DEFAULT_INLINE_TASKS]>, // list of currently blocked senders\n\n waiting_receivers: SmallVec<[TaskId; DEFAULT_INLINE_TASKS]>, // list of currently blocked receivers\n\n}\n\n\n\nimpl<T> Debug for ChannelState<T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"Channel {{ \")?;\n\n write!(f, \"num_messages: {} \", self.messages.len())?;\n\n write!(\n\n f,\n\n \"known_senders {} known_receivers {} \",\n\n self.known_senders, self.known_receivers\n\n )?;\n\n write!(f, \"waiting_senders: [{:?}] \", self.waiting_senders)?;\n\n write!(f, \"waiting_receivers: [{:?}] \", self.waiting_receivers)?;\n", "file_path": "src/sync/mpsc.rs", "rank": 63, "score": 106784.6822691902 }, { "content": "/// Gets the clock for the thread with the given task ID\n\npub fn clock_for(task_id: TaskId) -> VectorClock {\n\n ExecutionState::with(|state| state.get_clock(task_id).clone())\n\n}\n", "file_path": "src/current.rs", "rank": 64, "score": 103175.42546128752 }, { "content": "struct Inner<T: Copy> {\n\n buffer: Box<[T]>,\n\n buffer_size: usize,\n\n put_at: usize,\n\n take_at: usize,\n\n occupied: usize,\n\n}\n\n\n\nimpl<T: Copy + Default> BoundedBuffer<T> {\n\n fn new(buffer_size: usize) -> Self {\n\n let inner = Inner {\n\n buffer: vec![T::default(); buffer_size].into_boxed_slice(),\n\n buffer_size,\n\n put_at: 0,\n\n take_at: 0,\n\n occupied: 0,\n\n };\n\n\n\n BoundedBuffer {\n\n inner: 
Arc::new(Mutex::new(inner)),\n", "file_path": "tests/demo/bounded_buffer.rs", "rank": 65, "score": 101587.6976276476 }, { "content": "#[test]\n\n#[should_panic(expected = \"tried to acquire a RwLock it already holds\")]\n\nfn async_match_deadlock() {\n\n shuttle::check_random(|| tokio::block_on(main()), 1000)\n\n}\n\n\n", "file_path": "tests/demo/async_match_deadlock.rs", "rank": 66, "score": 100318.01564079786 }, { "content": "#[must_use = \"the panic hook will be disarmed when the returned guard is dropped\"]\n\npub fn init_panic_hook(config: Config) -> PanicHookGuard {\n\n static INIT: Once = Once::new();\n\n INIT.call_once(|| {\n\n let original_hook = panic::take_hook();\n\n panic::set_hook(Box::new(move |panic_info| {\n\n let state = PANIC_HOOK.with(|lock| std::mem::replace(&mut *lock.lock().unwrap(), PanicHookState::Disarmed));\n\n // The hook is armed if this is the first time it's fired\n\n if let PanicHookState::Armed(config) = state {\n\n // We might not be able to get the info we need (e.g., if we panic while borrowing\n\n // ExecutionState)\n\n if let Some((name, schedule)) = ExecutionState::failure_info() {\n\n persist_task_failure(&schedule, name, &config, true);\n\n }\n\n }\n\n original_hook(panic_info);\n\n }));\n\n });\n\n\n\n PANIC_HOOK.with(|lock| *lock.lock().unwrap() = PanicHookState::Armed(config));\n\n\n\n PanicHookGuard\n\n}\n", "file_path": "src/runtime/failure.rs", "rank": 67, "score": 99625.48179473815 }, { "content": "#[derive(Clone)]\n\nstruct BoundedBuffer<T: Copy> {\n\n inner: Arc<Mutex<Inner<T>>>,\n\n cond: Arc<Condvar>,\n\n}\n\n\n", "file_path": "tests/demo/bounded_buffer.rs", "rank": 68, "score": 99305.78526485854 }, { "content": "#[test]\n\nfn shared_static() {\n\n static O: Once = Once::new();\n\n\n\n let counter = Arc::new(AtomicUsize::new(0));\n\n let mut total_executions = 0;\n\n\n\n // Try a bunch of times to provoke the race\n\n for _ in 0..50 {\n\n #[allow(clippy::needless_collect)] // 
https://github.com/rust-lang/rust-clippy/issues/7207\n\n let threads = (0..3)\n\n .map(|_| {\n\n let counter = Arc::clone(&counter);\n\n std::thread::spawn(move || {\n\n let scheduler = DfsScheduler::new(None, false);\n\n let runner = Runner::new(scheduler, Default::default());\n\n runner.run(move || {\n\n let thds = (0..2)\n\n .map(|_| {\n\n let counter = Arc::clone(&counter);\n\n thread::spawn(move || {\n", "file_path": "tests/basic/once.rs", "rank": 69, "score": 99157.03620501544 }, { "content": "#[test]\n\n#[should_panic(expected = \"tried to acquire a RwLock it already holds\")]\n\nfn async_match_deadlock_replay() {\n\n // Deterministically replay a deadlocking execution so we can, for example, single-step through\n\n // it in a debugger.\n\n shuttle::replay(|| tokio::block_on(main()), \"91010cbbc0daf8c5a5a9b162a08a08\")\n\n}\n", "file_path": "tests/demo/async_match_deadlock.rs", "rank": 70, "score": 98392.50950929451 }, { "content": "/// Create an unbounded channel\n\npub fn channel<T>() -> (Sender<T>, Receiver<T>) {\n\n let channel = Arc::new(Channel::new(None));\n\n let sender = Sender {\n\n inner: Arc::clone(&channel),\n\n };\n\n let receiver = Receiver {\n\n inner: Arc::clone(&channel),\n\n };\n\n (sender, receiver)\n\n}\n\n\n", "file_path": "src/sync/mpsc.rs", "rank": 71, "score": 98017.22713579392 }, { "content": "#[test]\n\nfn wake_after_finish() {\n\n #[derive(Clone)]\n\n struct Future1 {\n\n // We don't care about interleaving this lock; just using it to share the waker across tasks\n\n waker: std::sync::Arc<std::sync::Mutex<Option<Waker>>>,\n\n }\n\n\n\n impl Future1 {\n\n fn new() -> Self {\n\n Self {\n\n waker: std::sync::Arc::new(std::sync::Mutex::new(None)),\n\n }\n\n }\n\n }\n\n\n\n impl Future for Future1 {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n *self.waker.lock().unwrap() = Some(cx.waker().clone());\n", "file_path": "tests/future/waker.rs", "rank": 72, "score": 
96515.00689101793 }, { "content": "#[test]\n\nfn basic_static_dfs() {\n\n basic_static(2, |f| check_dfs(f, None));\n\n}\n\n\n", "file_path": "tests/basic/once.rs", "rank": 73, "score": 96461.80704410918 }, { "content": "#[test]\n\nfn basic_static_pct() {\n\n basic_static(10, |f| check_pct(f, 1000, 3));\n\n}\n\n\n\n// Test that multiple Once cells race for initialization independently\n", "file_path": "tests/basic/once.rs", "rank": 74, "score": 96461.80704410918 }, { "content": "#[test]\n\nfn unlock_yields() {\n\n let observed_values = Arc::new(std::sync::Mutex::new(HashSet::new()));\n\n let observed_values_clone = Arc::clone(&observed_values);\n\n\n\n check_dfs(\n\n move || {\n\n let lock = Arc::new(Mutex::new(0usize));\n\n\n\n let add_thread = {\n\n let lock = Arc::clone(&lock);\n\n thread::spawn(move || {\n\n *lock.lock().unwrap() += 1;\n\n *lock.lock().unwrap() += 1;\n\n })\n\n };\n\n let mul_thread = {\n\n let lock = Arc::clone(&lock);\n\n thread::spawn(move || {\n\n *lock.lock().unwrap() *= 2;\n\n })\n", "file_path": "tests/basic/mutex.rs", "rank": 75, "score": 96387.63555196143 }, { "content": "#[test]\n\nfn async_yield() {\n\n check_dfs(\n\n || {\n\n let v = future::block_on(async {\n\n future::yield_now().await;\n\n 42u32\n\n });\n\n assert_eq!(v, 42u32);\n\n },\n\n None,\n\n )\n\n}\n\n\n", "file_path": "tests/future/basic.rs", "rank": 76, "score": 96387.63555196143 }, { "content": "// Slightly generalized version of barrier test from std::sync\n\nfn barrier_test(n: usize, c: usize) {\n\n let barrier = Arc::new(Barrier::new(c));\n\n let (tx, rx) = channel();\n\n\n\n let handles = (0..n - 1)\n\n .map(|_| {\n\n let barrier = Arc::clone(&barrier);\n\n let tx = tx.clone();\n\n thread::spawn(move || {\n\n tx.send(barrier.wait().is_leader()).unwrap();\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n // At this point, all spawned threads should be blocked,\n\n // so we shouldn't get anything from the port\n\n // TODO uncomment this line after we add support for 
try_recv()\n\n // assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));\n\n\n\n let mut leader_found = barrier.wait().is_leader();\n", "file_path": "tests/basic/barrier.rs", "rank": 77, "score": 95663.33579704384 }, { "content": "#[test]\n\nfn max_steps_none() {\n\n let counter = Arc::new(AtomicUsize::new(0));\n\n let counter_clone = Arc::clone(&counter);\n\n\n\n let mut config = Config::new();\n\n config.max_steps = MaxSteps::None;\n\n\n\n let scheduler = RandomScheduler::new(10);\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || {\n\n for _ in 0..100 {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n assert_eq!(counter_clone.load(Ordering::SeqCst), 100 * 10);\n\n}\n\n\n", "file_path": "tests/basic/execution.rs", "rank": 78, "score": 93991.23635285269 }, { "content": "#[test]\n\nfn replay_persist_none() {\n\n let result = panic::catch_unwind(|| {\n\n let scheduler = PctScheduler::new(2, 100);\n\n let mut config = Config::new();\n\n config.failure_persistence = FailurePersistence::None;\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(concurrent_increment_buggy);\n\n })\n\n .expect_err(\"test should panic\");\n\n let output = result.downcast::<String>().unwrap();\n\n assert!(output.contains(\"counter is wrong\"));\n\n // All our current failure persistence modes print the word \"schedule\", so check that's missing\n\n assert!(!output.contains(\"schedule\"));\n\n}\n", "file_path": "tests/basic/replay.rs", "rank": 79, "score": 93991.23635285269 }, { "content": "#[test]\n\nfn iterations_without_running() {\n\n let metrics = MetricsSubscriber::new();\n\n\n\n {\n\n let _guard = tracing::subscriber::set_default(metrics.clone());\n\n let scheduler = RandomScheduler::new(10);\n\n let _runner = Runner::new(scheduler, Default::default());\n\n }\n\n\n\n assert_eq!(metrics.iterations.load(Ordering::SeqCst), 0);\n\n}\n", "file_path": "tests/basic/metrics.rs", "rank": 80, "score": 
93991.23635285269 }, { "content": "#[test]\n\nfn async_thread_yield() {\n\n // This tests if thread::yield_now can be called from within an async block\n\n check_dfs(\n\n || {\n\n future::spawn(async move {\n\n thread::yield_now();\n\n });\n\n future::spawn(async move {});\n\n },\n\n None,\n\n )\n\n}\n\n\n", "file_path": "tests/future/basic.rs", "rank": 81, "score": 93876.12599249977 }, { "content": "#[test]\n\nfn thread_yield_point() {\n\n let success = Arc::new(AtomicU8::new(0));\n\n let success_clone = Arc::clone(&success);\n\n\n\n // We want to see executions that include both threads running first, otherwise we have\n\n // messed up the yieldpoints around spawn.\n\n check_random(\n\n move || {\n\n let flag = Arc::new(AtomicBool::new(false));\n\n let flag_clone = Arc::clone(&flag);\n\n\n\n thread::spawn(move || {\n\n flag_clone.store(true, Ordering::SeqCst);\n\n });\n\n\n\n if flag.load(Ordering::SeqCst) {\n\n success.fetch_or(0x1, Ordering::SeqCst);\n\n } else {\n\n success.fetch_or(0x2, Ordering::SeqCst);\n\n }\n\n },\n\n 100,\n\n );\n\n\n\n assert_eq!(success_clone.load(Ordering::SeqCst), 0x3);\n\n}\n\n\n", "file_path": "tests/basic/thread.rs", "rank": 82, "score": 93876.12599249977 }, { "content": "#[test]\n\nfn max_steps_continue() {\n\n let counter = Arc::new(AtomicUsize::new(0));\n\n let counter_clone = Arc::clone(&counter);\n\n\n\n let mut config = Config::new();\n\n config.max_steps = MaxSteps::ContinueAfter(50);\n\n\n\n let scheduler = RandomScheduler::new(10);\n\n let runner = Runner::new(scheduler, config);\n\n runner.run(move || {\n\n for _ in 0..100 {\n\n counter.fetch_add(1, Ordering::SeqCst);\n\n thread::yield_now();\n\n }\n\n });\n\n\n\n assert_eq!(counter_clone.load(Ordering::SeqCst), 50 * 10);\n\n}\n\n\n", "file_path": "tests/basic/execution.rs", "rank": 83, "score": 93868.920613374 }, { "content": "#[test]\n\nfn wake_self_on_join_handle() {\n\n check_dfs(\n\n || {\n\n let yielder = future::spawn(async move {\n\n 
future::yield_now().await;\n\n });\n\n\n\n struct Timeout<F: Future> {\n\n inner: Pin<Box<F>>,\n\n counter: u8,\n\n }\n\n\n\n impl<F> Future for Timeout<F>\n\n where\n\n F: Future,\n\n {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.counter == 0 {\n", "file_path": "tests/future/basic.rs", "rank": 84, "score": 91647.08190364644 }, { "content": "#[test]\n\nfn mpsc_send_from_outside_runtime() {\n\n check_dfs(\n\n || {\n\n let (tx1, rx1) = channel::<()>();\n\n let (tx2, rx2) = channel::<i32>();\n\n let t1 = thread::spawn(move || {\n\n tx1.send(()).unwrap();\n\n for _ in 0..7 {\n\n assert_eq!(rx2.recv().unwrap(), 1);\n\n }\n\n });\n\n rx1.recv().unwrap();\n\n let t2 = thread::spawn(move || {\n\n for _ in 0..7 {\n\n tx2.send(1).unwrap();\n\n }\n\n });\n\n t1.join().expect(\"thread panicked\");\n\n t2.join().expect(\"thread panicked\");\n\n },\n\n None,\n\n );\n\n}\n\n\n\n// From libstd test suite\n", "file_path": "tests/basic/mpsc.rs", "rank": 85, "score": 91639.36264003182 }, { "content": "#[test]\n\nfn drop_shuttle_yield_future() {\n\n let orderings = Arc::new(AtomicUsize::new(0));\n\n let async_accesses = Arc::new(AtomicUsize::new(0));\n\n let post_yield_accesses = Arc::new(AtomicUsize::new(0));\n\n let orderings_clone = orderings.clone();\n\n let async_accesses_clone = async_accesses.clone();\n\n let post_yield_accesses_clone = post_yield_accesses.clone();\n\n\n\n check_dfs(\n\n move || {\n\n orderings.fetch_add(1, Ordering::SeqCst);\n\n let async_accesses = async_accesses.clone();\n\n let post_yield_accesses = post_yield_accesses.clone();\n\n future::spawn(async move {\n\n async_accesses.fetch_add(1, Ordering::SeqCst);\n\n future::yield_now().await;\n\n post_yield_accesses.fetch_add(1, Ordering::SeqCst);\n\n });\n\n },\n\n None,\n", "file_path": "tests/future/basic.rs", "rank": 86, "score": 91527.99638755377 }, { "content": "// Barrier\n\nfn clock_barrier(n: usize) {\n\n // This test checks 
that threads waiting on a barrier inherit the clocks from all the other participants in the barrier.\n\n //\n\n // The test creates a barrier with bound n and creates n threads (including the main thread).\n\n // Each thread initially checks that its clock is nonzero only for the main thread, and then waits on the barrier.\n\n // When it exits the barrier, each thread checks that its current clock is nonzero for all threads.\n\n // For sanity checking, we also spawn a dummy thread and check that its clock entry is always 0.\n\n let barrier = Arc::new(Barrier::new(n));\n\n\n\n // Create dummy thread (should have id 1)\n\n thread::spawn(|| {\n\n assert_eq!(me(), 1usize);\n\n });\n\n\n\n let _thds = (0..n - 1)\n\n .map(|_| {\n\n let barrier = Arc::clone(&barrier);\n\n thread::spawn(move || {\n\n check_clock(|i, c| (c > 0) == (i == 0));\n\n barrier.wait();\n", "file_path": "tests/basic/clocks.rs", "rank": 87, "score": 89997.88623622448 }, { "content": "#[test]\n\nfn max_steps_early_exit_scheduler() {\n\n use shuttle::scheduler::{Schedule, Scheduler, TaskId};\n\n\n\n #[derive(Debug)]\n\n struct EarlyExitScheduler {\n\n iterations: usize,\n\n max_iterations: usize,\n\n steps: usize,\n\n max_steps: usize,\n\n }\n\n\n\n impl EarlyExitScheduler {\n\n fn new(max_iterations: usize, max_steps: usize) -> Self {\n\n Self {\n\n iterations: 0,\n\n max_iterations,\n\n steps: 0,\n\n max_steps,\n\n }\n\n }\n", "file_path": "tests/basic/execution.rs", "rank": 88, "score": 89443.1406689577 }, { "content": "// RWLocks\n\nfn clock_rwlock(num_writers: usize, num_readers: usize) {\n\n // This test checks that when a thread acquires a RwLock, it inherits the clocks of writers that\n\n // accessed the lock before it. It's the same as `clock_mutex`, except that readers don't update\n\n // the set S, and aren't required to appear in the clock for future lock holders.\n\n //\n\n // TODO this test is pretty weak. 
Testing readers is hard because they race with each other; for\n\n // example, a reader might see the clock update from another reader before that reader has a\n\n // chance to update the set S. Causality is also pretty fuzzy for readers (see the TODOs in the\n\n // RwLock implementation). So we don't test very much about them here.\n\n let set = Arc::new(std::sync::Mutex::new(HashSet::from([0])));\n\n let lock = Arc::new(RwLock::new(()));\n\n\n\n // Create dummy thread (should have id 1)\n\n thread::spawn(|| {\n\n assert_eq!(me(), 1usize);\n\n });\n\n\n\n // Spawn the writers\n\n let _thds = (0..num_writers)\n\n .map(|_| {\n", "file_path": "tests/basic/clocks.rs", "rank": 89, "score": 88153.0555282512 }, { "content": "// Based on Fig 1(b) from the PCT paper. We model NULL pointer dereference with an Option unwrap.\n\nfn figure1b(num_threads: usize) {\n\n assert!(num_threads >= 2);\n\n\n\n let x1 = Arc::new(Mutex::new(Some(1)));\n\n let x2 = Arc::clone(&x1);\n\n\n\n // Optionally, spawn a bunch of threads that add scheduling choice points, each taking 5 steps\n\n for _ in 0..num_threads - 2 {\n\n thread::spawn(|| {\n\n for _ in 0..5 {\n\n thread::sleep(Duration::from_millis(1));\n\n }\n\n });\n\n }\n\n\n\n // Main worker threads take 10 steps each\n\n thread::spawn(move || {\n\n for _ in 0..5 {\n\n thread::sleep(Duration::from_millis(1));\n\n }\n", "file_path": "tests/basic/pct.rs", "rank": 90, "score": 87651.87593781405 }, { "content": "fn clock_once(num_threads: usize) {\n\n let once = Arc::new(Once::new());\n\n let init = Arc::new(std::sync::atomic::AtomicUsize::new(0));\n\n\n\n let threads = (0..num_threads)\n\n .map(|_| {\n\n let once = Arc::clone(&once);\n\n let init = Arc::clone(&init);\n\n thread::spawn(move || {\n\n check_clock(|i, c| (c > 0) == (i == 0));\n\n once.call_once(|| init.store(me(), std::sync::atomic::Ordering::SeqCst));\n\n let who_inited = init.load(std::sync::atomic::Ordering::SeqCst);\n\n // should have inhaled the clock of the thread that 
inited the Once, but might also\n\n // have inhaled the clocks of threads that we were racing with for initialization\n\n check_clock(|i, c| !(i == who_inited || i == 0 || i == me()) || c > 0);\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n for thd in threads {\n\n thd.join().unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 91, "score": 87651.87593781405 }, { "content": "fn reader(buffer: BoundedBuffer<usize>, iterations: usize) {\n\n for _ in 0..iterations {\n\n let _ = buffer.take();\n\n }\n\n}\n\n\n", "file_path": "tests/demo/bounded_buffer.rs", "rank": 92, "score": 87095.66055913802 }, { "content": "fn writer(buffer: BoundedBuffer<usize>, iterations: usize) {\n\n for i in 0..iterations {\n\n buffer.put(i);\n\n }\n\n}\n\n\n\n/// We’ll now write a small test driver program, which Coyote can use to find the bug.\n\n///\n\n/// The first test you write might look like this. Here we setup two tasks. First is a reader\n\n/// calling Take and the other is a Writer calling Put.\n\n///\n\n/// Clearly, we have to Put the same number of items as we Take.\n\n///\n\n/// Otherwise, there will be a trivial deadlock waiting for more items.\n\n///\n\n/// We have matched both in this test with 10 iterations of each Put and Take. 
We find no deadlock\n\n/// when we run the test above, despite Coyote systematically exploring different possible\n\n/// interleavings between the Put and Take calls.\n", "file_path": "tests/demo/bounded_buffer.rs", "rank": 93, "score": 87095.66055913802 }, { "content": "fn max_steps(n: usize) -> Config {\n\n let mut config = Config::new();\n\n config.max_steps = MaxSteps::ContinueAfter(n);\n\n config\n\n}\n\n\n", "file_path": "tests/basic/dfs.rs", "rank": 94, "score": 86207.57777658677 }, { "content": "fn clock_mutex(num_threads: usize) {\n\n // This test checks that when a thread acquires a lock, it inherits the vector clocks of\n\n // threads that accessed the lock before it.\n\n //\n\n // Test: create a mutex-protected set, initialized with 0 (the id of the main thread)\n\n // and spawn N threads where each thread does the following:\n\n // (1) check that its own initial vector clock only has nonzero for the creator (thread 0)\n\n // this checks that when a thread is created, it only inherits the clock of the spawner\n\n // (2) lock the set and add its own thread id to it; let the resulting set be S\n\n // (3) read its own clock again, call this C\n\n // (4) check that the only nonzero entries in C are for the threads in S\n\n // For sanity checking, we also spawn an initial dummy thread (with id 1) and ensure that its\n\n // clock is always 0.\n\n let mut set = HashSet::new();\n\n set.insert(0);\n\n let set = Arc::new(Mutex::new(set));\n\n\n\n // Create dummy thread (should have id 1)\n\n thread::spawn(|| {\n\n assert_eq!(me(), 1usize);\n", "file_path": "tests/basic/clocks.rs", "rank": 95, "score": 85453.6718727536 }, { "content": "// Threads\n\nfn clock_threads(num_threads: usize) {\n\n // Use an AtomicBool to create a synchronization point so a thread's clock is incremented.\n\n let flag = Arc::new(AtomicBool::new(false));\n\n let handles = (1..num_threads + 1)\n\n .map(|k| {\n\n let flag = Arc::clone(&flag);\n\n thread::spawn(move || {\n\n assert_eq!(me(), 
k);\n\n check_clock(|i, c| (c > 0) == (i == 0));\n\n assert!(!flag.load(Ordering::SeqCst));\n\n check_clock(|i, c| (c > 0) == (i == 0) || (i == k));\n\n k\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n // As each thread joins, we get knowledge of its vector clock.\n\n for handle in handles {\n\n let k = handle.join().unwrap();\n\n check_clock(move |i, c| (c > 0) == (i <= k));\n\n }\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 96, "score": 85453.6718727536 }, { "content": "fn mpsc_senders_with_blocking_inner(num_senders: usize, channel_size: usize) {\n\n assert!(num_senders >= channel_size);\n\n let num_receives = num_senders - channel_size;\n\n let (tx, rx) = sync_channel::<usize>(channel_size);\n\n let senders = (0..num_senders)\n\n .map(move |i| {\n\n let tx = tx.clone();\n\n thread::spawn(move || {\n\n tx.send(i).unwrap();\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n // Receive enough messages to ensure no sender will block\n\n for _ in 0..num_receives {\n\n rx.recv().unwrap();\n\n }\n\n for sender in senders {\n\n sender.join().unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/basic/mpsc.rs", "rank": 97, "score": 84918.07556080859 }, { "content": "fn clock_condvar_notify_one(num_notifiers: usize, num_waiters: usize) {\n\n let lock = Arc::new(Mutex::new(0usize));\n\n let cond = Arc::new(Condvar::new());\n\n\n\n for _ in 0..num_notifiers {\n\n let lock = Arc::clone(&lock);\n\n let cond = Arc::clone(&cond);\n\n thread::spawn(move || {\n\n assert!(me() <= num_notifiers);\n\n *lock.lock().unwrap() = me();\n\n cond.notify_one();\n\n });\n\n }\n\n\n\n for _ in 0..num_waiters {\n\n let lock = Arc::clone(&lock);\n\n let cond = Arc::clone(&cond);\n\n thread::spawn(move || {\n\n let mut guard = lock.lock().unwrap();\n\n while *guard == 0 {\n\n check_clock(|i, c| !(i >= 1 && i <= num_notifiers) || (c == 0)); // no notifier has gone yet\n\n guard = cond.wait(guard).unwrap();\n\n }\n\n // Note that since all the threads touch the lock, any of them may have 
preceded this thread.\n\n // But we know for sure that the thread that unblocked us should causally precede us.\n\n check_clock(|i, c| (i != *guard) || (c > 0));\n\n });\n\n }\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 98, "score": 84918.07556080859 }, { "content": "fn clock_condvar_notify_all(num_waiters: usize) {\n\n let lock = Arc::new(Mutex::new(0usize));\n\n let cond = Arc::new(Condvar::new());\n\n\n\n {\n\n let lock = Arc::clone(&lock);\n\n let cond = Arc::clone(&cond);\n\n thread::spawn(move || {\n\n assert_eq!(me(), 1);\n\n *lock.lock().unwrap() = me();\n\n cond.notify_all();\n\n });\n\n }\n\n\n\n for _ in 0..num_waiters {\n\n let lock = Arc::clone(&lock);\n\n let cond = Arc::clone(&cond);\n\n thread::spawn(move || {\n\n let mut guard = lock.lock().unwrap();\n\n while *guard == 0 {\n\n check_clock(|i, c| (i != 1) || (c == 0)); // notifier hasn't been scheduled\n\n guard = cond.wait(guard).unwrap();\n\n }\n\n // Note that since all the threads touch the lock, any of them may have preceded this thread.\n\n // But we know for sure that the thread that unblocked us should causally precede us.\n\n check_clock(|i, c| (i != *guard) || (c > 0));\n\n });\n\n }\n\n}\n\n\n", "file_path": "tests/basic/clocks.rs", "rank": 99, "score": 83389.73222433131 } ]
Rust
fyrox-sound/src/dsp/filters.rs
Libertus-Lab/Fyrox
c925304f42744659fd3a6be5c4a1a8609556033a
use crate::dsp::DelayLine; use fyrox_core::{ inspect::{Inspect, PropertyInfo}, visitor::{Visit, VisitResult, Visitor}, }; #[derive(Debug, Clone, Visit)] pub struct OnePole { a0: f32, b1: f32, last: f32, } impl Default for OnePole { fn default() -> Self { Self { a0: 1.0, b1: 0.0, last: 0.0, } } } fn get_b1(fc: f32) -> f32 { (-2.0 * std::f32::consts::PI * fc.min(1.0).max(0.0)).exp() } impl OnePole { pub fn new(fc: f32) -> Self { let b1 = get_b1(fc); Self { b1, a0: 1.0 - b1, last: 0.0, } } pub fn set_fc(&mut self, fc: f32) { self.b1 = get_b1(fc); self.a0 = 1.0 - self.b1; } pub fn set_pole(&mut self, pole: f32) { self.b1 = pole.min(1.0).max(0.0); self.a0 = 1.0 - self.b1; } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample * self.a0 + self.last * self.b1; self.last = result; result } } #[derive(Debug, Clone, Visit)] pub struct LpfComb { low_pass: OnePole, delay_line: DelayLine, feedback: f32, } impl Default for LpfComb { fn default() -> Self { Self { low_pass: Default::default(), delay_line: Default::default(), feedback: 0.0, } } } impl LpfComb { pub fn new(len: usize, fc: f32, feedback: f32) -> Self { Self { low_pass: OnePole::new(fc), delay_line: DelayLine::new(len), feedback, } } pub fn set_feedback(&mut self, feedback: f32) { self.feedback = feedback; } pub fn feedback(&self) -> f32 { self.feedback } pub fn set_fc(&mut self, fc: f32) { self.low_pass.set_fc(fc) } pub fn len(&self) -> usize { self.delay_line.len() } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample + self.feedback * self.low_pass.feed(self.delay_line.last()); self.delay_line.feed(result); result } } #[derive(Debug, Clone, Visit)] pub struct AllPass { delay_line: DelayLine, gain: f32, } impl Default for AllPass { fn default() -> Self { Self { delay_line: Default::default(), gain: 1.0, } } } impl AllPass { pub fn new(len: usize, gain: f32) -> Self { Self { delay_line: DelayLine::new(len), gain, } } pub fn set_gain(&mut self, gain: f32) { self.gain = gain; } pub fn 
len(&self) -> usize { self.delay_line.len() } pub fn feed(&mut self, sample: f32) -> f32 { let delay_line_output = self.delay_line.last(); let am_arm = -self.gain * delay_line_output; let sum_left = sample + am_arm; let b0_arm = sum_left * self.gain; self.delay_line.feed(sum_left); delay_line_output + b0_arm } } pub enum BiquadKind { LowPass, HighPass, BandPass, AllPass, LowShelf, HighShelf, } #[derive(Clone, Debug, Inspect, Visit)] pub struct Biquad { pub b0: f32, pub b1: f32, pub b2: f32, pub a1: f32, pub a2: f32, #[inspect(skip)] prev1: f32, #[inspect(skip)] prev2: f32, } impl Biquad { pub fn new(kind: BiquadKind, fc: f32, gain: f32, quality: f32) -> Self { let mut filter = Self::default(); filter.tune(kind, fc, gain, quality); filter } pub fn from_coefficients(b0: f32, b1: f32, b2: f32, a1: f32, a2: f32) -> Self { Self { b0, b1, b2, a1, a2, prev1: 0.0, prev2: 0.0, } } pub fn tune(&mut self, kind: BiquadKind, fc: f32, gain: f32, quality: f32) { let w0 = 2.0 * std::f32::consts::PI * fc; let w0_cos = w0.cos(); let w0_sin = w0.sin(); let alpha = w0_sin / (2.0 * quality); let (b0, b1, b2, a0, a1, a2) = match kind { BiquadKind::LowPass => { let b0 = (1.0 - w0_cos) / 2.0; let b1 = 1.0 - w0_cos; let b2 = b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::HighPass => { let b0 = (1.0 + w0_cos) / 2.0; let b1 = -(1.0 + w0_cos); let b2 = b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::BandPass => { let b0 = w0_sin / 2.0; let b1 = 0.0; let b2 = -b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::AllPass => { let b0 = 1.0 - alpha; let b1 = -2.0 * w0_cos; let b2 = 1.0 + alpha; let a0 = b2; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::LowShelf => { let sq = 2.0 * gain.sqrt() * alpha; let b0 = gain * ((gain + 1.0) - (gain - 1.0) * w0_cos + sq); let b1 = 2.0 * 
gain * ((gain - 1.0) - (gain + 1.0) * w0_cos); let b2 = gain * ((gain + 1.0) - (gain - 1.0) * w0_cos - sq); let a0 = (gain + 1.0) + (gain - 1.0) * w0_cos + sq; let a1 = -2.0 * ((gain - 1.0) + (gain + 1.0) * w0_cos); let a2 = (gain + 1.0) + (gain - 1.0) * w0_cos - sq; (b0, b1, b2, a0, a1, a2) } BiquadKind::HighShelf => { let sq = 2.0 * gain.sqrt() * alpha; let b0 = gain * ((gain + 1.0) + (gain - 1.0) * w0_cos + sq); let b1 = -2.0 * gain * ((gain - 1.0) + (gain + 1.0) * w0_cos); let b2 = gain * ((gain + 1.0) + (gain - 1.0) * w0_cos - sq); let a0 = (gain + 1.0) - (gain - 1.0) * w0_cos + sq; let a1 = 2.0 * ((gain - 1.0) - (gain + 1.0) * w0_cos); let a2 = (gain + 1.0) - (gain - 1.0) * w0_cos - sq; (b0, b1, b2, a0, a1, a2) } }; self.b0 = b0 / a0; self.b1 = b1 / a0; self.b2 = b2 / a0; self.a1 = a1 / a0; self.a2 = a2 / a0; } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample * self.b0 + self.prev1; self.prev1 = sample * self.b1 - result * self.a1 + self.prev2; self.prev2 = sample * self.b2 - result * self.a2; result } } impl Default for Biquad { fn default() -> Self { Self { b0: 1.0, b1: 0.0, b2: 0.0, a1: 0.0, a2: 0.0, prev1: 0.0, prev2: 0.0, } } }
use crate::dsp::DelayLine; use fyrox_core::{ inspect::{Inspect, PropertyInfo}, visitor::{Visit, VisitResult, Visitor}, }; #[derive(Debug, Clone, Visit)] pub struct OnePole { a0: f32, b1: f32, last: f32, } impl Default for OnePole { fn default() -> Self { Self { a0: 1.0, b1: 0.0, last: 0.0, } } } fn get_b1(fc: f32) -> f32 { (-2.0 * std::f32::consts::PI * fc.min(1.0).max(0.0)).exp() } impl OnePole { pub fn new(fc: f32) -> Self { let b1 = get_b1(fc); Self { b1, a0: 1.0 - b1, last: 0.0, } } pub fn set_fc(&mut self, fc: f32) { self.b1 = get_b1(fc); self.a0 = 1.0 - self.b1; } pub fn set_pole(&mut self, pole: f32) { self.b1 = pole.min(1.0).max(0.0); self.a0 = 1.0 - self.b1; } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample * self.a0 + self.last * self.b1; self.last = result; result } } #[derive(Debug, Clone, Visit)] pub struct LpfComb { low_pass: OnePole, delay_line: DelayLine, feedback: f32, } impl Default for LpfComb { fn default() -> Self { Self { low_pass: Default::default(), delay_line: Default::default(), feedback: 0.0, } } } impl LpfComb { pub fn new(len: usize, fc: f32, feedback: f32) -> Self { Self { low_pass: OnePole::new(fc), delay_line: DelayLine::new(len), feedback, } } pub fn set_feedback(&mut self, feedback: f32) { self.feedback = feedback; } pub fn feedback(&self) -> f32 { self.feedback } pub fn set_fc(&mut self, fc: f32) { self.low_pass.set_fc(fc) } pub fn len(&self) -> usize { self.delay_line.len() } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample + self.feedback * self.low_pass.feed(self.delay_line.last()); self.delay_line.feed(result); result } } #[derive(Debug, Clone, Visit)] pub struct AllPass { delay_line: DelayLine, gain: f32, } impl Default for AllPass { fn default() -> Self { Self { delay_line: Default::default(), gain: 1.0, } } } impl AllPass { pub fn new(len: usize, gain: f32) -> Self { Self { delay_line: DelayLine::new(len), gain, } } pub fn set_gain(&mut self, gain: f32) { self.gain = gain; } pub fn 
len(&self) -> usize { self.delay_line.len() } pub fn feed(&mut self, sample: f32) -> f32 { let delay_line_output = self.delay_line.last(); let am_arm = -self.gain * delay_line_output; let sum_left = sample + am_arm; let b0_arm = sum_left * self.gain; self.delay_line.feed(sum_left); delay_line_output + b0_arm } } pub enum BiquadKind { LowPass, HighPass, BandPass, AllPass, LowShelf, HighShelf, } #[derive(Clone, Debug, Inspect, Visit)] pub struct Biquad { pub b0: f32, pub b1: f32, pub b2: f32, pub a1: f32, pub a2: f32, #[inspect(skip)] prev1: f32, #[inspect(skip)] prev2: f32, } impl Biquad { pub fn new(kind: BiquadKind, fc: f32, gain: f32, quality: f32) -> Self { let mut filter = Self::default(); filter.tune(kind, fc, gain, quality); filter } pub fn from_coefficients(b0: f32, b1: f32, b2: f32, a1: f32, a2: f32) -> Self { Self { b0, b1, b2, a1, a2, prev1: 0.0, prev2: 0.0, } } pub fn tune(&mut self, kind: BiquadKind, fc: f32, gain: f32, quality: f32) { let w0 = 2.0 * std::f32::consts::PI * fc; let w0_cos = w0.cos(); let w0_sin = w0.sin(); let alpha = w0_sin / (2.0 * quality); let (b0, b1, b2, a0, a1, a2) = match kind { BiquadKind::LowPass => { let b0 = (1.0 - w0_cos) / 2.0; let b1 = 1.0 - w0_cos; let b2 = b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::HighPass => { let b0 = (1.0 + w0_cos) / 2.0; let b1 = -(1.0 + w0_cos); let b2 = b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::BandPass => { let b0 = w0_sin / 2.0; let b1 = 0.0; let b2 = -b0; let a0 = 1.0 + alpha; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::AllPass => { let b0 = 1.0 - alpha; let b1 = -2.0 * w0_cos; let b2 = 1.0 + alpha; let a0 = b2; let a1 = -2.0 * w0_cos; let a2 = 1.0 - alpha; (b0, b1, b2, a0, a1, a2) } BiquadKind::LowShelf => { let sq = 2.0 * gain.sqrt() * alpha; let b0 = gain * ((gain + 1.0) - (gain - 1.0) * w0_cos + sq); let b1 = 2.0 * 
gain * ((gain - 1.0) - (gain + 1.0) * w0_cos); let b2 = gain * ((gain + 1.0) - (gain - 1.0) * w0_cos - sq); let a0 = (gain + 1.0) + (gain - 1.0) * w0_cos + sq; let a1 = -2.0 * ((gain - 1.0) + (gain + 1.0) * w0_cos); let a2 = (gain + 1.0) + (gain - 1.0) * w0_cos - sq; (b0, b1, b2, a0, a1, a2) } BiquadKind::HighShelf => { let sq = 2.0 * gain.sqrt() * alpha; let b0 = gain * ((gain + 1.0) + (gain - 1.0) * w0_cos + sq); let b1 = -2.0 * gain * ((gain - 1.0) + (gain + 1.0) * w0_cos); let b2 = gain * ((gain + 1.0) + (gain - 1.0) * w0_cos - sq); let a0 = (gain + 1.0) - (gain - 1.0) * w0_cos + sq; let a1 = 2.0 * ((gain - 1.0) - (gain + 1.0) * w0_cos); let a2 = (gain + 1.0) - (gain - 1.0) * w0_cos - sq; (b0, b1, b2, a0, a1, a2) } }; self.b0 = b0 / a0; self.b1 = b1 / a0; self.b2 = b2 / a0; self.a1 = a1 / a0; self.a2 = a2 / a0; } pub fn feed(&mut self, sample: f32) -> f32 { let result = sample * self.b0 + self.prev1; self.prev1 = sample * self.b1 - result * self.a1 + self.prev2; self.prev2 = sample * self.b2 - result * self.a2; result } } impl Default for Biquad {
}
fn default() -> Self { Self { b0: 1.0, b1: 0.0, b2: 0.0, a1: 0.0, a2: 0.0, prev1: 0.0, prev2: 0.0, } }
function_block-full_function
[ { "content": "fn write_node(name: &str, node: &mut Node, visitor: &mut Visitor) -> VisitResult {\n\n let mut region = visitor.enter_region(name)?;\n\n\n\n let mut id = node.id();\n\n id.visit(\"TypeUuid\", &mut region)?;\n\n\n\n node.visit(\"NodeData\", &mut region)?;\n\n\n\n Ok(())\n\n}\n\n\n\nimpl Visit for NodeContainer {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {\n\n let mut region = visitor.enter_region(name)?;\n\n\n\n let mut is_some = if self.is_some() { 1u8 } else { 0u8 };\n\n is_some.visit(\"IsSome\", &mut region)?;\n\n\n\n if is_some != 0 {\n\n if region.is_reading() {\n", "file_path": "src/scene/node/container.rs", "rank": 1, "score": 293204.8694213878 }, { "content": "#[inline]\n\npub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 2, "score": 283810.21516115195 }, { "content": "fn read_node(name: &str, visitor: &mut Visitor) -> Result<Node, VisitError> {\n\n let node = {\n\n // Handle legacy nodes.\n\n let mut kind_id = 0u8;\n\n if kind_id.visit(\"KindId\", visitor).is_ok() {\n\n let mut node = match kind_id {\n\n 0 => Node::new(Pivot::default()),\n\n 1 => {\n\n let mut region = visitor.enter_region(name)?;\n\n\n\n let mut light_id = 0u32;\n\n light_id.visit(\"KindId\", &mut region)?;\n\n\n\n let mut light_node = match light_id {\n\n 0 => Node::new(SpotLight::default()),\n\n 1 => Node::new(PointLight::default()),\n\n 2 => Node::new(DirectionalLight::default()),\n\n _ => {\n\n return 
Err(VisitError::User(format!(\n\n \"Invalid legacy light kind {}\",\n", "file_path": "src/scene/node/container.rs", "rank": 3, "score": 277158.78388240293 }, { "content": "/// Creates new window using specified window function.\n\n/// <https://en.wikipedia.org/wiki/Window_function>\n\npub fn make_window<W: Fn(usize, usize) -> f32>(sample_count: usize, func: W) -> Vec<f32> {\n\n (0..sample_count).map(|i| func(i, sample_count)).collect()\n\n}\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 4, "score": 268456.1095306186 }, { "content": "/// Calculates single coefficient of Hamming window.\n\n/// <https://en.wikipedia.org/wiki/Window_function#Hamming_window>\n\npub fn hamming_window(i: usize, sample_count: usize) -> f32 {\n\n 0.54 - 0.46 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 5, "score": 263448.77280674374 }, { "content": "/// Calculates single coefficient of Hann window.\n\n/// <https://en.wikipedia.org/wiki/Hann_function>\n\npub fn hann_window(i: usize, sample_count: usize) -> f32 {\n\n 0.5 - 0.5 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 6, "score": 263448.77280674374 }, { "content": "/// Saves given `data` and overwrites `data_default` with the saved data.\n\n///\n\n/// Test the equality after running this method!\n\npub fn save_load<T: Visit>(test_name: &str, data: &mut T, data_default: &mut T) {\n\n // Locate output path\n\n let (bin, txt) = {\n\n let manifest_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let root = PathBuf::from(manifest_dir).join(\"test_output\");\n\n let _ = std::fs::create_dir(&root);\n\n (\n\n root.join(format!(\"{}.bin\", test_name)),\n\n root.join(format!(\"{}.txt\", test_name)),\n\n )\n\n };\n\n\n\n // Save `data`\n\n {\n\n let mut visitor = Visitor::new();\n\n data.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n 
visitor.save_binary(&bin).unwrap();\n\n let mut file = File::create(&txt).unwrap();\n\n file.write_all(visitor.save_text().as_bytes()).unwrap();\n", "file_path": "fyrox-core-derive/tests/it/visit.rs", "rank": 7, "score": 256202.01616297854 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - v).try_normalize(f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let Some(right) = (v2 - v).try_normalize(f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "fyrox-core/src/math/triangulator.rs", "rank": 8, "score": 250683.66110117562 }, { "content": "/// A trait for user-defined actions for behavior tree.\n\npub trait Behavior<'a>: Visit + Default + PartialEq + Debug {\n\n /// A context in which the behavior will be performed.\n\n type Context;\n\n\n\n /// A function that will be called each frame depending on\n\n /// the current execution path of the behavior tree it belongs\n\n /// to.\n\n fn tick(&mut self, context: &mut Self::Context) -> Status;\n\n}\n\n\n\n/// Root node of the tree.\n\n#[derive(Debug, PartialEq, Visit)]\n\npub struct RootNode<B> {\n\n child: Handle<BehaviorNode<B>>,\n\n}\n\n\n\nimpl<B> Default for RootNode<B> {\n\n fn default() -> Self {\n\n Self {\n\n child: Default::default(),\n", "file_path": "src/utils/behavior/mod.rs", "rank": 9, "score": 
247866.86060288685 }, { "content": "pub trait BaseScript: Visit + Inspect + Send + Debug + 'static {\n\n fn clone_box(&self) -> Box<dyn ScriptTrait>;\n\n}\n\n\n\nimpl<T> BaseScript for T\n\nwhere\n\n T: Clone + ScriptTrait,\n\n{\n\n fn clone_box(&self) -> Box<dyn ScriptTrait> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\npub struct ScriptContext<'a, 'b, 'c> {\n\n pub dt: f32,\n\n pub plugin: &'a mut dyn Plugin,\n\n pub node: &'b mut Node,\n\n pub handle: Handle<Node>,\n\n pub scene: &'c mut Scene,\n\n pub resource_manager: &'a ResourceManager,\n\n}\n\n\n", "file_path": "src/script/mod.rs", "rank": 10, "score": 247816.1315096252 }, { "content": "pub trait InspectableEnum: Debug + Inspect + 'static {}\n\n\n\nimpl<T: Debug + Inspect + 'static> InspectableEnum for T {}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum EnumPropertyEditorMessage {\n\n Variant(usize),\n\n PropertyChanged(PropertyChanged),\n\n}\n\n\n\nimpl EnumPropertyEditorMessage {\n\n define_constructor!(EnumPropertyEditorMessage:Variant => fn variant(usize), layout: false);\n\n define_constructor!(EnumPropertyEditorMessage:PropertyChanged => fn property_changed(PropertyChanged), layout: false);\n\n}\n\n\n\npub struct EnumPropertyEditor<T: InspectableEnum> {\n\n widget: Widget,\n\n variant_selector: Handle<UiNode>,\n\n inspector: Handle<UiNode>,\n\n definition: EnumPropertyEditorDefinition<T>,\n", "file_path": "fyrox-ui/src/inspector/editors/enumeration.rs", "rank": 11, "score": 245756.0458513006 }, { "content": "/// A trait for resource data.\n\npub trait ResourceData: 'static + Default + Debug + Visit + Send {\n\n /// Returns path of resource data.\n\n fn path(&self) -> Cow<Path>;\n\n\n\n /// Sets new path to resource data.\n\n fn set_path(&mut self, path: PathBuf);\n\n}\n\n\n", "file_path": "fyrox-resource/src/lib.rs", "rank": 12, "score": 242951.79310065665 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world 
has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 13, "score": 240081.4173366913 }, { "content": "/// Creates `Inspect` trait impl and field prop keys\n\npub fn create_inspect_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let prop_keys_impl = prop_keys::prop_keys_impl(ty_args);\n\n let trait_impl = self::inspect_trait_impl(ty_args, field_args, impl_body);\n\n\n\n quote! {\n\n #prop_keys_impl\n\n #trait_impl\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 14, "score": 234920.55887460534 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 15, "score": 231837.27319743088 }, { "content": "pub fn make_scene_file_filter() -> Filter {\n\n Filter::new(|p: &Path| {\n\n if let Some(ext) = p.extension() {\n\n ext.to_string_lossy().as_ref() == \"rgs\"\n\n } else {\n\n p.is_dir()\n\n }\n\n })\n\n}\n\n\n", "file_path": "editor/src/lib.rs", "rank": 16, "score": 230971.45420339616 }, { "content": "#[inline]\n\npub fn get_polygon_normal(polygon: &[Vector3<f32>]) -> Result<Vector3<f32>, &'static str> {\n\n let mut normal = Vector3::default();\n\n\n\n for (i, current) in polygon.iter().enumerate() {\n\n let next = polygon[(i + 1) % polygon.len()];\n\n normal.x += (current.y - next.y) * (current.z + next.z);\n\n normal.y += (current.z - next.z) * (current.x + next.x);\n\n normal.z += (current.x - next.x) * (current.y + next.y);\n\n }\n\n\n\n normal\n\n .try_normalize(f32::EPSILON)\n\n .ok_or(\"Unable to get normal of degenerated polygon!\")\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 17, "score": 229370.0246755001 }, { "content": "fn get_last_sample(buffer: &StreamingBuffer) -> (f32, f32) {\n\n let len = buffer.samples.len();\n\n if len == 0 {\n\n return (0.0, 0.0);\n\n }\n\n if buffer.channel_count == 2 {\n\n (buffer.samples[len - 2], buffer.samples[len - 1])\n\n } else {\n\n (buffer.samples[len - 1], buffer.samples[len - 1])\n\n }\n\n}\n\n\n\nimpl Drop for SoundSource {\n\n fn drop(&mut self) {\n\n if let Some(buffer) = self.buffer.as_ref() {\n\n let mut buffer = buffer.data_ref();\n\n if let SoundBufferState::Streaming(ref mut streaming) = *buffer {\n\n streaming.use_count = streaming.use_count.saturating_sub(1);\n\n }\n\n }\n", "file_path": "fyrox-sound/src/source.rs", "rank": 18, "score": 227664.0683318552 }, { 
"content": "#[inline(always)]\n\npub fn lerpf(a: f32, b: f32, t: f32) -> f32 {\n\n a + (b - a) * t\n\n}\n\n\n\n// https://en.wikipedia.org/wiki/Cubic_Hermite_spline\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 19, "score": 224444.15203210956 }, { "content": "#[inline]\n\npub fn ieee_remainder(x: f32, y: f32) -> f32 {\n\n x - (x / y).round() * y\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 20, "score": 223211.73295744258 }, { "content": "/// impl `Visit` for `struct`\n\nfn impl_visit_struct(\n\n ty_args: &args::TypeArgs,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n let visit_fn_body = if field_args.style == ast::Style::Unit {\n\n quote! { Ok(()) }\n\n } else {\n\n // `field.visit(..);` parts\n\n let field_visits =\n\n utils::create_field_visits(None, field_args.fields.iter(), field_args.style);\n\n\n\n quote! {\n\n let mut region = visitor.enter_region(name)?;\n\n #(self.#field_visits)*\n\n Ok(())\n\n }\n\n };\n\n\n\n utils::create_impl(ty_args, field_args.iter().cloned(), visit_fn_body)\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit.rs", "rank": 21, "score": 222142.21923848335 }, { "content": "pub fn make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "fyrox-ui/src/dock.rs", "rank": 22, "score": 220647.68326847837 }, { "content": "#[inline]\n\npub fn round_to_step(x: f32, step: f32) -> f32 {\n\n x - ieee_remainder(x, step)\n\n}\n\n\n", "file_path": 
"fyrox-core/src/math/mod.rs", "rank": 23, "score": 219713.83351770398 }, { "content": "#[inline]\n\npub fn wrap_angle(angle: f32) -> f32 {\n\n let two_pi = 2.0 * std::f32::consts::PI;\n\n\n\n if angle > 0.0 {\n\n angle % two_pi\n\n } else {\n\n (angle + two_pi) % two_pi\n\n }\n\n}\n\n\n\n/// There are two versions of remainder, the standard `%` operator which does `x - (x/y).trunc()*y` and IEEE remainder which does `x - (x/y).round()*y`.\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 24, "score": 219410.84163344247 }, { "content": "#[inline]\n\npub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 25, "score": 216400.82258233652 }, { "content": "pub fn enum_prop_ident(\n\n variant_args: &args::VariantArgs,\n\n nth: usize,\n\n field: &args::FieldArgs,\n\n) -> Ident {\n\n let variant_ident = &variant_args.ident;\n\n let field_ident = self::field_ident(&variant_args.fields, nth, field);\n\n\n\n let ident = format!(\"{}_{}\", variant_ident, field_ident).to_case(Case::UpperSnake);\n\n syn::parse_str(&ident).unwrap()\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 26, "score": 216069.58946626307 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(WrapMode::Word)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 27, "score": 215598.0793855338 }, { 
"content": "pub fn gen_inspect_fn_body(\n\n field_prefix: FieldPrefix,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n // `inspect` function body, consisting of a sequence of quotes\n\n let mut quotes = Vec::new();\n\n\n\n let props = field_args\n\n .fields\n\n .iter()\n\n // enumerate first, and then filter!\n\n .enumerate()\n\n .filter(|(_i, f)| !f.skip)\n\n .map(|(i, field)| self::quote_field_prop(&field_prefix, i, field, field_args.style));\n\n\n\n quotes.push(quote! {\n\n let mut props = Vec::new();\n\n #(props.push(#props);)*\n\n });\n\n\n\n // concatenate the quotes\n\n quote! {\n\n #(#quotes)*\n\n props\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 28, "score": 211950.22943305332 }, { "content": "#[inline]\n\npub fn solve_quadratic(a: f32, b: f32, c: f32) -> Option<[f32; 2]> {\n\n let discriminant = b * b - 4.0 * a * c;\n\n if discriminant < 0.0 {\n\n // No real roots\n\n None\n\n } else {\n\n // Dont care if quadratic equation has only one root (discriminant == 0), this is edge-case\n\n // which requires additional branching instructions which is not good for branch-predictor in CPU.\n\n let _2a = 2.0 * a;\n\n let discr_root = discriminant.sqrt();\n\n let r1 = (-b + discr_root) / _2a;\n\n let r2 = (-b - discr_root) / _2a;\n\n Some([r1, r2])\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 29, "score": 211255.65715689037 }, { "content": "// impl `#[derive(Visit)]` for `struct` or `enum`\n\npub fn impl_visit(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_visit_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variants) => self::impl_visit_enum(&ty_args, variants),\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit.rs", "rank": 30, "score": 210793.8850863793 }, { "content": "pub fn impl_inspect(ast: DeriveInput) -> TokenStream2 
{\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_inspect_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variant_args) => self::impl_inspect_enum(&ty_args, variant_args),\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect.rs", "rank": 31, "score": 210755.6453713467 }, { "content": "pub trait Visit {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult;\n\n}\n\n\n\nimpl Default for Visitor {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Visitor {\n\n const MAGIC: &'static str = \"RG3D\";\n\n\n\n pub fn new() -> Self {\n\n let mut nodes = Pool::new();\n\n let root = nodes.spawn(Node::new(\"__ROOT__\", Handle::NONE));\n\n Self {\n\n nodes,\n\n rc_map: FxHashMap::default(),\n\n arc_map: FxHashMap::default(),\n", "file_path": "fyrox-core/src/visitor.rs", "rank": 32, "score": 209185.7760263767 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 33, "score": 208348.75453148456 }, { "content": "pub fn read_binary<R>(file: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let total_length = file.seek(SeekFrom::End(0))?;\n\n file.seek(SeekFrom::Start(0))?;\n\n\n\n // Ignore all stuff until version.\n\n let mut temp = [0; 
23];\n\n file.read_exact(&mut temp)?;\n\n\n\n // Verify version.\n\n let version = file.read_u32::<LittleEndian>()? as i32;\n\n\n\n // Anything else should be supported.\n\n if version < 7100 {\n\n return Err(FbxError::UnsupportedVersion(version));\n\n }\n\n\n\n let mut nodes = Pool::new();\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 34, "score": 208348.75453148456 }, { "content": "#[allow(clippy::cast_ref_to_mut)] // See SAFETY block below\n\npub fn serialize_script(script: &Script) -> Result<Vec<u8>, VisitError> {\n\n let mut visitor = Visitor::new();\n\n\n\n let mut script_type_uuid = script.id();\n\n script_type_uuid.visit(\"TypeUuid\", &mut visitor)?;\n\n\n\n // SAFETY: It is guaranteed that visitor will **not** modify internal state of the object\n\n // if it is in \"write\" mode (serialization mode).\n\n let script = unsafe { &mut *(script as *const _ as *mut Script) };\n\n script.visit(\"ScriptData\", &mut visitor)?;\n\n\n\n visitor.save_binary_to_vec()\n\n}\n\n\n", "file_path": "src/scene/base.rs", "rank": 35, "score": 207777.4256531689 }, { "content": "#[inline]\n\npub fn cubicf(p0: f32, p1: f32, t: f32, m0: f32, m1: f32) -> f32 {\n\n let t2 = t * t;\n\n let t3 = t2 * t;\n\n let scale = (p1 - p0).abs();\n\n\n\n (2.0 * t3 - 3.0 * t2 + 1.0) * p0\n\n + (t3 - 2.0 * t2 + t) * m0 * scale\n\n + (-2.0 * t3 + 3.0 * t2) * p1\n\n + (t3 - t2) * m1 * scale\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 36, "score": 207649.49374144006 }, { "content": "fn filtered_out(filter: &mut Option<Filter>, path: &Path) -> bool {\n\n match filter.as_mut() {\n\n Some(filter) => !filter.0.borrow_mut().deref_mut().lock().unwrap()(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/file_browser.rs", "rank": 37, "score": 206466.23598739374 }, { "content": "#[inline]\n\npub fn spherical_to_cartesian(azimuth: f32, elevation: f32, radius: f32) -> Vector3<f32> {\n\n let x = radius * elevation.sin() * azimuth.sin();\n\n let y = radius 
* elevation.cos();\n\n let z = -radius * elevation.sin() * azimuth.cos();\n\n Vector3::new(x, y, z)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 38, "score": 205744.58054224588 }, { "content": "#[inline]\n\npub fn inf_sup_cubicf(p0: f32, p1: f32, m0: f32, m1: f32) -> (f32, f32) {\n\n // Find two `t`s where derivative of cubicf is zero - these will be\n\n // extreme points of the spline. Then get the values at those `t`s\n\n let d = -(9.0 * p0 * p0 + 6.0 * p0 * (-3.0 * p1 + m1 + m0) + 9.0 * p1 * p1\n\n - 6.0 * p1 * (m1 + m0)\n\n + m1 * m1\n\n + m1 * m0\n\n + m0 * m0)\n\n .sqrt();\n\n let k = 3.0 * (2.0 * p0 - 2.0 * p1 + m1 + m0);\n\n let v = 3.0 * p0 - 3.0 * p1 + m1 + 2.0 * m0;\n\n let t0 = (-d + v) / k;\n\n let t1 = (d + v) / k;\n\n (cubicf(p0, p1, t0, m0, m1), cubicf(p0, p1, t1, m0, m1))\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 39, "score": 205566.97124877863 }, { "content": "#[inline]\n\npub fn cubicf_derivative(p0: f32, p1: f32, t: f32, m0: f32, m1: f32) -> f32 {\n\n let t2 = t * t;\n\n let scale = (p1 - p0).abs();\n\n\n\n (6.0 * t2 - 6.0 * t) * p0\n\n + (3.0 * t2 - 4.0 * t + 1.0) * m0 * scale\n\n + (6.0 * t - 6.0 * t2) * p1\n\n + (3.0 * t2 - 2.0 * t) * m1 * scale\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 40, "score": 205281.4038514658 }, { "content": "#[inline]\n\npub fn triangle_area(a: Vector3<f32>, b: Vector3<f32>, c: Vector3<f32>) -> f32 {\n\n (b - a).cross(&(c - a)).norm() * 0.5\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 41, "score": 202449.4499076433 }, { "content": "/// Serializes Option<Script> using given serializer.\n\npub fn visit_opt_script(\n\n name: &str,\n\n script: &mut Option<Script>,\n\n visitor: &mut Visitor,\n\n) -> VisitResult {\n\n let mut region = visitor.enter_region(name)?;\n\n\n\n let mut script_type_uuid = script.as_ref().map(|s| s.id()).unwrap_or_default();\n\n script_type_uuid.visit(\"TypeUuid\", &mut region)?;\n\n\n\n if region.is_reading() {\n\n 
*script = if script_type_uuid.is_nil() {\n\n None\n\n } else {\n\n let serialization_context = region\n\n .environment\n\n .as_ref()\n\n .and_then(|e| e.downcast_ref::<SerializationContext>())\n\n .expect(\"Visitor environment must contain serialization context!\");\n\n\n", "file_path": "src/scene/base.rs", "rank": 42, "score": 199499.37533775062 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Visit)]\n\nstruct TupleStruct(f32, u32);\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 43, "score": 198911.28309263694 }, { "content": "#[inline]\n\npub fn vector_to_quat(vec: Vector3<f32>) -> UnitQuaternion<f32> {\n\n let dot = vec.normalize().dot(&Vector3::y());\n\n\n\n if dot.abs() > 1.0 - 10.0 * f32::EPSILON {\n\n // Handle singularity when vector is collinear with Y axis.\n\n UnitQuaternion::from_axis_angle(&Vector3::x_axis(), -dot.signum() * 90.0f32.to_radians())\n\n } else {\n\n UnitQuaternion::face_towards(&vec, &Vector3::y())\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 44, "score": 198269.56064836675 }, { "content": "/// `<prefix>field.visit(\"name\", visitor);`\n\npub fn create_field_visits<'a>(\n\n // None or `f` when bindings tuple variants. NOTE: We can't use `prefix: Ident`\n\n prefix: Option<Ident>,\n\n fields: impl Iterator<Item = &'a args::FieldArgs>,\n\n field_style: ast::Style,\n\n) -> Vec<TokenStream2> {\n\n if field_style == ast::Style::Unit {\n\n // `Unit` (struct/enum variant) has no field to visit.\n\n // We won't even enter this region:\n\n return vec![];\n\n }\n\n\n\n let visit_args = fields\n\n .filter(|field| !field.skip)\n\n .enumerate()\n\n .map(|(field_index, field)| {\n\n let (ident, name) = match field_style {\n\n // `NamedFields { a: f32, .. 
}`\n\n ast::Style::Struct => {\n\n let ident = field.ident.as_ref().unwrap_or_else(|| unreachable!());\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 45, "score": 194658.2991414101 }, { "content": "/// A main trait for any scene graph node.\n\npub trait NodeTrait: BaseNodeTrait + Inspect + Visit {\n\n /// Allows a node to provide access to inner components.\n\n fn query_component_ref(&self, type_id: TypeId) -> Option<&dyn Any>;\n\n\n\n /// Allows a node to provide access to inner components.\n\n fn query_component_mut(&mut self, type_id: TypeId) -> Option<&mut dyn Any>;\n\n\n\n /// Returns axis-aligned bounding box in **local space** of the node.\n\n fn local_bounding_box(&self) -> AxisAlignedBoundingBox;\n\n\n\n /// Returns axis-aligned bounding box in **world space** of the node.\n\n ///\n\n /// # Important notes\n\n ///\n\n /// World bounding box will become valid **only** after first `update` call of the parent scene.\n\n /// It is because to calculate world bounding box we must get world transform first, but it\n\n /// can be calculated with a knowledge of parent world transform, so node on its own cannot know\n\n /// its world bounding box without additional information.\n\n fn world_bounding_box(&self) -> AxisAlignedBoundingBox;\n\n\n", "file_path": "src/scene/node/mod.rs", "rank": 46, "score": 193008.23399084838 }, { "content": "#[inline]\n\npub fn get_farthest_point(points: &[Vector3<f32>], dir: Vector3<f32>) -> Vector3<f32> {\n\n let mut n_farthest = 0;\n\n let mut max_dot = -f32::MAX;\n\n for (i, point) in points.iter().enumerate() {\n\n let dot = dir.dot(point);\n\n if dot > max_dot {\n\n n_farthest = i;\n\n max_dot = dot\n\n }\n\n }\n\n points[n_farthest]\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 47, "score": 192869.47372493875 }, { "content": "pub fn print() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print(&mut 
buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/profiler.rs", "rank": 48, "score": 191466.30590384756 }, { "content": "pub fn struct_prop_key_name(nth: usize, field: &args::FieldArgs) -> String {\n\n field.name.clone().unwrap_or_else(|| {\n\n let field_ident = match &field.ident {\n\n Some(ident) => quote!(#ident),\n\n None => {\n\n let nth_field = Index::from(nth);\n\n quote!(#nth_field)\n\n }\n\n };\n\n\n\n field_ident.to_string()\n\n })\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 49, "score": 190337.71696416056 }, { "content": "#[inline]\n\npub fn get_signed_triangle_area(v1: Vector2<f32>, v2: Vector2<f32>, v3: Vector2<f32>) -> f32 {\n\n 0.5 * (v1.x * (v3.y - v2.y) + v2.x * (v1.y - v3.y) + v3.x * (v2.y - v1.y))\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 50, "score": 189878.0912711945 }, { "content": "#[inline]\n\npub fn m4x4_approx_eq(a: &Matrix4<f32>, b: &Matrix4<f32>) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(a, b)| (*a - *b).abs() <= 0.001)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::algebra::Vector2;\n\n use crate::math::Rect;\n\n use crate::math::SmoothAngle;\n\n\n\n #[test]\n\n fn ray_rect_intersection() {\n\n let rect = Rect::new(0.0, 0.0, 10.0, 10.0);\n\n\n\n // Edge-case: Horizontal ray.\n\n assert!(super::ray_rect_intersection(\n\n rect,\n\n Vector2::new(-1.0, 5.0),\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 51, "score": 189860.37194676365 }, { "content": "#[proc_macro_derive(Visit, attributes(visit))]\n\npub fn visit(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(visit::impl_visit(ast))\n\n}\n\n\n\n/// Implements `Inspect` trait\n\n///\n\n/// User has to import `Inspect` and 
`PropertyInfo` to use this macro.\n", "file_path": "fyrox-core-derive/src/lib.rs", "rank": 52, "score": 189580.17783873732 }, { "content": "#[proc_macro_derive(Inspect, attributes(inspect))]\n\npub fn inspect(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(inspect::impl_inspect(ast))\n\n}\n", "file_path": "fyrox-core-derive/src/lib.rs", "rank": 53, "score": 189579.61307213537 }, { "content": "/// A value of a property.\n\npub trait PropertyValue: Any + Debug {\n\n /// Casts `self` to a `&dyn Any`\n\n fn as_any(&self) -> &dyn Any;\n\n}\n\n\n\nimpl<T: Debug + 'static> PropertyValue for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n}\n\n\n\n/// An error that can occur during \"type casting\"\n\n#[derive(Debug)]\n\npub enum CastError {\n\n /// Given type does not match expected.\n\n TypeMismatch {\n\n /// A name of the property.\n\n property_name: String,\n\n\n\n /// Expected type identifier.\n", "file_path": "fyrox-core/src/inspect.rs", "rank": 54, "score": 186919.8168902071 }, { "content": "pub fn print_hot_path() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print_hot_path(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/profiler.rs", "rank": 55, "score": 183865.8734392247 }, { "content": "/// A trait for resource import options. 
It provides generic functionality shared over all types of import options.\n\npub trait ImportOptions: Serialize + DeserializeOwned + Default + Clone {\n\n /// Saves import options into a specified file.\n\n fn save(&self, path: &Path) -> bool {\n\n if let Ok(file) = File::create(path) {\n\n if ron::ser::to_writer_pretty(file, self, PrettyConfig::default()).is_ok() {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n/// Tries to load import settings for a resource. It is not part of ImportOptions trait because\n\n/// `async fn` is not yet supported for traits.\n\npub async fn try_get_import_settings<T>(resource_path: &Path) -> Option<T>\n\nwhere\n\n T: ImportOptions,\n\n{\n\n let settings_path = append_extension(resource_path, \"options\");\n\n\n", "file_path": "src/engine/resource_manager/options.rs", "rank": 56, "score": 183707.58079692227 }, { "content": "#[test]\n\nfn inspect_default() {\n\n #[derive(Debug, Default, Inspect)]\n\n pub struct Data {\n\n the_field: String,\n\n another_field: f32,\n\n }\n\n\n\n let data = Data::default();\n\n\n\n let expected = vec![\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Data>(),\n\n name: \"the_field\",\n\n display_name: \"The Field\",\n\n value: &data.the_field,\n\n ..default_prop()\n\n },\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Data>(),\n\n name: \"another_field\",\n\n display_name: \"Another Field\",\n\n value: &data.another_field,\n\n ..default_prop()\n\n },\n\n ];\n\n\n\n assert_eq!(data.properties(), expected);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 57, "score": 183675.52874505208 }, { "content": "#[test]\n\nfn inspect_enum() {\n\n #[derive(Debug, Inspect)]\n\n pub struct NonCopy {\n\n inner: u32,\n\n }\n\n\n\n #[derive(Debug, Inspect)]\n\n pub enum Data {\n\n Named { x: u32, y: u32, z: NonCopy },\n\n Tuple(f32, f32),\n\n Unit,\n\n }\n\n\n\n let data = Data::Named {\n\n x: 0,\n\n y: 1,\n\n z: NonCopy { inner: 10 },\n\n };\n\n\n\n assert_eq!(\n", "file_path": 
"fyrox-core-derive/tests/it/inspect.rs", "rank": 58, "score": 183654.55169878746 }, { "content": "#[test]\n\nfn inspect_struct() {\n\n #[derive(Debug, Default, Inspect)]\n\n struct Tuple(f32, f32);\n\n\n\n let x = Tuple::default();\n\n assert_eq!(\n\n x.properties(),\n\n vec![\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Tuple>(),\n\n name: \"0\",\n\n display_name: \"0\",\n\n value: &x.0,\n\n ..default_prop()\n\n },\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Tuple>(),\n\n name: \"1\",\n\n display_name: \"1\",\n\n value: &x.1,\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 59, "score": 183578.6087412699 }, { "content": "#[inline]\n\npub fn is_point_inside_triangle(p: &Vector3<f32>, vertices: &[Vector3<f32>; 3]) -> bool {\n\n let ba = vertices[1] - vertices[0];\n\n let ca = vertices[2] - vertices[0];\n\n let vp = *p - vertices[0];\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot02 = ca.dot(&vp);\n\n let dot12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // Calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot02 - ca_dot_ba * dot12) * inv_denom;\n\n let v = (ca_dot_ca * dot12 - ca_dot_ba * dot02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 60, "score": 183235.011580481 }, { "content": "fn default_prop() -> PropertyInfo<'static> {\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<()>(),\n\n name: \"\",\n\n display_name: \"\",\n\n value: &(),\n\n read_only: false,\n\n min_value: None,\n\n max_value: None,\n\n step: None,\n\n precision: None,\n\n description: \"\".to_string(),\n\n is_modified: false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 61, "score": 181073.74793693214 }, { "content": "fn arrange_dims(dims: &mut [GridDimension], final_size: f32) {\n\n let mut preset_width = 0.0;\n\n 
for dim in dims.iter() {\n\n if dim.size_mode == SizeMode::Auto || dim.size_mode == SizeMode::Strict {\n\n preset_width += dim.actual_size;\n\n }\n\n }\n\n\n\n let stretch_count = count_stretch_dims(dims);\n\n let avg_size = if stretch_count > 0 {\n\n (final_size - preset_width) / stretch_count as f32\n\n } else {\n\n 0.0\n\n };\n\n\n\n let mut location = 0.0;\n\n for dim in dims.iter_mut() {\n\n dim.location = location;\n\n location += match dim.size_mode {\n\n SizeMode::Strict | SizeMode::Auto => dim.actual_size,\n", "file_path": "fyrox-ui/src/grid.rs", "rank": 62, "score": 180921.4862426449 }, { "content": "pub fn check(err_code: c_int) -> Result<(), SoundError> {\n\n if err_code < 0 {\n\n Err(SoundError::FailedToInitializeDevice(err_code_to_string(\n\n err_code,\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AlsaSoundDevice {\n\n pub fn new<F: FnMut(&mut [(f32, f32)]) + Send + 'static>(\n\n buffer_len_bytes: u32,\n\n callback: F,\n\n ) -> Result<Self, SoundError> {\n\n unsafe {\n\n let name = CString::new(\"default\").unwrap();\n\n // 16-bit stereo is 4 bytes, so frame count is bufferHalfSize / 4\n\n let frame_count = buffer_len_bytes / 4;\n\n let mut playback_device = std::ptr::null_mut();\n", "file_path": "fyrox-sound/src/device/alsa.rs", "rank": 63, "score": 180414.61506380164 }, { "content": "#[inline]\n\n#[allow(clippy::useless_let_if_seq)]\n\npub fn classify_plane(normal: Vector3<f32>) -> PlaneClass {\n\n let mut longest = 0.0f32;\n\n let mut class = PlaneClass::XY;\n\n\n\n if normal.x.abs() > longest {\n\n longest = normal.x.abs();\n\n class = PlaneClass::YZ;\n\n }\n\n\n\n if normal.y.abs() > longest {\n\n longest = normal.y.abs();\n\n class = PlaneClass::XZ;\n\n }\n\n\n\n if normal.z.abs() > longest {\n\n class = PlaneClass::XY;\n\n }\n\n\n\n class\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 64, "score": 180124.71445008303 }, { "content": "/// A trait that allows you to \"look inside\" an object that implements it. 
It is used for lightweight\n\n/// runtime read-only reflection. The most common use case for it is various editors.\n\n///\n\n/// It is not advised to manually implement this trait. You should use `#[derive(Inspect)]` whenever\n\n/// possible.\n\n///\n\n/// ## `#[derive(Inspect)]`\n\n///\n\n/// The proc macro reduces amount of boilerplate code to the minimum and significantly reduces a\n\n/// change of error.\n\n///\n\n/// ### Supported attributes\n\n///\n\n/// - `#[inspect(name = \"new_field_name\")]` - override field name.\n\n/// - `#[inspect(display_name = \"Human-readable Name\")]` - override display name.\n\n/// - `#[inspect(group = \"Group Name\")]` - override group name.\n\n/// - `#[inspect(expand)]` - extends the list of properties in case of composition, in other words it\n\n/// \"flattens\" and exposes the properties of an inner object. Useful when you have a structure that\n\n/// has some fields that are complex objects that implements `Inspect` too.\n\npub trait Inspect {\n\n /// Returns information about \"public\" properties.\n\n fn properties(&self) -> Vec<PropertyInfo<'_>>;\n\n}\n\n\n\nimpl<T: Inspect> Inspect for Option<T> {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n match self {\n\n Some(v) => v.properties(),\n\n None => vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Inspect> Inspect for Box<T> {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n (**self).properties()\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/inspect.rs", "rank": 65, "score": 177007.93701480556 }, { "content": "pub fn cascade_size(base_size: usize, cascade: usize) -> usize {\n\n match cascade {\n\n 0 => base_size,\n\n 1 => (base_size / 2).max(1),\n\n 2 => (base_size / 4).max(1),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/renderer/shadow/mod.rs", "rank": 66, "score": 175553.20216718857 }, { "content": "pub fn set_mesh_diffuse_color(mesh: &mut Mesh, color: Color) {\n\n for surface in mesh.surfaces() {\n\n surface\n\n .material()\n\n .lock()\n\n 
.set_property(\n\n &ImmutableString::new(\"diffuseColor\"),\n\n PropertyValue::Color(color),\n\n )\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "editor/src/lib.rs", "rank": 67, "score": 175273.34533649788 }, { "content": "pub fn make_button(ctx: &mut BuildContext, arrow: ArrowDirection, row: usize) -> Handle<UiNode> {\n\n ButtonBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::right(1.0))\n\n .on_row(row),\n\n )\n\n .with_back(\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_foreground(Brush::Solid(Color::opaque(90, 90, 90))),\n\n ))\n\n .with_normal_brush(Brush::Solid(Color::opaque(60, 60, 60)))\n\n .with_hover_brush(Brush::Solid(Color::opaque(80, 80, 80)))\n\n .with_pressed_brush(Brush::Solid(Color::opaque(80, 118, 178)))\n\n .build(ctx),\n\n )\n\n .with_content(make_arrow(ctx, arrow, 6.0))\n\n .build(ctx)\n\n}\n\n\n\nimpl<T: NumericType> NumericUpDownBuilder<T> {\n", "file_path": "fyrox-ui/src/numeric.rs", "rank": 68, "score": 174917.34905506263 }, { "content": "#[inline]\n\npub fn get_closest_point<P: PositionProvider>(points: &[P], point: Vector3<f32>) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for (i, vertex) in points.iter().enumerate() {\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(i);\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 69, "score": 174876.42682181182 }, { "content": "fn calculate_decay(len: usize, sample_rate: u32, decay_time: Duration) -> f32 {\n\n let time_len = len as f32 / sample_rate as f32;\n\n // Asymptotically goes to 1.0 by exponential law\n\n DB60.powf(time_len / decay_time.as_secs_f32())\n\n}\n\n\n\nimpl ChannelReverb {\n\n /// Filter lengths given in samples\n\n const COMB_LENGTHS: [usize; 8] = [1557, 1617, 1491, 1422, 1277, 1356, 1188, 1116];\n\n 
const ALLPASS_LENGTHS: [usize; 4] = [225, 556, 441, 341];\n\n\n\n fn new(stereo_spread: u32, fc: f32, feedback: f32) -> Self {\n\n Self {\n\n fc,\n\n stereo_spread,\n\n sample_rate: DESIGN_SAMPLE_RATE,\n\n lp_fb_comb_filters: Self::COMB_LENGTHS\n\n .iter()\n\n .map(|len| LpfComb::new(*len + stereo_spread as usize, fc, feedback))\n\n .collect(),\n", "file_path": "fyrox-sound/src/effects/reverb.rs", "rank": 70, "score": 174433.3389550181 }, { "content": "pub fn enum_prop_key_name(nth: usize, field: &args::FieldArgs, v: &args::VariantArgs) -> String {\n\n field.name.clone().unwrap_or_else(|| {\n\n let field_ident = match &field.ident {\n\n Some(ident) => quote!(#ident),\n\n None => {\n\n let nth_field = Index::from(nth);\n\n quote!(#nth_field)\n\n }\n\n };\n\n\n\n format!(\"{}.{}\", v.ident, field_ident)\n\n })\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 71, "score": 173301.9339828051 }, { "content": "pub fn send_sync_message(ui: &UserInterface, mut msg: UiMessage) {\n\n msg.flags = MSG_SYNC_FLAG;\n\n ui.send_message(msg);\n\n}\n\n\n", "file_path": "editor/src/lib.rs", "rank": 72, "score": 172027.9563960628 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ctx);\n\n progress_bar\n\n })\n\n .with_child({\n\n progress_text = TextBuilder::new(\n", "file_path": "examples/async.rs", "rank": 73, "score": 171668.41956772836 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let 
progress_bar;\n\n let progress_text;\n\n let cancel;\n\n let progress_grid;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_grid = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(!Path::new(LIGHTMAP_SCENE_PATH).exists())\n\n .on_column(1)\n", "file_path": "examples/lightmap.rs", "rank": 74, "score": 171668.41956772836 }, { "content": "/// `impl Inspect`\n\nfn inspect_trait_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::impl_inspect_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! {\n\n impl #impl_generics Inspect for #ty_ident #ty_generics #where_clause {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 75, "score": 170941.83122578816 }, { "content": "/// Creates `Generic` for `impl Inspect` block\n\n///\n\n/// TODO: Add `where Field: Inspect` boundaries to support inspectable types with generics\n\nfn impl_inspect_generics<'a>(\n\n generics: &Generics,\n\n _field_args: impl Iterator<Item = &'a args::FieldArgs>,\n\n) -> Generics {\n\n generics.clone()\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 76, "score": 170941.69954393286 }, { "content": "/// https://en.wikipedia.org/wiki/Smoothstep\n\nfn smoothstep(edge0: f32, edge1: f32, x: f32) -> f32 {\n\n let k = ((x - edge0) / (edge1 - edge0)).max(0.0).min(1.0);\n\n k * k * (3.0 - 2.0 * k)\n\n}\n\n\n", "file_path": "src/utils/lightmap.rs", "rank": 77, "score": 170756.00760839568 }, { 
"content": "pub fn struct_prop_ident(ty_args: &args::TypeArgs, nth: usize, field: &args::FieldArgs) -> Ident {\n\n let fields = match &ty_args.data {\n\n ast::Data::Struct(xs) => xs,\n\n _ => unreachable!(),\n\n };\n\n let field_ident = self::field_ident(fields, nth, field);\n\n\n\n let ident = field_ident.to_case(Case::UpperSnake);\n\n syn::parse_str(&ident).unwrap()\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 78, "score": 170057.6079615977 }, { "content": "/// `pub const [VARIANT_]FIELD: &'static str = \"key\";`\n\npub fn prop_keys_impl(ty_args: &args::TypeArgs) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let (impl_generics, ty_generics, where_clause) = ty_args.generics.split_for_impl();\n\n\n\n let prop_keys = self::quote_prop_keys(ty_args);\n\n\n\n quote! {\n\n /// Property key constants\n\n impl #impl_generics #ty_ident #ty_generics #where_clause {\n\n #prop_keys\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 79, "score": 169362.216572196 }, { "content": "pub fn make_status_enum_editor_definition() -> EnumPropertyEditorDefinition<Status> {\n\n EnumPropertyEditorDefinition {\n\n variant_generator: |i| match i {\n\n 0 => Status::Stopped,\n\n 1 => Status::Playing,\n\n 2 => Status::Paused,\n\n _ => unreachable!(),\n\n },\n\n index_generator: |v| *v as usize,\n\n names_generator: || {\n\n vec![\n\n \"Stopped\".to_string(),\n\n \"Playing\".to_string(),\n\n \"Paused\".to_string(),\n\n ]\n\n },\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/editors/mod.rs", "rank": 80, "score": 169293.58221961837 }, { "content": "pub fn make_save_file_selector(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0))\n\n .with_title(WindowTitle::Text(\"Save Scene As\".into()))\n\n .open(false),\n\n )\n\n .with_mode(FileBrowserMode::Save {\n\n default_file_name: 
PathBuf::from(\"unnamed.rgs\"),\n\n })\n\n .with_path(\"./\")\n\n .with_filter(make_scene_file_filter())\n\n .build(ctx)\n\n}\n\n\n\npub enum Mode {\n\n Edit,\n\n Play {\n\n // Play mode scene.\n\n scene: Handle<Scene>,\n\n // List of scenes that existed before entering play mode.\n", "file_path": "editor/src/lib.rs", "rank": 81, "score": 168969.78040602052 }, { "content": "fn stepf(p0: f32, p1: f32, t: f32) -> f32 {\n\n if t.eq(&1.0) {\n\n p1\n\n } else {\n\n p0\n\n }\n\n}\n\n\n\n#[derive(Visit, Clone, Debug, PartialEq)]\n\npub enum CurveKeyKind {\n\n Constant,\n\n Linear,\n\n Cubic {\n\n /// A `tan(angle)` of left tangent.\n\n left_tangent: f32,\n\n /// A `tan(angle)` of right tangent.\n\n right_tangent: f32,\n\n },\n\n}\n\n\n", "file_path": "fyrox-core/src/curve.rs", "rank": 82, "score": 168505.10573211557 }, { "content": "#[inline]\n\npub fn quat_from_euler<T: SimdRealField + RealField + Copy + Clone>(\n\n euler_radians: Vector3<T>,\n\n order: RotationOrder,\n\n) -> UnitQuaternion<T> {\n\n let qx = UnitQuaternion::from_axis_angle(&Vector3::x_axis(), euler_radians.x);\n\n let qy = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), euler_radians.y);\n\n let qz = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), euler_radians.z);\n\n match order {\n\n RotationOrder::XYZ => qz * qy * qx,\n\n RotationOrder::XZY => qy * qz * qx,\n\n RotationOrder::YZX => qx * qz * qy,\n\n RotationOrder::YXZ => qz * qx * qy,\n\n RotationOrder::ZXY => qy * qx * qz,\n\n RotationOrder::ZYX => qx * qy * qz,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 83, "score": 166354.93018932937 }, { "content": "fn create_float_view(ctx: &mut BuildContext, value: f32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 84, "score": 165359.94638727698 }, { "content": "/// Calculates distance attenuation for a point using given distance to 
the point and\n\n/// radius of a light.\n\nfn distance_attenuation(distance: f32, sqr_radius: f32) -> f32 {\n\n let attenuation = (1.0 - distance * distance / sqr_radius).max(0.0).min(1.0);\n\n attenuation * attenuation\n\n}\n\n\n", "file_path": "src/utils/lightmap.rs", "rank": 85, "score": 165269.06429607308 }, { "content": "fn round_to_step(x: f32, step: f32) -> f32 {\n\n x - x % step\n\n}\n\n\n\nimpl CurveEditor {\n\n fn update_matrices(&self) {\n\n let vp = Vector2::new(self.view_position.x, -self.view_position.y);\n\n self.view_matrix.set(\n\n Matrix3::new_nonuniform_scaling_wrt_point(\n\n &Vector2::new(self.zoom, self.zoom),\n\n &Point2::from(self.actual_size().scale(0.5)),\n\n ) * Matrix3::new_translation(&vp),\n\n );\n\n\n\n let screen_bounds = self.screen_bounds();\n\n self.screen_matrix.set(\n\n Matrix3::new_translation(&screen_bounds.position)\n\n // Flip Y because in math origin is in lower left corner.\n\n * Matrix3::new_translation(&Vector2::new(0.0, screen_bounds.h()))\n\n * Matrix3::new_nonuniform_scaling(&Vector2::new(1.0, -1.0))\n", "file_path": "fyrox-ui/src/curve/mod.rs", "rank": 86, "score": 165263.3817046283 }, { "content": "pub fn make_arrow_primitives(orientation: ArrowDirection, size: f32) -> Vec<Primitive> {\n\n vec![match orientation {\n\n ArrowDirection::Top => Primitive::Triangle {\n\n points: [\n\n Vector2::new(size * 0.5, 0.0),\n\n Vector2::new(size, size),\n\n Vector2::new(0.0, size),\n\n ],\n\n },\n\n ArrowDirection::Bottom => Primitive::Triangle {\n\n points: [\n\n Vector2::new(0.0, 0.0),\n\n Vector2::new(size, 0.0),\n\n Vector2::new(size * 0.5, size),\n\n ],\n\n },\n\n ArrowDirection::Right => Primitive::Triangle {\n\n points: [\n\n Vector2::new(0.0, 0.0),\n\n Vector2::new(size, size * 0.5),\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 87, "score": 164654.38030701096 }, { "content": "/// A samples generator.\n\n///\n\n/// # Notes\n\n///\n\n/// Iterator implementation (the `next()` method) must produce samples in interleaved 
format, this\n\n/// means that samples emitted by the method should be in `LRLRLR..` order, where `L` and `R` are\n\n/// samples from left and right channels respectively. The sound engine supports both mono and\n\n/// stereo sample sources.\n\npub trait RawStreamingDataSource: Iterator<Item = f32> + Send + Sync + Debug {\n\n /// Should return sample rate of the source.\n\n fn sample_rate(&self) -> usize;\n\n\n\n /// Should return total channel count.\n\n fn channel_count(&self) -> usize;\n\n\n\n /// Tells whether the provider should restart.\n\n fn rewind(&mut self) -> Result<(), SoundError> {\n\n Ok(())\n\n }\n\n\n\n /// Allows you to start playback from given duration.\n\n fn time_seek(&mut self, _duration: Duration) {}\n\n\n\n /// Returns total duration of data. Can be `None` if internal decoder does not supports seeking.\n\n fn duration(&self) -> Option<Duration> {\n\n None\n\n }\n\n}\n", "file_path": "fyrox-sound/src/buffer/mod.rs", "rank": 88, "score": 164545.13312724064 }, { "content": "#[test]\n\nfn complex_enum() {\n\n let mut data = ComplexEnum::Tuple(100, 200);\n\n let mut data_default = ComplexEnum::UnitVariant;\n\n\n\n super::save_load(\"complex_enum\", &mut data, &mut data_default);\n\n\n\n assert_eq!(data, data_default);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 89, "score": 164284.18039235426 }, { "content": "#[test]\n\nfn generic_enum() {\n\n #[derive(Debug)]\n\n struct NotVisit;\n\n\n\n #[allow(warnings)]\n\n let mut not_compile = GenericEnum::Tuple(1, vec![NotVisit]);\n\n\n\n // Compile error because `Generics<NotVisit> is not `Visit`:\n\n // let mut visitor = Visitor::new();\n\n // not_compile.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n let mut data = GenericEnum::Tuple(1, vec![100u32]);\n\n let mut data_default = GenericEnum::UnitVariant;\n\n\n\n super::save_load(\"generic_enum\", &mut data, &mut data_default);\n\n\n\n assert_eq!(data, data_default);\n\n}\n", "file_path": 
"fyrox-core-derive/tests/it/visit/basic.rs", "rank": 90, "score": 164284.18039235426 }, { "content": "#[test]\n\nfn plain_enum() {\n\n let mut data = PlainEnum::C;\n\n let mut data_default = PlainEnum::A;\n\n\n\n super::save_load(\"plain_enum\", &mut data, &mut data_default);\n\n\n\n assert_eq!(data, data_default);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 91, "score": 164284.18039235426 }, { "content": "#[test]\n\nfn unit_struct() {\n\n let mut data = UnitStruct;\n\n let mut data_default = UnitStruct;\n\n\n\n // non seuse.. but anyways,\n\n // `Visit` is implemented `UnitStruct;` as empty code block `{}`\n\n super::save_load(\"unit_struct\", &mut data, &mut data_default);\n\n\n\n assert_eq!(data, data_default);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 92, "score": 164210.9819747652 }, { "content": "#[test]\n\nfn tuple_struct() {\n\n let mut data = TupleStruct(10.0, 20);\n\n let mut data_default = TupleStruct(0.0, 0);\n\n\n\n super::save_load(\"tuple_struct\", &mut data, &mut data_default);\n\n\n\n assert_eq!(data, data_default);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 93, "score": 164210.9819747652 }, { "content": "fn heuristic(a: Vector3<f32>, b: Vector3<f32>) -> f32 {\n\n (a - b).norm_squared()\n\n}\n\n\n\nimpl Default for PathFinder {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl PositionProvider for PathVertex {\n\n fn position(&self) -> Vector3<f32> {\n\n self.position\n\n }\n\n}\n\n\n\n/// Path search can be interrupted by errors, this enum stores all possible\n\n/// kinds of errors.\n\n#[derive(Clone, Debug, thiserror::Error)]\n\npub enum PathError {\n", "file_path": "src/utils/astar.rs", "rank": 94, "score": 163964.4815468832 }, { "content": "fn read_string<R>(file: &mut R) -> Result<FbxAttribute, FbxError>\n\nwhere\n\n R: Read,\n\n{\n\n let length = file.read_u32::<LittleEndian>()? 
as usize;\n\n let mut raw_string = vec![Default::default(); length];\n\n file.read_exact(raw_string.as_mut_slice())?;\n\n // Find null terminator. It is required because for some reason some strings\n\n // have additional data after null terminator like this: Omni004\\x0\\x1Model, but\n\n // length still more than position of null terminator.\n\n if let Some(null_terminator_pos) = raw_string.iter().position(|c| *c == 0) {\n\n raw_string.truncate(null_terminator_pos);\n\n }\n\n let string = String::from_utf8(raw_string)?;\n\n Ok(FbxAttribute::String(string))\n\n}\n\n\n\nconst VERSION_7500: i32 = 7500;\n\nconst VERSION_7500_NULLREC_SIZE: usize = 25; // in bytes\n\nconst NORMAL_NULLREC_SIZE: usize = 13; // in bytes\n\n\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 95, "score": 162730.3168023643 }, { "content": "fn distance_scale_factor(fov: f32) -> f32 {\n\n fov.tan() * 0.1\n\n}\n\n\n\n/// Helper enum to be able to access interaction modes in array directly.\n\n#[derive(Copy, Clone, PartialOrd, PartialEq, Hash, Debug)]\n\n#[repr(usize)]\n\npub enum InteractionModeKind {\n\n Select = 0,\n\n Move = 1,\n\n Scale = 2,\n\n Rotate = 3,\n\n Navmesh = 4,\n\n Terrain = 5,\n\n}\n", "file_path": "editor/src/interaction/mod.rs", "rank": 96, "score": 161548.8063743718 }, { "content": "fn create_impl_generics(\n\n generics: &Generics,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n) -> Generics {\n\n let mut generics = generics.clone();\n\n\n\n // Add where clause for every visited field\n\n generics.make_where_clause().predicates.extend(\n\n field_args\n\n .filter(|f| !f.skip)\n\n .map(|f| f.ty)\n\n .map::<WherePredicate, _>(|ty| parse_quote! 
{ #ty: Visit }),\n\n );\n\n\n\n generics\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 97, "score": 159794.54306873365 }, { "content": "pub fn make_property_margin(layer_index: usize) -> Thickness {\n\n let mut margin = HEADER_MARGIN;\n\n margin.left += 10.0 + layer_index as f32 * 10.0;\n\n margin\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 98, "score": 159132.58594986683 }, { "content": "fn create_vec2_view(ctx: &mut BuildContext, value: Vector2<f32>) -> Handle<UiNode> {\n\n Vec2EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 99, "score": 158687.28931239547 } ]
Rust
vm-bindings/build_support/builder.rs
feenkcom/gtoolkit-vm
0647fe53203d614d776cb5af4f228cddc4a5f29d
use file_matcher::{FileNamed, OneEntry, OneEntryCopier}; use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::{env, fmt, fs}; const VM_CLIENT_VMMAKER_VM_VAR: &str = "VM_CLIENT_VMMAKER"; const VM_CLIENT_VMMAKER_IMAGE_VAR: &str = "VM_CLIENT_VMMAKER_IMAGE"; pub trait Builder: Debug { fn is_compiled(&self) -> bool { self.vm_binary().exists() } fn profile(&self) -> String { std::env::var("PROFILE").unwrap() } fn is_debug(&self) -> bool { self.profile() == "debug" } fn ensure_build_tools(&self) {} fn vmmaker_vm(&self) -> Option<PathBuf> { std::env::var(VM_CLIENT_VMMAKER_VM_VAR).map_or(None, |path| { let path = Path::new(&path); if path.exists() { Some(path.to_path_buf()) } else { panic!( "Specified {} does not exist: {}", VM_CLIENT_VMMAKER_VM_VAR, path.display() ); } }) } fn vmmaker_image(&self) -> Option<PathBuf> { std::env::var(VM_CLIENT_VMMAKER_IMAGE_VAR).map_or(None, |path| { let path = Path::new(&path); if path.exists() { Some(path.to_path_buf()) } else { panic!( "Specified {} does not exist: {}", VM_CLIENT_VMMAKER_IMAGE_VAR, path.display() ); } }) } fn output_directory(&self) -> PathBuf { Path::new(env::var("OUT_DIR").unwrap().as_str()).to_path_buf() } fn vm_binary(&self) -> PathBuf; fn vm_sources_directory(&self) -> PathBuf { std::env::current_dir() .unwrap() .parent() .unwrap() .to_path_buf() .join("opensmalltalk-vm") } fn compiled_libraries_directory(&self) -> PathBuf; fn exported_libraries_directory(&self) -> PathBuf { let target = std::env::var("CARGO_TARGET"); let mut path = PathBuf::new() .join("..") .join(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_string())); if let Ok(target) = target { path = path.join(target); } path.join(self.profile()).join("shared_libraries") } fn compile_sources(&self); fn squeak_include_directory(&self) -> PathBuf { self.vm_sources_directory() .join("extracted") .join("vm") .join("include") } fn common_include_directory(&self) -> PathBuf { self.squeak_include_directory().join("common") } fn 
platform_include_directory(&self) -> PathBuf; fn generated_config_directory(&self) -> PathBuf { self.output_directory() .join("build") .join("build") .join("include") .join("pharovm") } fn generated_include_directory(&self) -> PathBuf { self.output_directory() .join("build") .join("generated") .join("64") .join("vm") .join("include") } fn generate_bindings(&self) { let include_dir = self.vm_sources_directory().join("include"); let generated_vm_include_dir = self.generated_include_directory(); assert!( generated_vm_include_dir.exists(), "Generated vm include directory must exist: {:?}", generated_vm_include_dir.display() ); let generated_config_directory = self.generated_config_directory(); assert!( generated_config_directory.exists(), "Generated config.h directory must exist: {:?}", generated_config_directory.display() ); let bindings = bindgen::Builder::default() .whitelist_function("vm_.*") .whitelist_function("free") .header( include_dir .join("pharovm") .join("pharoClient.h") .display() .to_string(), ) .clang_arg(format!("-I{}", &include_dir.display())) .clang_arg(format!("-I{}", &include_dir.join("pharovm").display())) .clang_arg(format!("-I{}", generated_config_directory.display())) .clang_arg(format!("-I{}", generated_vm_include_dir.display())) .clang_arg(format!("-I{}", self.common_include_directory().display())) .clang_arg(format!("-I{}", self.platform_include_directory().display())) .clang_arg("-DLSB_FIRST=1") .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .generate() .expect("Unable to generate bindings"); bindings .write_to_file(self.output_directory().join("bindings.rs")) .expect("Couldn't write bindings!"); } fn link_libraries(&self); fn export_shared_libraries(&self) { if !self.exported_libraries_directory().exists() { fs::create_dir_all(self.exported_libraries_directory()).unwrap(); } for shared_library in self.shared_libraries_to_export() { let target = self.exported_libraries_directory(); match shared_library.copy(&target) { Ok(_) => {} 
Err(error) => { panic!( "Could not copy {:?} to {} due to {}", &shared_library, &target.display(), error ) } } } } fn shared_libraries_to_export(&self) -> Vec<OneEntry>; fn print_directories(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map() .entry(&"is_compiled".to_string(), &self.is_compiled()) .entry( &"output_directory".to_string(), &self.output_directory().display(), ) .entry(&"vm_binary".to_string(), &self.vm_binary().display()) .entry( &"vm_sources_directory".to_string(), &self.vm_sources_directory().display(), ) .entry( &"compiled_libraries_directory".to_string(), &self.compiled_libraries_directory().display(), ) .entry( &"exported_libraries_directory".to_string(), &self.exported_libraries_directory().display(), ) .finish() } fn boxed(self) -> Box<dyn Builder>; fn filenames_from_libdir(&self, filenames: Vec<&str>, libdir: PathBuf) -> Vec<OneEntry> { filenames .into_iter() .map(FileNamed::exact) .map(|each| each.within(&libdir)) .collect() } }
use file_matcher::{FileNamed, OneEntry, OneEntryCopier}; use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::{env, fmt, fs}; const VM_CLIENT_VMMAKER_VM_VAR: &str = "VM_CLIENT_VMMAKER"; const VM_CLIENT_VMMAKER_IMAGE_VAR: &str = "VM_CLIENT_VMMAKER_IMAGE"; pub trait Builder: Debug { fn is_compiled(&self) -> bool { self.vm_binary().exists() } fn profile(&self) -> String { std::env::var("PROFILE").un
(), &self.is_compiled()) .entry( &"output_directory".to_string(), &self.output_directory().display(), ) .entry(&"vm_binary".to_string(), &self.vm_binary().display()) .entry( &"vm_sources_directory".to_string(), &self.vm_sources_directory().display(), ) .entry( &"compiled_libraries_directory".to_string(), &self.compiled_libraries_directory().display(), ) .entry( &"exported_libraries_directory".to_string(), &self.exported_libraries_directory().display(), ) .finish() } fn boxed(self) -> Box<dyn Builder>; fn filenames_from_libdir(&self, filenames: Vec<&str>, libdir: PathBuf) -> Vec<OneEntry> { filenames .into_iter() .map(FileNamed::exact) .map(|each| each.within(&libdir)) .collect() } }
wrap() } fn is_debug(&self) -> bool { self.profile() == "debug" } fn ensure_build_tools(&self) {} fn vmmaker_vm(&self) -> Option<PathBuf> { std::env::var(VM_CLIENT_VMMAKER_VM_VAR).map_or(None, |path| { let path = Path::new(&path); if path.exists() { Some(path.to_path_buf()) } else { panic!( "Specified {} does not exist: {}", VM_CLIENT_VMMAKER_VM_VAR, path.display() ); } }) } fn vmmaker_image(&self) -> Option<PathBuf> { std::env::var(VM_CLIENT_VMMAKER_IMAGE_VAR).map_or(None, |path| { let path = Path::new(&path); if path.exists() { Some(path.to_path_buf()) } else { panic!( "Specified {} does not exist: {}", VM_CLIENT_VMMAKER_IMAGE_VAR, path.display() ); } }) } fn output_directory(&self) -> PathBuf { Path::new(env::var("OUT_DIR").unwrap().as_str()).to_path_buf() } fn vm_binary(&self) -> PathBuf; fn vm_sources_directory(&self) -> PathBuf { std::env::current_dir() .unwrap() .parent() .unwrap() .to_path_buf() .join("opensmalltalk-vm") } fn compiled_libraries_directory(&self) -> PathBuf; fn exported_libraries_directory(&self) -> PathBuf { let target = std::env::var("CARGO_TARGET"); let mut path = PathBuf::new() .join("..") .join(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_string())); if let Ok(target) = target { path = path.join(target); } path.join(self.profile()).join("shared_libraries") } fn compile_sources(&self); fn squeak_include_directory(&self) -> PathBuf { self.vm_sources_directory() .join("extracted") .join("vm") .join("include") } fn common_include_directory(&self) -> PathBuf { self.squeak_include_directory().join("common") } fn platform_include_directory(&self) -> PathBuf; fn generated_config_directory(&self) -> PathBuf { self.output_directory() .join("build") .join("build") .join("include") .join("pharovm") } fn generated_include_directory(&self) -> PathBuf { self.output_directory() .join("build") .join("generated") .join("64") .join("vm") .join("include") } fn generate_bindings(&self) { let include_dir = 
self.vm_sources_directory().join("include"); let generated_vm_include_dir = self.generated_include_directory(); assert!( generated_vm_include_dir.exists(), "Generated vm include directory must exist: {:?}", generated_vm_include_dir.display() ); let generated_config_directory = self.generated_config_directory(); assert!( generated_config_directory.exists(), "Generated config.h directory must exist: {:?}", generated_config_directory.display() ); let bindings = bindgen::Builder::default() .whitelist_function("vm_.*") .whitelist_function("free") .header( include_dir .join("pharovm") .join("pharoClient.h") .display() .to_string(), ) .clang_arg(format!("-I{}", &include_dir.display())) .clang_arg(format!("-I{}", &include_dir.join("pharovm").display())) .clang_arg(format!("-I{}", generated_config_directory.display())) .clang_arg(format!("-I{}", generated_vm_include_dir.display())) .clang_arg(format!("-I{}", self.common_include_directory().display())) .clang_arg(format!("-I{}", self.platform_include_directory().display())) .clang_arg("-DLSB_FIRST=1") .parse_callbacks(Box::new(bindgen::CargoCallbacks)) .generate() .expect("Unable to generate bindings"); bindings .write_to_file(self.output_directory().join("bindings.rs")) .expect("Couldn't write bindings!"); } fn link_libraries(&self); fn export_shared_libraries(&self) { if !self.exported_libraries_directory().exists() { fs::create_dir_all(self.exported_libraries_directory()).unwrap(); } for shared_library in self.shared_libraries_to_export() { let target = self.exported_libraries_directory(); match shared_library.copy(&target) { Ok(_) => {} Err(error) => { panic!( "Could not copy {:?} to {} due to {}", &shared_library, &target.display(), error ) } } } } fn shared_libraries_to_export(&self) -> Vec<OneEntry>; fn print_directories(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map() .entry(&"is_compiled".to_string
random
[ { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_is_on_worker_thread(gt_vm_ptr: *const Mutex<Option<GToolkitVM>>) -> bool {\n\n gt_vm_ptr.with(|| false, |gt_vm| gt_vm.is_on_worker_thread())\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 1, "score": 118262.39327501913 }, { "content": "#[allow(dead_code)]\n\npub fn validate_user_image_file(image_name: Option<&str>) -> Option<PathBuf> {\n\n if let Some(image_file_name) = image_name {\n\n let image_path = PathBuf::new().join(image_file_name);\n\n if image_path.exists() {\n\n return Some(image_path);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "vm-client/src/image_finder.rs", "rank": 2, "score": 103549.68257431456 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_wake_up(gt_vm_ptr: *const Mutex<Option<GToolkitVM>>) {\n\n gt_vm_ptr.with_not_null(|gt_vm| gt_vm.wake_up());\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 3, "score": 92200.60872267836 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_minor_version(gt_vm_ptr: *const Mutex<Option<GToolkitVM>>) -> usize {\n\n gt_vm_ptr.with(|| 0, |gt_vm| gt_vm.minor_version())\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 4, "score": 86941.03756800925 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_major_version(gt_vm_ptr: *const Mutex<Option<GToolkitVM>>) -> usize {\n\n gt_vm_ptr.with(|| 0, |gt_vm| gt_vm.major_version())\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 5, "score": 86941.03756800925 }, { "content": "#[cfg(all(not(target_os = \"macos\"),))]\n\npub fn executable_working_directory() -> Result<PathBuf> {\n\n // working_directory/bin/executable\n\n let mut app_dir = std::env::current_exe()?;\n\n\n\n // working_directory/bin/\n\n app_dir = app_dir\n\n .parent()\n\n .ok_or_else(|| ApplicationError::NoParentDirectory(app_dir.clone()))?\n\n .to_path_buf();\n\n\n\n // working_directory/\n\n app_dir = app_dir\n\n .parent()\n\n .ok_or_else(|| ApplicationError::NoParentDirectory(app_dir.clone()))?\n\n 
.to_path_buf();\n\n\n\n Ok(app_dir)\n\n}\n", "file_path": "vm-client/src/working_directory.rs", "rank": 6, "score": 85496.70761932316 }, { "content": "/// Implements Hash for the native type so that the wrapper type\n\n/// can derive it from.\n\npub trait NativeHash {\n\n fn hash<H: Hasher>(&self, state: &mut H);\n\n}\n\n\n\n/// Wraps a native type that can be represented and used in Rust memory.\n\n///\n\n/// This type requires the trait `NativeDrop` to be implemented.\n\n#[repr(transparent)]\n\npub struct Handle<N: NativeDrop>(\n\n N,\n\n // `*const` is needed to suppress automatic Send and Sync derivation, which happens when the\n\n // underlying type generated by bindgen is Send and Sync.\n\n PhantomData<*const ()>,\n\n);\n\n\n\nimpl<N: NativeDrop> AsRef<Handle<N>> for Handle<N> {\n\n fn as_ref(&self) -> &Self {\n\n &self\n\n }\n\n}\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 7, "score": 85203.65425648226 }, { "content": "/// Tag the type to automatically implement get() and set() functions\n\n/// for all Index & IndexMut implementation for that type.\n\npub trait IndexSet {}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 8, "score": 85203.65425648226 }, { "content": "/// Clone for bindings types we can not implement Clone for.\n\npub trait NativeClone {\n\n fn clone(&self) -> Self;\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 9, "score": 85203.65425648226 }, { "content": "/// Implements Drop for native types we can not implement Drop for.\n\npub trait NativeDrop {\n\n fn drop(&mut self);\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 10, "score": 85203.65425648226 }, { "content": "/// Tag the type to automatically implement get() functions for\n\n/// all Index implementations.\n\npub trait IndexGet {}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 11, "score": 85203.65425648226 }, { "content": "pub fn app_main() {\n\n let (sender, receiver) = channel();\n\n\n\n\n\n\n\n unsafe {\n\n let 
interpreter: VirtualMachine = unsafe { *sqGetInterpreterProxy() };\n\n\n\n let mut gt_vm = GTVM.lock().unwrap();\n\n *gt_vm = Some(GToolkitVM::new(sender, transmute(&receiver), interpreter));\n\n };\n\n\n\n // create a vector of zero terminated strings\n\n let mut args = std::env::args()\n\n .map(|arg| CString::new(arg).unwrap())\n\n .collect::<Vec<CString>>();\n\n\n\n // convert the strings to raw pointers\n\n let mut c_args = args\n\n .iter()\n", "file_path": "experimental/src/lib.rs", "rank": 12, "score": 85071.00427791136 }, { "content": "/// Even though some types may have value semantics, equality\n\n/// comparison may need to be customized.\n\npub trait NativePartialEq {\n\n fn eq(&self, rhs: &Self) -> bool;\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 13, "score": 83344.73602411397 }, { "content": "pub trait GToolkitVMPointer {\n\n fn with<DefaultBlock, Block, Return>(&self, default: DefaultBlock, block: Block) -> Return\n\n where\n\n DefaultBlock: FnOnce() -> Return,\n\n Block: FnOnce(&GToolkitVM) -> Return;\n\n\n\n fn with_not_null<Block>(&self, block: Block)\n\n where\n\n Block: FnOnce(&GToolkitVM);\n\n}\n\n\n\nimpl GToolkitVMPointer for *const Mutex<Option<GToolkitVM>> {\n\n fn with<DefaultBlock, Block, Return>(&self, default: DefaultBlock, block: Block) -> Return\n\n where\n\n DefaultBlock: FnOnce() -> Return,\n\n Block: FnOnce(&GToolkitVM) -> Return,\n\n {\n\n if self.is_null() {\n\n return default();\n\n }\n", "file_path": "experimental/src/vm.rs", "rank": 14, "score": 83344.73602411397 }, { "content": "/// Trait that enables access to a native representation of a wrapper type.\n\npub trait NativeAccess<N> {\n\n /// Provides shared access to the native type of the wrapper.\n\n fn native(&self) -> &N;\n\n\n\n /// Provides exclusive access to the native type of the wrapper.\n\n fn native_mut(&mut self) -> &mut N;\n\n\n\n // Returns a ptr to the native mutable value.\n\n unsafe fn native_mut_force(&self) -> *mut N {\n\n self.native() 
as *const N as *mut N\n\n }\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 15, "score": 80155.88934508777 }, { "content": "/// Declares a base class for a native type.\n\npub trait NativeBase<Base> {\n\n fn base(&self) -> &Base {\n\n unsafe { &*(self as *const Self as *const Base) }\n\n }\n\n\n\n fn base_mut(&mut self) -> &mut Base {\n\n unsafe { &mut *(self as *mut Self as *mut Base) }\n\n }\n\n}\n\n\n\npub struct Sendable<H: ConditionallySend>(H);\n\nunsafe impl<H: ConditionallySend> Send for Sendable<H> {}\n\n\n\nimpl<H: ConditionallySend> Sendable<H> {\n\n pub fn unwrap(self) -> H {\n\n self.0\n\n }\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 16, "score": 80152.55054440322 }, { "content": "pub trait ConditionallySend: Sized {\n\n /// Returns `true` if the handle can be sent to another thread.\n\n fn can_send(&self) -> bool;\n\n /// Wrap the handle in a type that can be sent to another thread and unwrapped there.\n\n ///\n\n /// Guaranteed to succeed of can_send() returns `true`.\n\n fn wrap_send(self) -> Result<Sendable<Self>, Self>;\n\n}\n\n\n\n/// Functions that are (supposedly) _safer_ variants of the ones Rust provides.\n\npub(crate) mod safer {\n\n use core::slice;\n\n use std::ptr;\n\n\n\n /// Invokes [slice::from_raw_parts] with the `ptr` only when `len` != 0, otherwise passes\n\n /// `ptr::NonNull::dangling()` as recommended.\n\n ///\n\n /// Panics if `len` != 0 and `ptr` is `null`.\n\n pub unsafe fn from_raw_parts<'a, T>(ptr: *const T, len: usize) -> &'a [T] {\n\n let ptr = if len == 0 {\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 17, "score": 80152.55054440322 }, { "content": "pub fn try_find_image_file_in_directory(path: PathBuf) -> Option<PathBuf> {\n\n let files = fs::read_dir(&path).unwrap();\n\n let image_files: Vec<PathBuf> = files\n\n .filter_map(Result::ok)\n\n .filter(|d| {\n\n if let Some(e) = d.path().extension() {\n\n e == \"image\"\n\n } else {\n\n false\n\n }\n\n })\n\n .map(|d| 
d.path().to_path_buf())\n\n .collect();\n\n\n\n match image_files.len() {\n\n 1 => Some(image_files[0].clone()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "vm-client/src/image_finder.rs", "rank": 18, "score": 79673.88713165271 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_get_receiver_signaller(\n\n gt_vm_ptr: *const Mutex<Option<GToolkitVM>>,\n\n thunk_ret_ptr: &mut *const c_void\n\n) -> unsafe extern \"C\" fn(*const c_void) {\n\n *thunk_ret_ptr = gt_vm_ptr as *const c_void;\n\n gt_vm_ptr.with(\n\n || gtoolkit_null_receiver_signaller as unsafe extern \"C\" fn(*const c_void),\n\n |gt_vm| gtoolkit_receiver_signaller as unsafe extern \"C\" fn(*const c_void)\n\n )\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 19, "score": 79512.59502286835 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_main_thread_callout(\n\n gt_vm_ptr: *const Mutex<Option<GToolkitVM>>,\n\n cif: *mut ffi_cif,\n\n func: *mut c_void,\n\n args: *mut c_void,\n\n result: *mut c_void,\n\n semaphore: sqInt,\n\n) {\n\n gt_vm_ptr.with_not_null(|gt_vm| {\n\n gt_vm.call(GToolkitCallout {\n\n cif,\n\n func: CodePtr(func),\n\n args: Some(args as *mut *mut c_void),\n\n result: Some(result),\n\n semaphore,\n\n })\n\n });\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 20, "score": 79512.59502286835 }, { "content": "#[no_mangle]\n\npub fn gtoolkit_vm_get_semaphore_signaller(\n\n gt_vm_ptr: *const Mutex<Option<GToolkitVM>>,\n\n thunk_ret_ptr: &mut *const c_void\n\n) -> unsafe extern \"C\" fn(usize, *const c_void) {\n\n *thunk_ret_ptr = gt_vm_ptr as *const c_void;\n\n gt_vm_ptr.with(\n\n || gtoolkit_null_semaphore_signaller as unsafe extern \"C\" fn(usize, *const c_void),\n\n |gt_vm| gt_vm.get_semaphore_signaller(),\n\n )\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 21, "score": 79512.59502286835 }, { "content": "#[no_mangle]\n\npub fn primitiveGetAddressOfGToolkitVM() {\n\n unsafe {\n\n let gtvm_raw = unsafe { Arc::into_raw(GTVM.clone()) };\n\n\n\n let 
gt_lock = GTVM.lock().unwrap();\n\n let gt = gt_lock.as_ref().unwrap();\n\n\n\n let mut stack_pointer = gt.get_stack_pointer();\n\n\n\n let external_address = gt.new_external_address_from_pointer(gtvm_raw);\n\n\n\n *stack_pointer = external_address.into_native();\n\n }\n\n}\n\n\n", "file_path": "experimental/src/lib.rs", "rank": 22, "score": 77905.19631997513 }, { "content": "#[allow(dead_code)]\n\npub fn pick_image_with_dialog(default_path: Option<PathBuf>) -> Option<PathBuf> {\n\n let mut dialog = dialog();\n\n let mut dialog_ref = &mut dialog;\n\n if let Some(ref default_path) = default_path {\n\n dialog_ref = dialog_ref.default_path(default_path);\n\n }\n\n dialog_ref = dialog_ref.filter(\"image\");\n\n\n\n let result = dialog_ref.open().unwrap_or_else(|e| {\n\n panic!(\"{}\", e);\n\n });\n\n\n\n match result {\n\n Response::Okay(file_name) => {\n\n let file_path = PathBuf::new().join(file_name);\n\n if file_path.exists() {\n\n Some(file_path)\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "vm-client/src/image_finder.rs", "rank": 23, "score": 77113.60105693713 }, { "content": "#[no_mangle]\n\npub fn primitiveMainThreadCalloutGToolkitVM() {\n\n unsafe {\n\n let gt_lock = GTVM.lock().unwrap();\n\n let gt = gt_lock.as_ref().unwrap();\n\n\n\n let external_function_oop = gt.stack_object_value(StackOffset::from_native_c(\n\n TFPrimitiveCallout::ExternalFunction as sqInt,\n\n ));\n\n let external_function = gt.get_handler(external_function_oop);\n\n\n\n let cif_oop = gt.object_field_at(\n\n external_function_oop,\n\n ObjectFieldIndex::from_native_c(TFExternalFunction::Definition as sqInt),\n\n );\n\n\n\n let cif_ptr = gt.get_handler(cif_oop) as *mut ffi_cif;\n\n let cif: &ffi_cif = transmute(cif_ptr);\n\n\n\n let semaphore_index = gt.stack_integer_value(StackOffset::from_native_c(\n\n TFPrimitiveCallout::SemaphoreIndex as sqInt,\n", "file_path": "experimental/src/lib.rs", "rank": 24, "score": 76400.83599547249 }, { "content": 
"#[no_mangle]\n\npub fn primitiveExtractReturnValueGToolkitVM() {\n\n unsafe {\n\n let gt_lock = GTVM.lock().unwrap();\n\n let gt = gt_lock.as_ref().unwrap();\n\n\n\n let callout_address_oop = gt.stack_object_value(StackOffset::from_native_c(\n\n TFPrimitiveReturnValue::CalloutAddress as sqInt,\n\n ));\n\n let callout_address = gt.read_address(callout_address_oop) as *mut GToolkitCallout;\n\n\n\n let mut callout = Box::from_raw(callout_address);\n\n\n\n if callout.result.is_some() {\n\n let return_holder = callout.result.unwrap();\n\n\n\n gt.marshall_and_push_return_value_of_type_popping(\n\n return_holder,\n\n callout.return_type(),\n\n 2, // one for the argument + one for the receiver\n\n );\n", "file_path": "experimental/src/lib.rs", "rank": 25, "score": 76400.83599547249 }, { "content": "#[allow(dead_code)]\n\npub fn search_image_file_within_directories(directories: Vec<PathBuf>) -> Option<PathBuf> {\n\n for directory in directories {\n\n if let Some(image) = try_find_image_file_in_directory(directory) {\n\n return Some(image);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "vm-client/src/image_finder.rs", "rank": 26, "score": 76116.11437664709 }, { "content": "/// Try to change the working directory back to the original location\n\npub fn un_translocate() -> Result<()> {\n\n let translocation = MacTranslocation::new()?;\n\n if let Some(original_location) = translocation.original_location()? 
{\n\n std::env::set_current_dir(\n\n original_location\n\n .parent()\n\n .ok_or_else(|| ApplicationError::NoParentDirectory(original_location.clone()))?,\n\n )?;\n\n }\n\n Ok(())\n\n}\n\n\n\npub struct MacTranslocation {\n\n library: Library,\n\n}\n\n\n\nimpl MacTranslocation {\n\n pub fn new() -> Result<Self> {\n\n unsafe {\n\n let security_lib =\n", "file_path": "vm-client/src/platform/mac/translocation.rs", "rank": 27, "score": 74889.5535426516 }, { "content": "pub trait IndexSetter<I, O: Copy> {\n\n fn set(&mut self, index: I, value: O) -> &mut Self;\n\n}\n\n\n\nimpl<T, I, O: Copy> IndexSetter<I, O> for T\n\nwhere\n\n T: IndexMut<I, Output = O> + IndexSet,\n\n{\n\n fn set(&mut self, index: I, value: O) -> &mut Self {\n\n self[index] = value;\n\n self\n\n }\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 28, "score": 73597.61857461008 }, { "content": "pub trait IndexGetter<I, O: Copy> {\n\n fn get(&self, index: I) -> O;\n\n}\n\n\n\nimpl<T, I, O: Copy> IndexGetter<I, O> for T\n\nwhere\n\n T: Index<I, Output = O> + IndexGet,\n\n{\n\n fn get(&self, index: I) -> O {\n\n self[index]\n\n }\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 29, "score": 73597.61857461008 }, { "content": "/// Trait to use native types that as a rust type\n\n/// _inplace_ with the same size and field layout.\n\npub trait NativeTransmutable<NT: Sized>: Sized {\n\n /// Provides access to the native value through a\n\n /// transmuted reference to the Rust value.\n\n fn native(&self) -> &NT {\n\n unsafe { transmute_ref(self) }\n\n }\n\n\n\n /// Provides mutable access to the native value through a\n\n /// transmuted reference to the Rust value.\n\n fn native_mut(&mut self) -> &mut NT {\n\n unsafe { transmute_ref_mut(self) }\n\n }\n\n\n\n /// Copies the native value to an equivalent Rust value.\n\n ///\n\n /// The `_c` suffix is to remind callers that functions that return a native value from a C++\n\n /// ABI can't be used. 
For example, C++ member functions must be wrapped in a extern \"C\" function.\n\n fn from_native_c(nt: NT) -> Self {\n\n let r = unsafe { mem::transmute_copy::<NT, Self>(&nt) };\n\n // don't drop, the Rust type takes over.\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 30, "score": 71986.92291484107 }, { "content": "///\n\n/// Possible parameters\n\n/// - VM_CLIENT_VMMAKER to use a specific VM to run a VM Maker image\n\nfn main() {\n\n let builder = match std::env::consts::OS {\n\n \"linux\" => LinuxBuilder::default().boxed(),\n\n \"macos\" => MacBuilder::default().boxed(),\n\n \"windows\" => WindowsBuilder::default().boxed(),\n\n _ => {\n\n panic!(\"The platform you're compiling for is not supported\");\n\n }\n\n };\n\n\n\n println!(\"About to build a vm using {:?}\", &builder);\n\n builder.ensure_build_tools();\n\n\n\n builder.compile_sources();\n\n\n\n if !builder.is_compiled() {\n\n panic!(\"Failed to compile {:?}\", builder.vm_binary().display())\n\n }\n\n\n\n builder.link_libraries();\n\n builder.generate_bindings();\n\n builder.export_shared_libraries();\n\n}\n", "file_path": "vm-bindings/build.rs", "rank": 31, "score": 47975.1304016979 }, { "content": "fn main() {\n\n app_main()\n\n}\n", "file_path": "experimental/src/main.rs", "rank": 32, "score": 47972.165901496075 }, { "content": "fn main() {\n\n match std::env::var(\"VM_CLIENT_EMBED_RESOURCES\") {\n\n Ok(resources) => {\n\n for resource in resources.split(\",\") {\n\n embed_resource::compile(resource);\n\n println!(\"cargo:rerun-if-changed={}\", resource);\n\n }\n\n }\n\n Err(_) => {}\n\n }\n\n}\n", "file_path": "vm-client/build.rs", "rank": 33, "score": 47972.165901496075 }, { "content": "fn main() {\n\n if let Err(error) = run() {\n\n let error: Box<dyn std::error::Error> = Box::new(error);\n\n let user_facing_error: UserFacingError = error.into();\n\n user_facing_error.print_and_exit();\n\n }\n\n}\n", "file_path": "vm-client/src/main.rs", "rank": 34, "score": 46804.18557467917 }, { "content": 
"fn main() {\n\n let matches = App::new(\"Virtual Machine\")\n\n .version(\"1.0\")\n\n .author(\"feenk gmbh. <[email protected]>\")\n\n .setting(AppSettings::AllowExternalSubcommands)\n\n .setting(AppSettings::ColoredHelp)\n\n .arg(\n\n Arg::new(\"image\")\n\n .value_name(\"image\")\n\n .index(1)\n\n .required(true)\n\n .about(\"A path to an image file to run\"),\n\n )\n\n .arg(\n\n Arg::new(\"interactive\")\n\n .long(\"interactive\")\n\n .about(\"Start image in the interactive (UI) mode\"),\n\n )\n\n .get_matches();\n\n\n", "file_path": "vm-client/src/main_cli.rs", "rank": 35, "score": 45725.36405143606 }, { "content": "fn run() -> Result<()> {\n\n // we should read options and canonicalize the image path before changing current directory\n\n let mut options: AppOptions = AppOptions::parse();\n\n options.canonicalize()?;\n\n\n\n #[cfg(target_os = \"macos\")]\n\n if let Err(error) = platform::mac::translocation::un_translocate() {\n\n error!(\"Failed to un-translocate the app due to {}\", error);\n\n }\n\n\n\n let application = Application::new(options)?;\n\n application.start()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "vm-client/src/main.rs", "rank": 36, "score": 43725.3581640106 }, { "content": "use crate::build_support::Builder;\n\n\n\nuse file_matcher::OneEntry;\n\nuse std::fmt;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Default, Clone)]\n\npub struct LinuxBuilder;\n\n\n\nimpl Debug for LinuxBuilder {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.print_directories(f)\n\n }\n\n}\n\n\n\nimpl Builder for LinuxBuilder {\n\n fn vm_binary(&self) -> PathBuf {\n\n self.compiled_libraries_directory()\n\n .join(\"libPharoVMCore.so\")\n", "file_path": "vm-bindings/build_support/linux.rs", "rank": 48, "score": 14.53324544924781 }, { "content": "use crate::build_support::Builder;\n\n\n\nuse file_matcher::OneEntry;\n\nuse std::fmt;\n\nuse std::fmt::{Debug, Formatter};\n\nuse 
std::path::PathBuf;\n\n\n\n#[derive(Default, Clone)]\n\npub struct MacBuilder;\n\n\n\nimpl Debug for MacBuilder {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.print_directories(f)\n\n }\n\n}\n\n\n\nimpl Builder for MacBuilder {\n\n fn vm_binary(&self) -> PathBuf {\n\n self.compiled_libraries_directory()\n\n .join(\"libPharoVMCore.dylib\")\n", "file_path": "vm-bindings/build_support/mac.rs", "rank": 49, "score": 14.44993252893391 }, { "content": "use crate::build_support::Builder;\n\nuse file_matcher::{FileNamed, OneEntry};\n\nuse std::fmt;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::Command;\n\nuse titlecase::titlecase;\n\n\n\n#[derive(Default, Clone)]\n\npub struct WindowsBuilder {}\n\n\n\nimpl Debug for WindowsBuilder {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.print_directories(f)\n\n }\n\n}\n\n\n\nimpl WindowsBuilder {\n\n fn pthreads_directory(&self) -> PathBuf {\n\n self.output_directory().join(\"pthreads\")\n", "file_path": "vm-bindings/build_support/windows.rs", "rank": 50, "score": 14.206576320637982 }, { "content": "mod builder;\n\npub use builder::Builder;\n\n\n\nmod linux;\n\nmod mac;\n\nmod windows;\n\n\n\npub use linux::LinuxBuilder;\n\npub use mac::MacBuilder;\n\npub use windows::WindowsBuilder;\n", "file_path": "vm-bindings/build_support/mod.rs", "rank": 51, "score": 11.730431658216018 }, { "content": " None\n\n }\n\n }\n\n}\n\n\n\npub(crate) trait IfBoolSome {\n\n fn if_true_some<V>(self, v: V) -> Option<V>;\n\n fn if_false_some<V>(self, v: V) -> Option<V>;\n\n fn if_true_then_some<V>(self, f: impl FnOnce() -> V) -> Option<V>;\n\n fn if_false_then_some<V>(self, f: impl FnOnce() -> V) -> Option<V>;\n\n}\n\n\n\nimpl IfBoolSome for bool {\n\n fn if_true_some<V>(self, v: V) -> Option<V> {\n\n self.into_option().and(Some(v))\n\n }\n\n\n\n fn if_false_some<V>(self, v: V) -> Option<V> {\n\n (!self).if_true_some(v)\n\n }\n", "file_path": 
"vm-bindings/src/prelude.rs", "rank": 52, "score": 9.676471785680036 }, { "content": "use crate::bindings::{\n\n vm_parameters_destroy, vm_parameters_parse, VMParameterVector as NativeVMParameterVector,\n\n VMParameters as NativeVMParameters,\n\n};\n\nuse crate::parameter_vector::{ImageParameters, VirtualMachineParameters};\n\nuse crate::prelude::{Handle, NativeAccess, NativeDrop};\n\nuse std::ffi::{CStr, CString};\n\nuse std::fmt;\n\nuse std::mem::forget;\n\nuse std::os::raw::{c_char, c_void};\n\n\n\npub type VMParameters = Handle<NativeVMParameters>;\n\n\n\nimpl NativeDrop for NativeVMParameters {\n\n fn drop(&mut self) {\n\n unsafe {\n\n vm_parameters_destroy(self);\n\n }\n\n }\n\n}\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 53, "score": 8.940896088065458 }, { "content": "\n\n pub fn arguments(&self) -> Vec<String> {\n\n let args_ptr = self.native().processArgv as *mut *mut c_char;\n\n let args_length = self.native().processArgc as usize;\n\n\n\n let arg_ptrs: Vec<*mut c_char> =\n\n unsafe { Vec::from_raw_parts(args_ptr, args_length, args_length) };\n\n let arguments: Vec<String> = arg_ptrs\n\n .iter()\n\n .map(|each| unsafe { CStr::from_ptr(*each).to_string_lossy().into_owned() })\n\n .collect();\n\n\n\n std::mem::forget(arg_ptrs);\n\n arguments\n\n }\n\n\n\n pub fn has_arguments(&self) -> bool {\n\n self.native().processArgc > 0\n\n }\n\n\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 54, "score": 8.338797389708354 }, { "content": "\n\n pub fn eden_size(&self) -> usize {\n\n self.native().edenSize as usize\n\n }\n\n\n\n pub fn image_parameters(&self) -> &ImageParameters {\n\n ImageParameters::borrow_from_native(&self.native().imageParameters)\n\n }\n\n\n\n pub fn virtual_machine_parameters(&self) -> &VirtualMachineParameters {\n\n VirtualMachineParameters::borrow_from_native(&self.native().imageParameters)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for VMParameters {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n 
f.debug_struct(\"VMParameters\")\n\n .field(\"image_file_name\", &self.image_file_name())\n\n .field(\"arguments\", &self.arguments())\n\n .field(\"is_default_image\", &self.is_default_image())\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 55, "score": 8.185986932457476 }, { "content": " use std::ffi::OsString;\n\n use std::os::windows::ffi::{OsStrExt, OsStringExt};\n\n\n\n let mut path = path.as_ref().to_path_buf().into_os_string();\n\n\n\n let wchars = path\n\n .encode_wide()\n\n .map(|wchar| {\n\n if wchar == b'\\\\' as u16 {\n\n '/' as u16\n\n } else {\n\n wchar\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n path = OsString::from_wide(&wchars);\n\n PathBuf::from(path)\n\n }\n\n #[cfg(not(windows))]\n\n {\n", "file_path": "vm-bindings/build_support/windows.rs", "rank": 56, "score": 8.016696847023802 }, { "content": "\n\n let c_str: &CStr = unsafe { CStr::from_ptr(self.native().imageFileName) };\n\n let str_slice: &str = c_str.to_str().unwrap();\n\n str_slice.to_owned()\n\n }\n\n\n\n pub fn set_image_file_name<P: Into<String>>(&mut self, file_name: P) {\n\n let new_image_name: String = file_name.into();\n\n\n\n if self.image_file_name() == new_image_name {\n\n return;\n\n }\n\n\n\n let previous_file_name = self.native().imageFileName as *mut c_void;\n\n unsafe { crate::bindings::free(previous_file_name) };\n\n\n\n let c_str = CString::new(new_image_name).unwrap();\n\n self.native_mut().imageFileName = c_str.into_raw();\n\n self.native_mut().isDefaultImage = false;\n\n }\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 57, "score": 7.812180188406414 }, { "content": " let previous_args = previous_arg_ptrs\n\n .iter()\n\n .map(|each| unsafe { CString::from_raw(*each) })\n\n .collect::<Vec<CString>>();\n\n\n\n drop(previous_args);\n\n drop(previous_arg_ptrs);\n\n }\n\n\n\n self.native_mut().processArgv = args_ptr;\n\n self.native_mut().processArgc = args_length;\n\n }\n\n\n\n pub fn is_default_image(&self) -> bool {\n\n 
self.native().isDefaultImage\n\n }\n\n\n\n pub fn is_default_image_found(&self) -> bool {\n\n self.native().defaultImageFound\n\n }\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 58, "score": 7.643179845912693 }, { "content": " fn construct(construct: impl FnOnce(*mut NT)) -> Self {\n\n Self::try_construct(|i| {\n\n construct(i);\n\n true\n\n })\n\n .unwrap()\n\n }\n\n\n\n fn try_construct(construct: impl FnOnce(*mut NT) -> bool) -> Option<Self> {\n\n self::try_construct(construct).map(Self::from_native_c)\n\n }\n\n}\n\n\n\npub(crate) trait NativeTransmutableSliceAccess<NT: Sized> {\n\n fn native(&self) -> &[NT];\n\n fn native_mut(&mut self) -> &mut [NT];\n\n}\n\n\n\nimpl<NT, ElementT> NativeTransmutableSliceAccess<NT> for [ElementT]\n\nwhere\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 59, "score": 7.591846208244408 }, { "content": "use crate::{ApplicationError, Result};\n\nuse clap::{AppSettings, Clap};\n\nuse std::path::{Path, PathBuf};\n\n\n\n#[derive(Clap, Clone, Debug)]\n\n#[clap(version = \"1.0\", author = \"feenk gmbh <[email protected]>\")]\n\n#[clap(setting = AppSettings::ColoredHelp)]\n\npub struct AppOptions {\n\n /// A path to a custom Pharo .image to use instead of automatically detecting one\n\n #[clap(long, parse(from_os_str))]\n\n image: Option<PathBuf>,\n\n}\n\n\n\nimpl AppOptions {\n\n pub fn canonicalize(&mut self) -> Result<()> {\n\n if let Some(ref image) = self.image {\n\n if !image.exists() {\n\n return ApplicationError::ImageFileDoesNotExist(image.clone()).into();\n\n }\n\n self.image = Some(to_absolute::canonicalize(image)?);\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn image(&self) -> Option<&Path> {\n\n self.image.as_ref().map(|image| image.as_path())\n\n }\n\n}\n", "file_path": "vm-client/src/application_options.rs", "rank": 60, "score": 7.579784528965964 }, { "content": " let mut parameters = VMParameters::from_args(vm_args);\n\n parameters.set_image_file_name(self.image.as_os_str().to_str().unwrap().to_owned());\n\n 
parameters.set_is_interactive_session(true);\n\n\n\n VM::start(parameters).unwrap();\n\n Ok(())\n\n }\n\n\n\n pub fn executable_name(&self) -> Result<String> {\n\n let executable_path = self.executable_path()?;\n\n let executable_name = executable_path\n\n .file_name()\n\n .ok_or_else(|| ApplicationError::FailedToGetFileName(executable_path.clone()))?;\n\n let executable_name = executable_name.to_str().ok_or_else(|| {\n\n ApplicationError::FailedToConvertOsString(executable_name.to_os_string())\n\n })?;\n\n Ok(executable_name.to_string())\n\n }\n\n\n\n pub fn executable_path(&self) -> Result<PathBuf> {\n", "file_path": "vm-client/src/application.rs", "rank": 61, "score": 7.473186109399805 }, { "content": "use crate::bindings::{\n\n firstIndexableField, getHandler, instantiateClassindexableSize, isVMRunOnWorkerThread,\n\n readAddress, signalSemaphoreWithIndex, sqInt, usqInt, BytesPerWord, VirtualMachine,\n\n};\n\n\n\nuse std::sync::mpsc::{Sender, Receiver, RecvError};\n\n\n\nuse crate::cointerp::{\n\n instantiateClassindexableSizeisPinned, marshallAndPushReturnValueFromofTypepoping,\n\n marshallArgumentFromatIndexintoofTypewithSize, numSlotsOf,\n\n};\n\nuse crate::prelude::NativeTransmutable;\n\nuse libc::c_char;\n\nuse libc::c_int;\n\nuse libffi::low::{ffi_cif, ffi_type, CodePtr};\n\n\n\nuse std::intrinsics::transmute;\n\nuse std::os::raw::c_void;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::fmt::Debug;\n\n\n\n#[derive(Debug)]\n\npub struct GToolkitVM {\n\n sender: Sender<GToolkitVMRequest>,\n\n receiver: &'static Receiver<GToolkitVMRequest>,\n\n interpreter: VirtualMachine,\n\n}\n\n\n", "file_path": "experimental/src/vm.rs", "rank": 62, "score": 7.2979503047799215 }, { "content": "use std::ffi::OsString;\n\nuse std::path::PathBuf;\n\nuse thiserror::Error;\n\n\n\npub type Result<T> = core::result::Result<T, ApplicationError>;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ApplicationError {\n\n #[error(\"Input/Output error\")]\n\n IoError(#[from] std::io::Error),\n\n 
#[cfg(target_os = \"macos\")]\n\n #[error(\"Failed to load the library\")]\n\n LibLoadingError(#[from] libloading::Error),\n\n #[error(\"Failed to canonicalize a path\")]\n\n CanonicalizationError(#[from] to_absolute::Error),\n\n #[error(\"Failed to detect if the executable is translocated\")]\n\n FailedToDetectIfTranslocated,\n\n #[error(\"Failed to detect the original translocated path\")]\n\n FailedToDetectOriginalTranslocatedPath,\n\n #[error(\"Failed to open terminal\")]\n", "file_path": "vm-client/src/error.rs", "rank": 63, "score": 7.117065901206686 }, { "content": "use nfd2::{dialog, Response};\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "vm-client/src/image_finder.rs", "rank": 64, "score": 7.116893635940901 }, { "content": " let mut bool_is_translocated = false;\n\n\n\n if is_translocated_fn(\n\n url,\n\n &mut bool_is_translocated as *mut _,\n\n std::ptr::null_mut(),\n\n ) {\n\n } else {\n\n return Err(ApplicationError::FailedToDetectIfTranslocated);\n\n }\n\n Ok(bool_is_translocated)\n\n }\n\n }\n\n\n\n pub fn original_location(&self) -> Result<Option<PathBuf>> {\n\n unsafe {\n\n let original_path_fn: Symbol<\n\n unsafe extern \"C\" fn(CFURLRef, *mut CFErrorRef) -> CFURLRef,\n\n > = match self.library.get(b\"SecTranslocateCreateOriginalPathForURL\") {\n\n Ok(func) => func,\n", "file_path": "vm-client/src/platform/mac/translocation.rs", "rank": 65, "score": 7.0137316362942235 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\npub mod bindings;\n\npub mod cointerp;\n\npub mod prelude;\n\npub mod vm;\n\n\n\nuse std::ffi::CString;\n\n\n\nuse libc::c_char;\n\nuse libc::c_int;\n\nuse libffi::low::{ffi_cif, ffi_type, CodePtr};\n\n\n\nuse crate::bindings::{\n\n calloc, checkedLongAtput, firstIndexableField, free, instantiateClassindexableSize,\n\n isVMRunOnWorkerThread, loadModuleHandle, malloc, memcpy, methodArgumentCount,\n\n signalSemaphoreWithIndex, sqGetInterpreterProxy, sqInt, stackObjectValue, usqInt,\n\n 
vm_main_with_parameters, vm_parameters_parse, vm_run_interpreter, BytesPerWord,\n\n VMParameterVector, VirtualMachine, TRUE,\n", "file_path": "experimental/src/lib.rs", "rank": 66, "score": 6.918152678530301 }, { "content": " }\n\n\n\n /// Converts a pointer to a mutable native value into a pointer to the mutable Rust value.\n\n #[allow(unused)]\n\n pub(crate) fn from_native_ptr_mut(np: *mut N) -> *mut Self {\n\n np as _\n\n }\n\n\n\n /// Constructs a C++ object in place by calling a\n\n /// function that expects a pointer that points to\n\n /// uninitialized memory of the native type.\n\n pub(crate) fn construct(construct: impl FnOnce(*mut N)) -> Self {\n\n Self::try_construct(|i| {\n\n construct(i);\n\n true\n\n })\n\n .unwrap()\n\n }\n\n\n\n pub(crate) fn try_construct(construct: impl FnOnce(*mut N) -> bool) -> Option<Self> {\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 67, "score": 6.842626127212889 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\nuse vm_bindings::{VMParameters, VM};\n\n\n\nuse crate::image_finder::{pick_image_with_dialog, search_image_file_within_directories};\n\nuse crate::working_directory::executable_working_directory;\n\nuse crate::AppOptions;\n\nuse crate::{ApplicationError, Result};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Application {\n\n options: AppOptions,\n\n working_directory: PathBuf,\n\n image: PathBuf,\n\n}\n\n\n\nimpl Application {\n\n pub fn new(options: AppOptions) -> Result<Self> {\n\n let mut image = options.image().map(|image| image.to_path_buf());\n\n if image.is_none() {\n", "file_path": "vm-client/src/application.rs", "rank": 68, "score": 6.838919998154634 }, { "content": "mod bindings;\n\nmod parameter_vector;\n\nmod parameters;\n\nmod prelude;\n\nmod vm;\n\n\n\npub use parameters::VMParameters;\n\npub use vm::VM;\n", "file_path": "vm-bindings/src/lib.rs", "rank": 69, "score": 6.565824578021897 }, { "content": " ptr::NonNull::new(ptr).map(Self)\n\n }\n\n\n\n pub(crate) fn into_ptr(self) -> *mut N 
{\n\n let p = self.0.as_ptr();\n\n mem::forget(self);\n\n p\n\n }\n\n}\n\n\n\n/// A trait that consumes self and converts it to a ptr to the native type.\n\npub(crate) trait IntoPtr<N> {\n\n fn into_ptr(self) -> *mut N;\n\n}\n\n\n\n/// A trait that consumes self and converts it to a ptr to the native type or null.\n\npub(crate) trait IntoPtrOrNull<N> {\n\n fn into_ptr_or_null(self) -> *mut N;\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 70, "score": 6.551185333087829 }, { "content": "use crate::bindings::VMParameterVector as NativeVMParameterVector;\n\nuse crate::prelude::{transmute_ref, NativeAccess};\n\nuse std::ffi::CStr;\n\n\n\n#[repr(transparent)]\n\npub struct VirtualMachineParameters(NativeVMParameterVector);\n\n\n\nimpl NativeAccess<NativeVMParameterVector> for VirtualMachineParameters {\n\n fn native(&self) -> &NativeVMParameterVector {\n\n &self.0\n\n }\n\n\n\n fn native_mut(&mut self) -> &mut NativeVMParameterVector {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl VirtualMachineParameters {\n\n pub fn len(&self) -> usize {\n\n self.native().count as usize\n", "file_path": "vm-bindings/src/parameter_vector.rs", "rank": 71, "score": 6.412048478162866 }, { "content": " .parent()\n\n .ok_or_else(|| ApplicationError::NoParentDirectory(image.clone()))?\n\n .to_path_buf();\n\n\n\n Ok(Self {\n\n options,\n\n working_directory,\n\n image,\n\n })\n\n }\n\n\n\n pub fn start(&self) -> Result<()> {\n\n std::env::set_current_dir(self.working_directory.as_path())?;\n\n\n\n let executable_path = std::env::current_exe()?;\n\n\n\n let mut vm_args: Vec<String> = vec![];\n\n vm_args.push(executable_path.as_os_str().to_str().unwrap().to_owned());\n\n vm_args.push(self.image.as_os_str().to_str().unwrap().to_owned());\n\n\n", "file_path": "vm-client/src/application.rs", "rank": 72, "score": 6.373046432397253 }, { "content": "// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n// SOFTWARE.\n\n\n\n// See 
https://github.com/rust-skia/rust-skia\n\n// Licence https://github.com/rust-skia/rust-skia/blob/master/LICENSE\n\n#![allow(dead_code)]\n\n\n\nuse std::hash::{Hash, Hasher};\n\nuse std::mem::MaybeUninit;\n\nuse std::ops::{Deref, DerefMut, Index, IndexMut};\n\nuse std::{mem, ptr, slice};\n\n// Re-export TryFrom / TryInto to make them available in all modules that use prelude::*.\n\npub use std::convert::{TryFrom, TryInto};\n\nuse std::marker::PhantomData;\n\n\n\n/// Swiss army knife to convert any reference into any other.\n\npub(crate) unsafe fn transmute_ref<FromT, ToT>(from: &FromT) -> &ToT {\n\n // TODO: can we do this statically for all instantiations of transmute_ref?\n\n debug_assert_eq!(mem::size_of::<FromT>(), mem::size_of::<ToT>());\n\n &*(from as *const FromT as *const ToT)\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 73, "score": 6.363357406346259 }, { "content": "}\n\n\n\nimpl<N: NativeDrop> ReplaceWith<Handle<N>> for N {\n\n fn replace_with(&mut self, other: Handle<N>) -> Handle<N> {\n\n other.replace_native(self)\n\n }\n\n}\n\n\n\n/// Constructs a C++ object in place by calling a lambda that is meant to initialize\n\n/// the pointer to the Rust memory provided as a pointer.\n\npub(crate) fn construct<N>(construct: impl FnOnce(*mut N)) -> N {\n\n try_construct(|i| {\n\n construct(i);\n\n true\n\n })\n\n .unwrap()\n\n}\n\n\n\npub(crate) fn try_construct<N>(construct: impl FnOnce(*mut N) -> bool) -> Option<N> {\n\n let mut instance = MaybeUninit::uninit();\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 74, "score": 6.325061687981472 }, { "content": " }\n\n }\n\n\n\n pub fn as_vec(&self) -> Vec<String> {\n\n self.iter().collect()\n\n }\n\n\n\n pub(crate) fn borrow_from_native(native: &NativeVMParameterVector) -> &Self {\n\n unsafe { transmute_ref(native) }\n\n }\n\n}\n\n\n\npub struct ParametersVectorIterator<'a> {\n\n vector: &'a NativeVMParameterVector,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Iterator for ParametersVectorIterator<'a> 
{\n\n type Item = String;\n\n fn next(&mut self) -> Option<String> {\n", "file_path": "vm-bindings/src/parameter_vector.rs", "rank": 75, "score": 6.290588231557357 }, { "content": "#![windows_subsystem = \"windows\"]\n\n\n\nextern crate dirs;\n\nextern crate nfd2;\n\nextern crate thiserror;\n\nextern crate vm_bindings;\n\n#[macro_use]\n\nextern crate log;\n\n\n\nmod error;\n\nmod image_finder;\n\n\n\nmod application;\n\nmod application_options;\n\nmod platform;\n\nmod working_directory;\n\n\n\nuse clap::Clap;\n\n\n\nuse user_error::{UserFacingError, UFE};\n\n\n\npub use crate::application::Application;\n\npub use crate::application_options::AppOptions;\n\npub use crate::error::*;\n\n\n", "file_path": "vm-client/src/main.rs", "rank": 76, "score": 6.273934776635405 }, { "content": " Library::new(\"/System/Library/Frameworks/Security.framework/Security\")?;\n\n Ok(Self {\n\n library: security_lib,\n\n })\n\n }\n\n }\n\n\n\n pub fn is_translocated(&self) -> Result<bool> {\n\n unsafe {\n\n let is_translocated_fn: Symbol<\n\n unsafe extern \"C\" fn(CFURLRef, *mut bool, *mut CFErrorRef) -> bool,\n\n > = match self.library.get(b\"SecTranslocateIsTranslocatedURL\") {\n\n Ok(func) => func,\n\n Err(_) => {\n\n return Ok(false);\n\n }\n\n };\n\n\n\n let bundle = CFBundleGetMainBundle();\n\n let url = CFBundleCopyBundleURL(bundle);\n", "file_path": "vm-client/src/platform/mac/translocation.rs", "rank": 77, "score": 6.168321896013727 }, { "content": "use crate::bindings::vm_main_with_parameters;\n\nuse crate::prelude::NativeAccess;\n\nuse crate::VMParameters;\n\nuse std::error::Error;\n\n\n\npub struct VM {}\n\n\n\nimpl VM {\n\n pub fn start(mut parameters: VMParameters) -> Result<(), Box<dyn Error>> {\n\n unsafe {\n\n vm_main_with_parameters(parameters.native_mut());\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "vm-bindings/src/vm.rs", "rank": 78, "score": 6.149296125447023 }, { "content": " if self.index < self.vector.count as usize {\n\n let slice = unsafe {\n\n 
std::slice::from_raw_parts_mut(self.vector.parameters, self.vector.count as usize)\n\n };\n\n let chars = unsafe { CStr::from_ptr(slice[self.index]) };\n\n let string = String::from(chars.to_string_lossy());\n\n self.index += 1;\n\n Some(string)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "vm-bindings/src/parameter_vector.rs", "rank": 79, "score": 6.051763449085561 }, { "content": "#![cfg(target_os = \"macos\")]\n\n\n\nuse crate::error::ApplicationError;\n\nuse crate::Result;\n\nuse core_foundation::base::{Boolean, CFIndex};\n\nuse core_foundation::bundle::{CFBundleCopyBundleURL, CFBundleGetMainBundle};\n\nuse core_foundation::error::CFErrorRef;\n\nuse core_foundation::url::{CFURLGetFileSystemRepresentation, CFURLRef};\n\nuse libc::{c_char, strlen, PATH_MAX};\n\nuse libloading::{Library, Symbol};\n\nuse std::path::PathBuf;\n\n\n\n#[cfg(unix)]\n\nuse std::ffi::OsStr;\n\n#[cfg(unix)]\n\nuse std::os::unix::ffi::OsStrExt;\n\n\n\n/// Try to change the working directory back to the original location\n", "file_path": "vm-client/src/platform/mac/translocation.rs", "rank": 80, "score": 5.850766977340543 }, { "content": "\n\n pub fn is_interactive_session(&self) -> bool {\n\n self.native().isInteractiveSession\n\n }\n\n\n\n pub fn set_is_interactive_session(&mut self, is_interactive_session: bool) {\n\n self.native_mut().isInteractiveSession = is_interactive_session;\n\n }\n\n\n\n pub fn max_stack_frames_to_print(&self) -> usize {\n\n self.native().maxStackFramesToPrint as usize\n\n }\n\n\n\n pub fn max_old_space_size(&self) -> usize {\n\n self.native().maxOldSpaceSize as usize\n\n }\n\n\n\n pub fn max_code_size(&self) -> usize {\n\n self.native().maxCodeSize as usize\n\n }\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 81, "score": 5.835081883621386 }, { "content": "\n\n fn if_true_then_some<V>(self, f: impl FnOnce() -> V) -> Option<V> {\n\n self.into_option().map(|()| f())\n\n }\n\n\n\n fn if_false_then_some<V>(self, f: impl FnOnce() -> V) -> 
Option<V> {\n\n (!self).into_option().map(|()| f())\n\n }\n\n}\n\n\n\n/// Trait that enables access to a native representation of a wrapper type.\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 82, "score": 5.8251219762315385 }, { "content": "}\n\n\n\npub(crate) unsafe fn transmute_ref_mut<FromT, ToT>(from: &mut FromT) -> &mut ToT {\n\n // TODO: can we do this statically for all instantiations of transmute_ref_mut?\n\n debug_assert_eq!(mem::size_of::<FromT>(), mem::size_of::<ToT>());\n\n &mut *(from as *mut FromT as *mut ToT)\n\n}\n\n\n\npub(crate) trait IntoOption {\n\n type Target;\n\n fn into_option(self) -> Option<Self::Target>;\n\n}\n\n\n\nimpl<T> IntoOption for *const T {\n\n type Target = *const T;\n\n\n\n fn into_option(self) -> Option<Self::Target> {\n\n if !self.is_null() {\n\n Some(self)\n\n } else {\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 83, "score": 5.779191617124769 }, { "content": " println!(\"GT woke up\");\n\n }\n\n }\n\n }\n\n\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct GToolkitCallout {\n\n pub(crate) cif: *mut ffi_cif,\n\n pub(crate) func: CodePtr,\n\n pub(crate) args: Option<*mut *mut c_void>,\n\n pub(crate) result: Option<*mut c_void>,\n\n pub(crate) semaphore: sqInt,\n\n}\n\n\n\nimpl GToolkitCallout {\n\n pub fn call(&self) {\n\n unsafe {\n", "file_path": "experimental/src/vm.rs", "rank": 84, "score": 5.500881477030909 }, { "content": " unsafe {\n\n vm_parameters_parse(\n\n default_parameters.native().processArgc,\n\n default_parameters.native().processArgv,\n\n default_parameters.native_mut(),\n\n )\n\n };\n\n }\n\n\n\n default_parameters\n\n }\n\n\n\n pub fn from_env_args() -> Self {\n\n Self::from_args(std::env::args().collect())\n\n }\n\n\n\n pub fn image_file_name(&self) -> String {\n\n if self.native().imageFileName.is_null() {\n\n return \"\".to_string();\n\n }\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 85, "score": 5.402262085225537 }, { "content": "\n\nimpl VMParameters 
{\n\n pub fn from_args<P: AsRef<str>>(arguments: Vec<P>) -> Self {\n\n let vars = std::env::vars()\n\n .map(|arg| CString::new(format!(\"{}={}\", arg.0, arg.1)).unwrap())\n\n .collect::<Vec<CString>>();\n\n\n\n let mut c_vars = vars\n\n .iter()\n\n .map(|arg| arg.as_ptr())\n\n .collect::<Vec<*const c_char>>();\n\n\n\n let mut default_parameters = Self::default();\n\n default_parameters.set_arguments(arguments);\n\n default_parameters.native_mut().environmentVector = c_vars.as_mut_ptr();\n\n\n\n forget(vars);\n\n forget(c_vars);\n\n\n\n if default_parameters.has_arguments() {\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 86, "score": 5.374439404128891 }, { "content": " }\n\n}\n\n\n\nunsafe impl Send for GToolkitVM {}\n\nunsafe impl Sync for GToolkitVM {}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\n#[repr(transparent)]\n\npub struct ObjectPointer(sqInt);\n\nimpl NativeTransmutable<sqInt> for ObjectPointer {}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\n#[repr(transparent)]\n\npub struct ObjectFieldIndex(sqInt);\n\nimpl NativeTransmutable<sqInt> for ObjectFieldIndex {}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\n#[repr(transparent)]\n\npub struct StackOffset(sqInt);\n\nimpl NativeTransmutable<sqInt> for StackOffset {}\n", "file_path": "experimental/src/vm.rs", "rank": 87, "score": 5.371101044609237 }, { "content": " }\n\n\n\n pub fn iter(&self) -> ParametersVectorIterator {\n\n ParametersVectorIterator {\n\n vector: self.native(),\n\n index: 0,\n\n }\n\n }\n\n\n\n pub fn as_vec(&self) -> Vec<String> {\n\n self.iter().collect()\n\n }\n\n\n\n pub(crate) fn borrow_from_native(native: &NativeVMParameterVector) -> &Self {\n\n unsafe { transmute_ref(native) }\n\n }\n\n}\n\n\n\n#[repr(transparent)]\n\npub struct ImageParameters(NativeVMParameterVector);\n", "file_path": "vm-bindings/src/parameter_vector.rs", "rank": 88, "score": 5.317171756574295 }, { "content": " pub fn set_arguments<P: AsRef<str>>(&mut self, arguments: Vec<P>) {\n\n // create a vector of zero terminated 
strings\n\n let mut args = arguments\n\n .iter()\n\n .map(|each| each.as_ref().to_string())\n\n .map(|each| CString::into_raw(CString::new(each).unwrap()))\n\n .collect::<Vec<*mut c_char>>();\n\n\n\n args.shrink_to_fit();\n\n\n\n let args_ptr = args.as_ptr() as *mut *const c_char;\n\n let args_length = args.len() as i32;\n\n std::mem::forget(args);\n\n\n\n if !self.native().processArgv.is_null() {\n\n let previous_ptr = self.native().processArgv as *mut *mut c_char;\n\n let previous_length = self.native().processArgc as usize;\n\n\n\n let previous_arg_ptrs: Vec<*mut c_char> =\n\n unsafe { Vec::from_raw_parts(previous_ptr, previous_length, previous_length) };\n", "file_path": "vm-bindings/src/parameters.rs", "rank": 89, "score": 5.316552229126902 }, { "content": " pub unsafe fn release(self) -> H {\n\n self.0\n\n }\n\n}\n\n\n\npub(crate) trait BorrowsFrom: Sized {\n\n fn borrows<D: ?Sized>(self, _dep: &D) -> Borrows<Self>;\n\n}\n\n\n\nimpl<T: Sized> BorrowsFrom for T {\n\n fn borrows<D: ?Sized>(self, _dep: &D) -> Borrows<Self> {\n\n Borrows(self, PhantomData)\n\n }\n\n}\n\n\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 90, "score": 5.074311112392075 }, { "content": " let executable = std::env::args()\n\n .next()\n\n .ok_or_else(|| ApplicationError::NoExecutableInArguments)?;\n\n Ok(PathBuf::from(executable))\n\n }\n\n\n\n pub fn working_directory(&self) -> &Path {\n\n self.working_directory.as_path()\n\n }\n\n\n\n pub fn process_arguments(&self) -> Vec<String> {\n\n std::env::args().collect()\n\n }\n\n}\n", "file_path": "vm-client/src/application.rs", "rank": 91, "score": 5.05731699929757 }, { "content": " }\n\n }\n\n\n\n pub fn is_on_worker_thread(&self) -> bool {\n\n unsafe { isVMRunOnWorkerThread() != 0 }\n\n }\n\n\n\n pub fn send(&self, request: GToolkitVMRequest) {\n\n self.sender.send(request);\n\n }\n\n\n\n pub fn call(&self, callout: GToolkitCallout) {\n\n println!(\"Requesting callout {:?}\", &callout);\n\n 
self.send(GToolkitVMRequest::Call(callout));\n\n }\n\n\n\n pub fn wake_up(&self) {\n\n println!(\"Sending wake up\");\n\n self.send(GToolkitVMRequest::WakeUp);\n\n }\n", "file_path": "experimental/src/vm.rs", "rank": 92, "score": 5.0532462263455145 }, { "content": "}\n\n\n\npub(crate) trait NativePointerOrNullMut2<N> {\n\n fn native_ptr_or_null_mut(&mut self) -> *mut N;\n\n}\n\n\n\npub(crate) trait NativePointerOrNull2<N> {\n\n fn native_ptr_or_null(&self) -> *const N;\n\n}\n\n\n\nimpl<H, N> NativePointerOrNull2<N> for Option<&H>\n\nwhere\n\n H: NativeTransmutable<N>,\n\n{\n\n fn native_ptr_or_null(&self) -> *const N {\n\n match self {\n\n Some(handle) => handle.native(),\n\n None => ptr::null(),\n\n }\n\n }\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 93, "score": 5.0294010009850245 }, { "content": "use crate::bindings::{sqInt, usqInt};\n\nuse libffi::low::ffi_type;\n\n\n\nextern \"C\" {\n\n pub fn numSlotsOf(oop: sqInt) -> sqInt;\n\n}\n\n\n\nextern \"C\" {\n\n pub fn marshallArgumentFromatIndexintoofTypewithSize(\n\n argumentsArrayOop: sqInt,\n\n i: sqInt,\n\n argHolder: sqInt,\n\n argType: sqInt,\n\n argTypeSize: sqInt,\n\n );\n\n}\n\n\n\nextern \"C\" {\n\n pub fn marshallAndPushReturnValueFromofTypepoping(\n\n returnHolder: sqInt,\n", "file_path": "experimental/src/cointerp.rs", "rank": 94, "score": 4.969066492488202 }, { "content": "extern crate bindgen;\n\nextern crate cmake;\n\nextern crate file_matcher;\n\nextern crate fs_extra;\n\nextern crate titlecase;\n\nextern crate which;\n\n\n\nmod build_support;\n\nuse build_support::*;\n\n\n\n///\n\n/// Possible parameters\n\n/// - VM_CLIENT_VMMAKER to use a specific VM to run a VM Maker image\n", "file_path": "vm-bindings/build.rs", "rank": 95, "score": 4.965120106879324 }, { "content": "/// A trait that supports retrieving a pointer from an Option<Handle<Native>>.\n\n/// Returns a null pointer if the Option is None.\n\npub(crate) trait NativePointerOrNull<N> {\n\n fn native_ptr_or_null(&self) -> *const 
N;\n\n unsafe fn native_ptr_or_null_mut_force(&self) -> *mut N;\n\n}\n\n\n\npub(crate) trait NativePointerOrNullMut<N> {\n\n fn native_ptr_or_null_mut(&mut self) -> *mut N;\n\n}\n\n\n\nimpl<H, N> NativePointerOrNull<N> for Option<&H>\n\nwhere\n\n H: NativeAccess<N>,\n\n{\n\n fn native_ptr_or_null(&self) -> *const N {\n\n match self {\n\n Some(handle) => handle.native(),\n\n None => ptr::null(),\n\n }\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 96, "score": 4.841823397658942 }, { "content": " let image_path = match validate_user_image_file(matches.value_of(\"image\")) {\n\n None => {\n\n eprintln!(\"Could not find an .image file\");\n\n return;\n\n }\n\n Some(path) => path,\n\n };\n\n\n\n let mut vm_args: Vec<String> = vec![];\n\n vm_args.push(std::env::args().collect::<Vec<String>>()[0].to_owned());\n\n vm_args.push(image_path.as_os_str().to_str().unwrap().to_owned());\n\n\n\n if let Some((external, sub_m)) = matches.subcommand() {\n\n vm_args.push(external.to_owned());\n\n if let Some(values) = sub_m.values_of(\"\") {\n\n for each in values {\n\n vm_args.push(each.to_owned());\n\n }\n\n }\n\n }\n\n\n\n let mut parameters = VMParameters::from_args(vm_args);\n\n parameters.set_image_file_name(image_path.as_os_str().to_str().unwrap().to_owned());\n\n parameters.set_is_interactive_session(matches.is_present(\"interactive\"));\n\n\n\n VM::start(parameters).unwrap();\n\n}\n", "file_path": "vm-client/src/main_cli.rs", "rank": 97, "score": 4.81677438282408 }, { "content": " fn clone(&self) -> Self {\n\n Self::from_native_c(self.0.clone())\n\n }\n\n}\n\n\n\nimpl<N: NativeDrop + NativePartialEq> PartialEq for Handle<N> {\n\n fn eq(&self, rhs: &Self) -> bool {\n\n self.native().eq(rhs.native())\n\n }\n\n}\n\n\n\nimpl<N: NativeDrop + NativeHash> Hash for Handle<N> {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.native().hash(state);\n\n }\n\n}\n\n\n\npub(crate) trait NativeSliceAccess<N: NativeDrop> {\n\n fn native(&self) -> &[N];\n\n fn native_mut(&mut 
self) -> &mut [N];\n", "file_path": "vm-bindings/src/prelude.rs", "rank": 98, "score": 4.756807946374478 }, { "content": "\n\n pub fn instantiate_indexable_class_of_size(\n\n &self,\n\n class: ObjectPointer,\n\n size: usize,\n\n is_pinned: bool,\n\n ) -> ObjectPointer {\n\n let oop = unsafe {\n\n instantiateClassindexableSizeisPinned(\n\n class.into_native(),\n\n size as usqInt,\n\n is_pinned as sqInt,\n\n )\n\n };\n\n\n\n ObjectPointer::from_native_c(oop)\n\n }\n\n\n\n pub fn new_external_address(&self) -> ObjectPointer {\n\n let external_address_class = self.class_external_address();\n", "file_path": "experimental/src/vm.rs", "rank": 99, "score": 4.561465331129403 } ]
Rust
src/mock.rs
fossabot/client-rust
4540c1cdfa9a4d17ac15a1242daf32ef53a17133
use crate::{ pd::{PdClient, PdRpcClient, RetryClient}, request::DispatchHook, Config, Error, Key, Result, Timestamp, }; use fail::fail_point; use futures::future::{ready, BoxFuture, FutureExt}; use grpcio::CallOption; use kvproto::{errorpb, kvrpcpb, metapb, tikvpb::TikvClient}; use std::{future::Future, sync::Arc, time::Duration}; use tikv_client_store::{HasError, KvClient, KvConnect, Region, RegionId, Store}; pub async fn pd_rpc_client() -> PdRpcClient<MockKvConnect, MockCluster> { let config = Config::default(); PdRpcClient::new( &config, |_, _| MockKvConnect, |e, sm| { futures::future::ok(RetryClient::new_with_cluster( e, sm, config.timeout, MockCluster, )) }, ) .await .unwrap() } #[derive(Clone, Eq, PartialEq, Debug)] pub struct MockKvClient { addr: String, } pub struct MockKvConnect; pub struct MockCluster; pub struct MockPdClient; impl KvClient for MockKvClient { fn dispatch<Resp, RpcFuture>( &self, _request_name: &'static str, _fut: grpcio::Result<RpcFuture>, ) -> BoxFuture<'static, Result<Resp>> where RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>, Resp: HasError + Sized + Clone + Send + 'static, RpcFuture: Send + 'static, { unimplemented!() } fn get_rpc_client(&self) -> Arc<TikvClient> { unimplemented!() } } impl KvConnect for MockKvConnect { type KvClient = MockKvClient; fn connect(&self, address: &str) -> Result<Self::KvClient> { Ok(MockKvClient { addr: address.to_owned(), }) } } impl MockPdClient { pub fn region1() -> Region { let mut region = Region::default(); region.region.id = 1; region.region.set_start_key(vec![0]); region.region.set_end_key(vec![10]); let mut leader = metapb::Peer::default(); leader.store_id = 41; region.leader = Some(leader); region } pub fn region2() -> Region { let mut region = Region::default(); region.region.id = 2; region.region.set_start_key(vec![10]); region.region.set_end_key(vec![250, 250]); let mut leader = metapb::Peer::default(); leader.store_id = 42; region.leader = Some(leader); region } } 
impl PdClient for MockPdClient { type KvClient = MockKvClient; fn map_region_to_store( self: Arc<Self>, region: Region, ) -> BoxFuture<'static, Result<Store<Self::KvClient>>> { Box::pin(ready(Ok(Store::new( region, MockKvClient { addr: String::new(), }, Duration::from_secs(60), )))) } fn region_for_key(&self, key: &Key) -> BoxFuture<'static, Result<Region>> { let bytes: &[_] = key.into(); let region = if bytes.is_empty() || bytes[0] < 10 { Self::region1() } else { Self::region2() }; Box::pin(ready(Ok(region))) } fn region_for_id(&self, id: RegionId) -> BoxFuture<'static, Result<Region>> { let result = match id { 1 => Ok(Self::region1()), 2 => Ok(Self::region2()), _ => Err(Error::region_not_found(id)), }; Box::pin(ready(result)) } fn get_timestamp(self: Arc<Self>) -> BoxFuture<'static, Result<Timestamp>> { unimplemented!() } } impl DispatchHook for kvrpcpb::ResolveLockRequest { fn dispatch_hook( &self, _opt: CallOption, ) -> Option<BoxFuture<'static, Result<kvrpcpb::ResolveLockResponse>>> { fail_point!("region-error", |_| { let mut resp = kvrpcpb::ResolveLockResponse::default(); resp.region_error = Some(errorpb::Error::default()); Some(ready(Ok(resp)).boxed()) }); Some(ready(Ok(kvrpcpb::ResolveLockResponse::default())).boxed()) } }
use crate::{ pd::{PdClient, PdRpcClient, RetryClient}, request::DispatchHook, Config, Error, Key, Result, Timestamp, }; use fail::fail_point; use futures::future::{ready, BoxFuture, FutureExt}; use grpcio::CallOption; use kvproto::{errorpb, kvrpcpb, metapb, tikvpb::TikvClient}; use std::{future::Future, sync::Arc, time::Duration}; use tikv_client_store::{HasError, KvClient, KvConnect, Region, RegionId, Store}; pub async fn pd_rpc_client() -> PdRpcClient<MockKvConnect, MockCluster> { let config = Config::default(); PdRpcClient::new( &config, |_, _| MockKvConnect, |e, sm| { futures::future::ok(RetryClient::new_with_cluster( e, sm, config.timeout, MockCluster, )) }, ) .await .unwrap() } #[derive(Clone, Eq, PartialEq, Debug)] pub struct MockKvClient { addr: String, } pub struct MockKvConnect; pub struct MockCluster; pub struct MockPdClient; impl KvClient for MockKvClient { fn dispatch<Resp, RpcFuture>( &self, _request_name: &'st
_ => Err(Error::region_not_found(id)), }; Box::pin(ready(result)) } fn get_timestamp(self: Arc<Self>) -> BoxFuture<'static, Result<Timestamp>> { unimplemented!() } } impl DispatchHook for kvrpcpb::ResolveLockRequest { fn dispatch_hook( &self, _opt: CallOption, ) -> Option<BoxFuture<'static, Result<kvrpcpb::ResolveLockResponse>>> { fail_point!("region-error", |_| { let mut resp = kvrpcpb::ResolveLockResponse::default(); resp.region_error = Some(errorpb::Error::default()); Some(ready(Ok(resp)).boxed()) }); Some(ready(Ok(kvrpcpb::ResolveLockResponse::default())).boxed()) } }
atic str, _fut: grpcio::Result<RpcFuture>, ) -> BoxFuture<'static, Result<Resp>> where RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>, Resp: HasError + Sized + Clone + Send + 'static, RpcFuture: Send + 'static, { unimplemented!() } fn get_rpc_client(&self) -> Arc<TikvClient> { unimplemented!() } } impl KvConnect for MockKvConnect { type KvClient = MockKvClient; fn connect(&self, address: &str) -> Result<Self::KvClient> { Ok(MockKvClient { addr: address.to_owned(), }) } } impl MockPdClient { pub fn region1() -> Region { let mut region = Region::default(); region.region.id = 1; region.region.set_start_key(vec![0]); region.region.set_end_key(vec![10]); let mut leader = metapb::Peer::default(); leader.store_id = 41; region.leader = Some(leader); region } pub fn region2() -> Region { let mut region = Region::default(); region.region.id = 2; region.region.set_start_key(vec![10]); region.region.set_end_key(vec![250, 250]); let mut leader = metapb::Peer::default(); leader.store_id = 42; region.leader = Some(leader); region } } impl PdClient for MockPdClient { type KvClient = MockKvClient; fn map_region_to_store( self: Arc<Self>, region: Region, ) -> BoxFuture<'static, Result<Store<Self::KvClient>>> { Box::pin(ready(Ok(Store::new( region, MockKvClient { addr: String::new(), }, Duration::from_secs(60), )))) } fn region_for_key(&self, key: &Key) -> BoxFuture<'static, Result<Region>> { let bytes: &[_] = key.into(); let region = if bytes.is_empty() || bytes[0] < 10 { Self::region1() } else { Self::region2() }; Box::pin(ready(Ok(region))) } fn region_for_id(&self, id: RegionId) -> BoxFuture<'static, Result<Region>> { let result = match id { 1 => Ok(Self::region1()), 2 => Ok(Self::region2()),
random
[ { "content": "pub fn new_mvcc_get_request(key: impl Into<Key>, timestamp: Timestamp) -> kvrpcpb::GetRequest {\n\n let mut req = kvrpcpb::GetRequest::default();\n\n req.set_key(key.into().into());\n\n req.set_version(timestamp.version());\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::BatchGetRequest {\n\n type Result = Vec<KvPair>;\n\n type RpcResponse = kvrpcpb::BatchGetResponse;\n\n type KeyData = Vec<Key>;\n\n const REQUEST_NAME: &'static str = \"kv_batch_get\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::kv_batch_get_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(&self, keys: Self::KeyData, store: &Store<KvC>) -> Self {\n\n let mut req = self.request_from_store(store);\n\n req.set_keys(keys.into_iter().map(Into::into).collect());\n\n req.set_version(self.version);\n\n\n\n req\n", "file_path": "src/transaction/requests.rs", "rank": 0, "score": 260098.61649734195 }, { "content": "fn extract_errors(error_iter: impl Iterator<Item = Option<kvrpcpb::KeyError>>) -> Option<Error> {\n\n let errors: Vec<Error> = error_iter.flatten().map(Into::into).collect();\n\n if errors.is_empty() {\n\n None\n\n } else if errors.len() == 1 {\n\n Some(errors.into_iter().next().unwrap())\n\n } else {\n\n Some(Error::multiple_errors(errors))\n\n }\n\n}\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 1, "score": 216581.07915234807 }, { "content": "pub fn new_cleanup_request(key: impl Into<Key>, start_version: u64) -> kvrpcpb::CleanupRequest {\n\n let mut req = kvrpcpb::CleanupRequest::default();\n\n req.set_key(key.into().into());\n\n req.set_start_version(start_version);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::PrewriteRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::PrewriteResponse;\n\n type KeyData = Vec<kvrpcpb::Mutation>;\n\n const REQUEST_NAME: &'static str = \"kv_prewrite\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::kv_prewrite_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(\n\n 
&self,\n\n mutations: Self::KeyData,\n\n store: &Store<KvC>,\n\n ) -> Self {\n", "file_path": "src/transaction/requests.rs", "rank": 2, "score": 204042.57826495828 }, { "content": "pub fn pd_addrs() -> Vec<String> {\n\n var(ENV_PD_ADDRS)\n\n .expect(&format!(\"Expected {}:\", ENV_PD_ADDRS))\n\n .split(\",\")\n\n .map(From::from)\n\n .collect()\n\n}\n\n*/\n", "file_path": "src/proptests/mod.rs", "rank": 3, "score": 175032.26189665328 }, { "content": "pub fn store_stream_for_key<KeyData, PdC>(\n\n key_data: KeyData,\n\n pd_client: Arc<PdC>,\n\n) -> BoxStream<'static, Result<(KeyData, Store<PdC::KvClient>)>>\n\nwhere\n\n KeyData: AsRef<Key> + Send + 'static,\n\n PdC: PdClient,\n\n{\n\n pd_client\n\n .store_for_key(key_data.as_ref())\n\n .map_ok(move |store| (key_data, store))\n\n .into_stream()\n\n .boxed()\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 4, "score": 156882.96163810973 }, { "content": "pub fn store_stream_for_keys<KeyData, IntoKey, I, PdC>(\n\n key_data: I,\n\n pd_client: Arc<PdC>,\n\n) -> BoxStream<'static, Result<(Vec<KeyData>, Store<PdC::KvClient>)>>\n\nwhere\n\n KeyData: AsRef<Key> + Send + Sync + 'static,\n\n IntoKey: Into<KeyData> + 'static,\n\n I: IntoIterator<Item = IntoKey>,\n\n I::IntoIter: Send + Sync + 'static,\n\n PdC: PdClient,\n\n{\n\n pd_client\n\n .clone()\n\n .group_keys_by_region(key_data.into_iter().map(Into::into))\n\n .and_then(move |(region_id, key)| {\n\n pd_client\n\n .clone()\n\n .store_for_id(region_id)\n\n .map_ok(move |store| (key, store))\n\n })\n\n .boxed()\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 5, "score": 154802.94968153807 }, { "content": "pub trait HasRegionError {\n\n fn region_error(&mut self) -> Option<Error>;\n\n}\n\n\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 6, "score": 150951.52425535006 }, { "content": "pub trait HasError: HasRegionError {\n\n fn error(&mut self) -> Option<Error>;\n\n}\n\n\n\nmacro_rules! 
has_region_error {\n\n ($type:ty) => {\n\n impl HasRegionError for $type {\n\n fn region_error(&mut self) -> Option<Error> {\n\n if self.has_region_error() {\n\n Some(self.take_region_error().into())\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\nhas_region_error!(kvrpcpb::GetResponse);\n\nhas_region_error!(kvrpcpb::ScanResponse);\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 7, "score": 150775.58786043568 }, { "content": "fn pd_addrs() -> Vec<String> {\n\n env::var(ENV_PD_ADDRS)\n\n .expect(&format!(\"Expected {}:\", ENV_PD_ADDRS))\n\n .split(\",\")\n\n .map(From::from)\n\n .collect()\n\n}\n", "file_path": "tests/integration_tests.rs", "rank": 8, "score": 135692.97672929495 }, { "content": "pub fn get_tag_from_thread_name() -> Option<String> {\n\n thread::current()\n\n .name()\n\n .and_then(|name| name.split(\"::\").skip(1).last())\n\n .map(From::from)\n\n}\n\n\n\n/// Convert Duration to seconds.\n", "file_path": "tikv-client-common/src/util.rs", "rank": 9, "score": 133105.05496380126 }, { "content": "pub fn arb_batch<T: core::fmt::Debug>(\n\n single_strategy: impl Strategy<Value = T>,\n\n max_batch_size: impl Into<Option<usize>>,\n\n) -> impl Strategy<Value = Vec<T>> {\n\n let max_batch_size = max_batch_size.into().unwrap_or(PROPTEST_BATCH_SIZE_MAX);\n\n proptest::collection::vec(single_strategy, 0..max_batch_size)\n\n}\n\n\n", "file_path": "src/proptests/mod.rs", "rank": 10, "score": 131747.3568874893 }, { "content": "pub fn store_stream_for_range<PdC: PdClient>(\n\n range: BoundRange,\n\n pd_client: Arc<PdC>,\n\n) -> BoxStream<'static, Result<((Key, Key), Store<PdC::KvClient>)>> {\n\n pd_client\n\n .stores_for_range(range)\n\n .map_ok(move |store| {\n\n // FIXME should be bounded by self.range\n\n let range = store.region.range();\n\n (range, store)\n\n })\n\n .into_stream()\n\n .boxed()\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 11, "score": 117572.34931375597 }, { "content": "pub fn store_stream_for_ranges<PdC: 
PdClient>(\n\n ranges: Vec<BoundRange>,\n\n pd_client: Arc<PdC>,\n\n) -> BoxStream<'static, Result<(Vec<BoundRange>, Store<PdC::KvClient>)>> {\n\n pd_client\n\n .clone()\n\n .group_ranges_by_region(ranges)\n\n .and_then(move |(region_id, range)| {\n\n pd_client\n\n .clone()\n\n .store_for_id(region_id)\n\n .map_ok(move |store| (range, store))\n\n })\n\n .into_stream()\n\n .boxed()\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 12, "score": 117572.34931375597 }, { "content": "pub fn stream_fn<S, T, A, F, E>(initial_state: S, mut func: F) -> LoopFn<A, F>\n\nwhere\n\n F: FnMut(S) -> A,\n\n A: Future<Output = Result<Option<(S, T)>, E>>,\n\n{\n\n LoopFn {\n\n future: func(initial_state),\n\n func,\n\n }\n\n}\n\n\n\nimpl<S, T, A, F, E> Stream for LoopFn<A, F>\n\nwhere\n\n F: FnMut(S) -> A,\n\n A: Future<Output = Result<Option<(S, T)>, E>>,\n\n{\n\n type Item = Result<T, E>;\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n unsafe {\n\n let this = Pin::get_unchecked_mut(self);\n", "file_path": "tikv-client-common/src/compat.rs", "rank": 13, "score": 109543.58324741272 }, { "content": "type GrpcResult<T> = std::result::Result<T, grpcio::Error>;\n\n\n", "file_path": "tikv-client-pd/src/cluster.rs", "rank": 14, "score": 106418.63234670997 }, { "content": "pub trait KvClient {\n\n fn dispatch<Resp, RpcFuture>(\n\n &self,\n\n request_name: &'static str,\n\n fut: ::grpcio::Result<RpcFuture>,\n\n ) -> BoxFuture<'static, Result<Resp>>\n\n where\n\n RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>,\n\n Resp: HasError + Sized + Clone + Send + 'static,\n\n RpcFuture: Send + 'static;\n\n\n\n fn get_rpc_client(&self) -> Arc<TikvClient>;\n\n}\n\n\n\n/// This client handles requests for a single TiKV node. 
It converts the data\n\n/// types and abstractions of the client program into the grpc data types.\n\n#[derive(new, Clone)]\n\npub struct KvRpcClient {\n\n pub rpc_client: Arc<TikvClient>,\n\n}\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 15, "score": 102651.26076965961 }, { "content": "pub fn new_prewrite_request(\n\n mutations: Vec<kvrpcpb::Mutation>,\n\n primary_lock: Key,\n\n start_version: u64,\n\n lock_ttl: u64,\n\n) -> kvrpcpb::PrewriteRequest {\n\n let mut req = kvrpcpb::PrewriteRequest::default();\n\n req.set_mutations(mutations);\n\n req.set_primary_lock(primary_lock.into());\n\n req.set_start_version(start_version);\n\n req.set_lock_ttl(lock_ttl);\n\n // TODO: Lite resolve lock is currently disabled\n\n req.set_txn_size(std::u64::MAX);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::CommitRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::CommitResponse;\n", "file_path": "src/transaction/requests.rs", "rank": 16, "score": 99519.3054609654 }, { "content": "pub fn new_commit_request(\n\n keys: Vec<Key>,\n\n start_version: u64,\n\n commit_version: u64,\n\n) -> kvrpcpb::CommitRequest {\n\n let mut req = kvrpcpb::CommitRequest::default();\n\n req.set_keys(keys.into_iter().map(Into::into).collect());\n\n req.set_start_version(start_version);\n\n req.set_commit_version(commit_version);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::BatchRollbackRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::BatchRollbackResponse;\n\n type KeyData = Vec<Vec<u8>>;\n\n const REQUEST_NAME: &'static str = \"kv_batch_rollback\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::kv_batch_rollback_async_opt;\n\n\n", "file_path": "src/transaction/requests.rs", "rank": 17, "score": 99519.3054609654 }, { "content": "fn convert_to_bound_key<K: Into<Key>>(b: Bound<K>) -> Bound<Key> {\n\n match b {\n\n Bound::Included(k) => Bound::Included(k.into()),\n\n Bound::Excluded(k) => Bound::Excluded(k.into()),\n\n Bound::Unbounded 
=> Bound::Unbounded,\n\n }\n\n}\n", "file_path": "tikv-client-common/src/kv/bound_range.rs", "rank": 18, "score": 98677.8716817233 }, { "content": "pub fn new_raw_scan_request(\n\n range: impl Into<BoundRange>,\n\n limit: u32,\n\n key_only: bool,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawScanRequest {\n\n let (start_key, end_key) = range.into().into_keys();\n\n let mut req = kvrpcpb::RawScanRequest::default();\n\n req.set_start_key(start_key.into());\n\n req.set_end_key(end_key.unwrap_or_default().into());\n\n req.set_limit(limit);\n\n req.set_key_only(key_only);\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawBatchScanRequest {\n\n type Result = Vec<KvPair>;\n\n type RpcResponse = kvrpcpb::RawBatchScanResponse;\n", "file_path": "src/raw/requests.rs", "rank": 19, "score": 97471.57530574285 }, { "content": "pub fn new_raw_put_request(\n\n key: impl Into<Key>,\n\n value: impl Into<Value>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawPutRequest {\n\n let mut req = kvrpcpb::RawPutRequest::default();\n\n req.set_key(key.into().into());\n\n req.set_value(value.into());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawBatchPutRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::RawBatchPutResponse;\n\n type KeyData = Vec<KvPair>;\n\n const REQUEST_NAME: &'static str = \"raw_batch_put\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_batch_put_async_opt;\n\n\n", "file_path": "src/raw/requests.rs", "rank": 20, "score": 97471.57530574285 }, { "content": "pub fn new_raw_get_request(\n\n key: impl Into<Key>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawGetRequest {\n\n let mut req = kvrpcpb::RawGetRequest::default();\n\n req.set_key(key.into().into());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawBatchGetRequest {\n\n type Result = Vec<KvPair>;\n\n type RpcResponse = kvrpcpb::RawBatchGetResponse;\n\n type KeyData = 
Vec<Key>;\n\n const REQUEST_NAME: &'static str = \"raw_batch_get\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_batch_get_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(&self, keys: Self::KeyData, store: &Store<KvC>) -> Self {\n\n let mut req = self.request_from_store(store);\n", "file_path": "src/raw/requests.rs", "rank": 21, "score": 97471.57530574285 }, { "content": "pub fn new_batch_rollback_request(\n\n keys: Vec<Key>,\n\n start_version: u64,\n\n) -> kvrpcpb::BatchRollbackRequest {\n\n let mut req = kvrpcpb::BatchRollbackRequest::default();\n\n req.set_keys(keys.into_iter().map(Into::into).collect());\n\n req.set_start_version(start_version);\n\n\n\n req\n\n}\n\n\n\nimpl HasLocks for kvrpcpb::CommitResponse {}\n\nimpl HasLocks for kvrpcpb::CleanupResponse {}\n\nimpl HasLocks for kvrpcpb::BatchRollbackResponse {}\n\nimpl HasLocks for kvrpcpb::ResolveLockResponse {}\n", "file_path": "src/transaction/requests.rs", "rank": 22, "score": 97471.57530574285 }, { "content": "pub fn new_raw_delete_request(\n\n key: impl Into<Key>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawDeleteRequest {\n\n let mut req = kvrpcpb::RawDeleteRequest::default();\n\n req.set_key(key.into().into());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawBatchDeleteRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::RawBatchDeleteResponse;\n\n type KeyData = Vec<Key>;\n\n const REQUEST_NAME: &'static str = \"raw_batch_delete\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_batch_delete_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(&self, keys: Self::KeyData, store: &Store<KvC>) -> Self {\n\n let mut req = self.request_from_store(store);\n", "file_path": "src/raw/requests.rs", "rank": 23, "score": 97471.57530574285 }, { "content": "pub fn new_mvcc_scan_request(\n\n range: impl Into<BoundRange>,\n\n timestamp: Timestamp,\n\n limit: u32,\n\n key_only: bool,\n\n) -> 
kvrpcpb::ScanRequest {\n\n let (start_key, end_key) = range.into().into_keys();\n\n let mut req = kvrpcpb::ScanRequest::default();\n\n req.set_start_key(start_key.into());\n\n req.set_end_key(end_key.unwrap_or_default().into());\n\n req.set_limit(limit);\n\n req.set_key_only(key_only);\n\n req.set_version(timestamp.version());\n\n req\n\n}\n\n\n\nimpl HasLocks for kvrpcpb::ScanResponse {\n\n fn take_locks(&mut self) -> Vec<kvrpcpb::LockInfo> {\n\n self.pairs\n\n .iter_mut()\n", "file_path": "src/transaction/requests.rs", "rank": 24, "score": 97471.57530574285 }, { "content": "pub fn new_resolve_lock_request(\n\n context: kvrpcpb::Context,\n\n start_version: u64,\n\n commit_version: u64,\n\n) -> kvrpcpb::ResolveLockRequest {\n\n let mut req = kvrpcpb::ResolveLockRequest::default();\n\n req.set_context(context);\n\n req.set_start_version(start_version);\n\n req.set_commit_version(commit_version);\n\n\n\n req\n\n}\n\n\n\n// TODO: Add lite resolve lock (resolve specified locks only)\n\n\n\nimpl KvRequest for kvrpcpb::CleanupRequest {\n\n /// Commit version if the key is committed, 0 otherwise.\n\n type Result = u64;\n\n type RpcResponse = kvrpcpb::CleanupResponse;\n\n type KeyData = Key;\n", "file_path": "src/transaction/requests.rs", "rank": 25, "score": 97471.57530574285 }, { "content": "pub fn new_raw_batch_scan_request(\n\n ranges: impl IntoIterator<Item = impl Into<BoundRange>>,\n\n each_limit: u32,\n\n key_only: bool,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawBatchScanRequest {\n\n let mut req = kvrpcpb::RawBatchScanRequest::default();\n\n req.set_ranges(ranges.into_iter().map(Into::into).map(Into::into).collect());\n\n req.set_each_limit(each_limit);\n\n req.set_key_only(key_only);\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nmacro_rules! 
impl_raw_rpc_request {\n\n ($name: ident) => {\n\n impl RawRpcRequest for kvrpcpb::$name {\n\n fn set_cf(&mut self, cf: String) {\n\n self.set_cf(cf);\n", "file_path": "src/raw/requests.rs", "rank": 26, "score": 95551.57547472473 }, { "content": "pub fn new_raw_batch_delete_request(\n\n keys: impl IntoIterator<Item = impl Into<Key>>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawBatchDeleteRequest {\n\n let mut req = kvrpcpb::RawBatchDeleteRequest::default();\n\n req.set_keys(keys.into_iter().map(Into::into).map(Into::into).collect());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawDeleteRangeRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::RawDeleteRangeResponse;\n\n type KeyData = (Key, Key);\n\n const REQUEST_NAME: &'static str = \"raw_delete_range\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_delete_range_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(\n\n &self,\n", "file_path": "src/raw/requests.rs", "rank": 27, "score": 95551.57547472473 }, { "content": "pub fn new_raw_batch_get_request(\n\n keys: impl IntoIterator<Item = impl Into<Key>>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawBatchGetRequest {\n\n let mut req = kvrpcpb::RawBatchGetRequest::default();\n\n req.set_keys(keys.into_iter().map(Into::into).map(Into::into).collect());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawPutRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::RawPutResponse;\n\n type KeyData = KvPair;\n\n const REQUEST_NAME: &'static str = \"raw_put\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_put_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(&self, key: Self::KeyData, store: &Store<KvC>) -> Self {\n\n let mut req = self.request_from_store(store);\n", "file_path": "src/raw/requests.rs", "rank": 28, "score": 95551.57547472473 }, { "content": "pub fn new_raw_batch_put_request(\n\n pairs: impl IntoIterator<Item = 
impl Into<KvPair>>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawBatchPutRequest {\n\n let mut req = kvrpcpb::RawBatchPutRequest::default();\n\n req.set_pairs(pairs.into_iter().map(Into::into).map(Into::into).collect());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawDeleteRequest {\n\n type Result = ();\n\n type RpcResponse = kvrpcpb::RawDeleteResponse;\n\n type KeyData = Key;\n\n const REQUEST_NAME: &'static str = \"raw_delete\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_delete_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(&self, key: Self::KeyData, store: &Store<KvC>) -> Self {\n\n let mut req = self.request_from_store(store);\n", "file_path": "src/raw/requests.rs", "rank": 29, "score": 95551.57547472473 }, { "content": "pub fn new_raw_delete_range_request(\n\n range: impl Into<BoundRange>,\n\n cf: Option<ColumnFamily>,\n\n) -> kvrpcpb::RawDeleteRangeRequest {\n\n let (start_key, end_key) = range.into().into_keys();\n\n let mut req = kvrpcpb::RawDeleteRangeRequest::default();\n\n req.set_start_key(start_key.into());\n\n req.set_end_key(end_key.unwrap_or_default().into());\n\n req.maybe_set_cf(cf);\n\n\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::RawScanRequest {\n\n type Result = Vec<KvPair>;\n\n type RpcResponse = kvrpcpb::RawScanResponse;\n\n type KeyData = (Key, Key);\n\n const REQUEST_NAME: &'static str = \"raw_scan\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::raw_scan_async_opt;\n\n\n", "file_path": "src/raw/requests.rs", "rank": 30, "score": 95551.57547472473 }, { "content": "pub fn new_mvcc_get_batch_request(\n\n keys: Vec<Key>,\n\n timestamp: Timestamp,\n\n) -> kvrpcpb::BatchGetRequest {\n\n let mut req = kvrpcpb::BatchGetRequest::default();\n\n req.set_keys(keys.into_iter().map(Into::into).collect());\n\n req.set_version(timestamp.version());\n\n req\n\n}\n\n\n\nimpl KvRequest for kvrpcpb::ScanRequest {\n\n type Result = Vec<KvPair>;\n\n type 
RpcResponse = kvrpcpb::ScanResponse;\n\n type KeyData = (Key, Key);\n\n const REQUEST_NAME: &'static str = \"kv_scan\";\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse> = TikvClient::kv_scan_async_opt;\n\n\n\n fn make_rpc_request<KvC: KvClient>(\n\n &self,\n\n (start_key, end_key): Self::KeyData,\n", "file_path": "src/transaction/requests.rs", "rank": 31, "score": 95551.57547472473 }, { "content": "fn allocate_timestamps(\n\n resp: &TsoResponse,\n\n pending_requests: &mut VecDeque<RequestGroup>,\n\n) -> Result<()> {\n\n // PD returns the timestamp with the biggest logical value. We can send back timestamps\n\n // whose logical value is from `logical - count + 1` to `logical` using the senders\n\n // in `pending`.\n\n let tail_ts = resp\n\n .timestamp\n\n .as_ref()\n\n .ok_or_else(|| Error::internal_error(\"No timestamp in TsoResponse\"))?;\n\n\n\n let mut offset = resp.count;\n\n if let Some(RequestGroup {\n\n tso_request,\n\n requests,\n\n }) = pending_requests.pop_front()\n\n {\n\n if tso_request.count != offset {\n\n return Err(Error::internal_error(\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 32, "score": 94280.08148720067 }, { "content": "pub fn start_mock_pd_server() -> Server {\n\n let env = Arc::new(Environment::new(1));\n\n let mut server = ServerBuilder::new(env)\n\n .register_service(create_pd(MockPd::new()))\n\n .bind(\"localhost\", MOCK_PD_PORT)\n\n .build()\n\n .unwrap();\n\n server.start();\n\n server\n\n}\n\n\n\nimpl Pd for MockPd {\n\n fn get_members(\n\n &mut self,\n\n ctx: ::grpcio::RpcContext,\n\n req: GetMembersRequest,\n\n sink: ::grpcio::UnarySink<GetMembersResponse>,\n\n ) {\n\n let member = Member {\n\n name: \"mock tikv\".to_owned(),\n", "file_path": "mock-tikv/src/pd.rs", "rank": 33, "score": 90902.7359119496 }, { "content": "pub fn start_mock_tikv_server() -> Server {\n\n let env = Arc::new(Environment::new(1));\n\n let mut server = ServerBuilder::new(env)\n\n 
.register_service(create_tikv(MockTikv::new(KvStore::new())))\n\n .bind(\"localhost\", MOCK_TIKV_PORT)\n\n .build()\n\n .unwrap();\n\n server.start();\n\n server\n\n}\n\n\n\n#[derive(Debug, Clone, new)]\n\npub struct MockTikv {\n\n inner: KvStore,\n\n}\n\n\n\nimpl Tikv for MockTikv {\n\n fn kv_get(\n\n &mut self,\n\n _ctx: grpcio::RpcContext,\n", "file_path": "mock-tikv/src/server.rs", "rank": 34, "score": 90902.7359119496 }, { "content": "struct HexRepr<'a>(pub &'a [u8]);\n\n\n\nimpl<'a> fmt::Display for HexRepr<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for byte in self.0 {\n\n write!(f, \"{:02X}\", byte)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "tikv-client-common/src/kv/mod.rs", "rank": 35, "score": 90602.0883311353 }, { "content": "struct RequestGroup {\n\n tso_request: TsoRequest,\n\n requests: Vec<TimestampRequest>,\n\n}\n\n\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 36, "score": 90322.98639560648 }, { "content": "fn region_from_response(\n\n resp: pdpb::GetRegionResponse,\n\n err: impl FnOnce() -> Error,\n\n) -> Result<Region> {\n\n let region = resp.region.ok_or_else(err)?;\n\n Ok(Region::new(region, resp.leader))\n\n}\n\n\n\n// A node-like thing that can be connected to.\n", "file_path": "src/pd/retry.rs", "rank": 37, "score": 89316.14073356931 }, { "content": "pub trait TimestampExt {\n\n fn version(&self) -> u64;\n\n fn from_version(version: u64) -> Self;\n\n}\n\n\n\nimpl TimestampExt for Timestamp {\n\n fn version(&self) -> u64 {\n\n ((self.physical << PHYSICAL_SHIFT_BITS) + self.logical)\n\n .try_into()\n\n .expect(\"Overflow converting timestamp to version\")\n\n }\n\n\n\n fn from_version(version: u64) -> Self {\n\n let version = version as i64;\n\n Self {\n\n physical: version >> PHYSICAL_SHIFT_BITS,\n\n logical: version & LOGICAL_MASK,\n\n }\n\n }\n\n}\n", "file_path": "tikv-client-common/src/timestamp.rs", "rank": 38, "score": 88786.24944477205 }, { "content": "#[inline]\n\npub fn duration_to_sec(d: 
Duration) -> f64 {\n\n let nanos = f64::from(d.subsec_nanos());\n\n // In most cases, we can't have so large Duration, so here just panic if overflow now.\n\n d.as_secs() as f64 + (nanos / 1_000_000_000.0)\n\n}\n", "file_path": "tikv-client-common/src/util.rs", "rank": 39, "score": 86380.16820263525 }, { "content": "struct TsoRequestStream<'a> {\n\n cluster_id: u64,\n\n request_rx: Pin<&'a mut mpsc::Receiver<oneshot::Sender<Timestamp>>>,\n\n pending_requests: Rc<RefCell<VecDeque<RequestGroup>>>,\n\n self_waker: Rc<AtomicWaker>,\n\n}\n\n\n\nimpl<'a> Stream for TsoRequestStream<'a> {\n\n type Item = (TsoRequest, WriteFlags);\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n let pending_requests = self.pending_requests.clone();\n\n let mut pending_requests = pending_requests.borrow_mut();\n\n let mut requests = Vec::new();\n\n\n\n while requests.len() < MAX_BATCH_SIZE && pending_requests.len() < MAX_PENDING_COUNT {\n\n match self.request_rx.as_mut().poll_next(cx) {\n\n Poll::Ready(Some(sender)) => {\n\n requests.push(sender);\n\n }\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 40, "score": 86026.98876324628 }, { "content": "#[allow(dead_code)]\n\npub fn observe_tso_batch(batch_size: usize) {\n\n PD_TSO_BATCH_SIZE_HISTOGRAM.observe(batch_size as f64);\n\n}\n\n\n\nlazy_static! 
{\n\n static ref TIKV_REQUEST_DURATION_HISTOGRAM_VEC: HistogramVec = register_histogram_vec!(\n\n \"tikv_request_duration_seconds\",\n\n \"Bucketed histogram of TiKV requests duration\",\n\n &[\"type\"]\n\n )\n\n .unwrap();\n\n static ref TIKV_REQUEST_COUNTER_VEC: IntCounterVec = register_int_counter_vec!(\n\n \"tikv_request_total\",\n\n \"Total number of requests sent to TiKV\",\n\n &[\"type\"]\n\n )\n\n .unwrap();\n\n static ref TIKV_FAILED_REQUEST_DURATION_HISTOGRAM_VEC: HistogramVec = register_histogram_vec!(\n\n \"tikv_failed_request_duration_seconds\",\n\n \"Bucketed histogram of failed TiKV requests duration\",\n", "file_path": "tikv-client-common/src/stats.rs", "rank": 41, "score": 85176.4086178349 }, { "content": "/// A trait for connecting to TiKV stores.\n\npub trait KvConnect: Sized + Send + Sync + 'static {\n\n type KvClient: KvClient + Clone + Send + Sync + 'static;\n\n\n\n fn connect(&self, address: &str) -> Result<Self::KvClient>;\n\n}\n\n\n\npub type RpcFnType<Req, Resp> =\n\n for<'a, 'b> fn(\n\n &'a TikvClient,\n\n &'b Req,\n\n CallOption,\n\n )\n\n -> std::result::Result<::grpcio::ClientUnaryReceiver<Resp>, ::grpcio::Error>;\n\n\n\n#[derive(new, Clone)]\n\npub struct TikvConnect {\n\n env: Arc<Environment>,\n\n security_mgr: Arc<SecurityManager>,\n\n}\n\n\n\nimpl KvConnect for TikvConnect {\n\n type KvClient = KvRpcClient;\n\n\n\n fn connect(&self, address: &str) -> Result<KvRpcClient> {\n\n self.security_mgr\n\n .connect(self.env.clone(), address, TikvClient::new)\n\n .map(|c| KvRpcClient::new(Arc::new(c)))\n\n }\n\n}\n\n\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 42, "score": 85131.43852800652 }, { "content": "pub fn parse_args(app_name: &str) -> CommandArgs {\n\n let matches = App::new(app_name)\n\n .version(crate_version!())\n\n .author(\"The TiKV Project Authors\")\n\n .arg(\n\n Arg::with_name(\"pd\")\n\n .long(\"pd\")\n\n .aliases(&[\"pd-endpoint\", \"pd-endpoints\"])\n\n .value_name(\"PD_URL\")\n\n .help(\"Sets PD 
endpoints\")\n\n .long_help(\"Sets PD endpoints. Uses `,` to separate multiple PDs\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .value_delimiter(\",\")\n\n .required(true),\n\n )\n\n // A cyclic dependency between CA, cert and key is made\n\n // to ensure that no security options are missing.\n\n .arg(\n\n Arg::with_name(\"ca\")\n", "file_path": "examples/common/mod.rs", "rank": 43, "score": 84682.22115557893 }, { "content": "fn check_pem_file(tag: &str, path: &Path) -> Result<File> {\n\n File::open(path)\n\n .map_err(|e| internal_err!(\"failed to open {} to load {}: {:?}\", path.display(), tag, e))\n\n}\n\n\n", "file_path": "tikv-client-common/src/security.rs", "rank": 44, "score": 81781.14182236612 }, { "content": "pub fn pd_stats(cmd: &'static str) -> RequestStats {\n\n RequestStats::new(\n\n cmd,\n\n &PD_REQUEST_DURATION_HISTOGRAM_VEC,\n\n &PD_REQUEST_COUNTER_VEC,\n\n &PD_FAILED_REQUEST_DURATION_HISTOGRAM_VEC,\n\n &PD_FAILED_REQUEST_COUNTER_VEC,\n\n )\n\n}\n\n\n", "file_path": "tikv-client-common/src/stats.rs", "rank": 45, "score": 79741.78107838113 }, { "content": "pub fn tikv_stats(cmd: &'static str) -> RequestStats {\n\n RequestStats::new(\n\n cmd,\n\n &TIKV_REQUEST_DURATION_HISTOGRAM_VEC,\n\n &TIKV_REQUEST_COUNTER_VEC,\n\n &TIKV_FAILED_REQUEST_DURATION_HISTOGRAM_VEC,\n\n &TIKV_FAILED_REQUEST_COUNTER_VEC,\n\n )\n\n}\n\n\n", "file_path": "tikv-client-common/src/stats.rs", "rank": 46, "score": 79741.78107838113 }, { "content": "fn load_pem_file(tag: &str, path: &Path) -> Result<Vec<u8>> {\n\n let mut file = check_pem_file(tag, path)?;\n\n let mut key = vec![];\n\n file.read_to_end(&mut key)\n\n .map_err(|e| {\n\n internal_err!(\n\n \"failed to load {} from path {}: {:?}\",\n\n tag,\n\n path.display(),\n\n e\n\n )\n\n })\n\n .map(|_| key)\n\n}\n\n\n\n#[derive(Default)]\n\npub struct SecurityManager {\n\n ca: Vec<u8>,\n\n cert: Vec<u8>,\n\n key: PathBuf,\n", "file_path": "tikv-client-common/src/security.rs", "rank": 47, "score": 78727.69657090389 }, { 
"content": "use derive_new::new;\n\nuse kvproto::{kvrpcpb, metapb};\n\nuse tikv_client_common::{Error, Key, Result};\n\n\n\npub type RegionId = u64;\n\npub type StoreId = u64;\n\n\n\n#[derive(Eq, PartialEq, Hash, Clone, Default, Debug)]\n\npub struct RegionVerId {\n\n pub id: RegionId,\n\n pub conf_ver: u64,\n\n pub ver: u64,\n\n}\n\n\n\n#[derive(new, Clone, Default, Debug, PartialEq)]\n\npub struct Region {\n\n pub region: metapb::Region,\n\n pub leader: Option<metapb::Peer>,\n\n}\n\n\n", "file_path": "tikv-client-store/src/region.rs", "rank": 48, "score": 68925.14310325815 }, { "content": "impl Region {\n\n #[allow(dead_code)]\n\n pub fn switch_peer(&mut self, _to: StoreId) -> Result<()> {\n\n unimplemented!()\n\n }\n\n\n\n pub fn contains(&self, key: &Key) -> bool {\n\n let key: &[u8] = key.into();\n\n let start_key = self.region.get_start_key();\n\n let end_key = self.region.get_end_key();\n\n key >= start_key && (key < end_key || end_key.is_empty())\n\n }\n\n\n\n pub fn context(&self) -> Result<kvrpcpb::Context> {\n\n self.leader\n\n .as_ref()\n\n .ok_or_else(|| Error::leader_not_found(self.region.get_id()))\n\n .map(|l| {\n\n let mut ctx = kvrpcpb::Context::default();\n\n ctx.set_region_id(self.region.get_id());\n", "file_path": "tikv-client-store/src/region.rs", "rank": 49, "score": 68915.23865356245 }, { "content": " let region = &self.region;\n\n let epoch = region.get_region_epoch();\n\n RegionVerId {\n\n id: region.get_id(),\n\n conf_ver: epoch.get_conf_ver(),\n\n ver: epoch.get_version(),\n\n }\n\n }\n\n\n\n pub fn id(&self) -> RegionId {\n\n self.region.get_id()\n\n }\n\n\n\n pub fn get_store_id(&self) -> Result<StoreId> {\n\n self.leader\n\n .as_ref()\n\n .cloned()\n\n .ok_or_else(|| Error::leader_not_found(self.id()))\n\n .map(|s| s.get_store_id())\n\n }\n\n}\n", "file_path": "tikv-client-store/src/region.rs", "rank": 50, "score": 68906.37553538021 }, { "content": " ctx.set_region_epoch(Clone::clone(self.region.get_region_epoch()));\n\n 
ctx.set_peer(Clone::clone(l));\n\n ctx\n\n })\n\n }\n\n\n\n pub fn start_key(&self) -> Key {\n\n self.region.get_start_key().to_vec().into()\n\n }\n\n\n\n pub fn end_key(&self) -> Key {\n\n self.region.get_end_key().to_vec().into()\n\n }\n\n\n\n pub fn range(&self) -> (Key, Key) {\n\n (self.start_key(), self.end_key())\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn ver_id(&self) -> RegionVerId {\n", "file_path": "tikv-client-store/src/region.rs", "rank": 51, "score": 68902.38752767966 }, { "content": "macro_rules! has_key_error {\n\n ($type:ty) => {\n\n impl HasError for $type {\n\n fn error(&mut self) -> Option<Error> {\n\n if self.has_error() {\n\n Some(self.take_error().into())\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\nhas_key_error!(kvrpcpb::GetResponse);\n\nhas_key_error!(kvrpcpb::CommitResponse);\n\nhas_key_error!(kvrpcpb::BatchRollbackResponse);\n\nhas_key_error!(kvrpcpb::CleanupResponse);\n\nhas_key_error!(kvrpcpb::ScanLockResponse);\n\nhas_key_error!(kvrpcpb::ResolveLockResponse);\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 52, "score": 68729.74380596535 }, { "content": 
"has_region_error!(kvrpcpb::PrewriteResponse);\n\nhas_region_error!(kvrpcpb::CommitResponse);\n\nhas_region_error!(kvrpcpb::ImportResponse);\n\nhas_region_error!(kvrpcpb::BatchRollbackResponse);\n\nhas_region_error!(kvrpcpb::CleanupResponse);\n\nhas_region_error!(kvrpcpb::BatchGetResponse);\n\nhas_region_error!(kvrpcpb::ScanLockResponse);\n\nhas_region_error!(kvrpcpb::ResolveLockResponse);\n\nhas_region_error!(kvrpcpb::GcResponse);\n\nhas_region_error!(kvrpcpb::RawGetResponse);\n\nhas_region_error!(kvrpcpb::RawBatchGetResponse);\n\nhas_region_error!(kvrpcpb::RawPutResponse);\n\nhas_region_error!(kvrpcpb::RawBatchPutResponse);\n\nhas_region_error!(kvrpcpb::RawDeleteResponse);\n\nhas_region_error!(kvrpcpb::RawBatchDeleteResponse);\n\nhas_region_error!(kvrpcpb::DeleteRangeResponse);\n\nhas_region_error!(kvrpcpb::RawDeleteRangeResponse);\n\nhas_region_error!(kvrpcpb::RawScanResponse);\n\nhas_region_error!(kvrpcpb::RawBatchScanResponse);\n\n\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 53, "score": 68727.87369215695 }, { "content": "has_key_error!(kvrpcpb::GcResponse);\n\n\n\nmacro_rules! 
has_str_error {\n\n ($type:ty) => {\n\n impl HasError for $type {\n\n fn error(&mut self) -> Option<Error> {\n\n if self.get_error().is_empty() {\n\n None\n\n } else {\n\n Some(Error::kv_error(self.take_error()))\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\nhas_str_error!(kvrpcpb::RawGetResponse);\n\nhas_str_error!(kvrpcpb::RawPutResponse);\n\nhas_str_error!(kvrpcpb::RawBatchPutResponse);\n\nhas_str_error!(kvrpcpb::RawDeleteResponse);\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 54, "score": 68727.52064475758 }, { "content": "has_str_error!(kvrpcpb::RawBatchDeleteResponse);\n\nhas_str_error!(kvrpcpb::RawDeleteRangeResponse);\n\nhas_str_error!(kvrpcpb::ImportResponse);\n\nhas_str_error!(kvrpcpb::DeleteRangeResponse);\n\n\n\nimpl HasError for kvrpcpb::ScanResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.pairs.iter_mut().map(|pair| pair.error.take()))\n\n }\n\n}\n\n\n\nimpl HasError for kvrpcpb::BatchGetResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.pairs.iter_mut().map(|pair| pair.error.take()))\n\n }\n\n}\n\n\n\nimpl HasError for kvrpcpb::RawBatchGetResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.pairs.iter_mut().map(|pair| pair.error.take()))\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 55, "score": 68725.90623259556 }, { "content": " }\n\n}\n\n\n\nimpl HasError for kvrpcpb::RawScanResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.kvs.iter_mut().map(|pair| pair.error.take()))\n\n }\n\n}\n\n\n\nimpl HasError for kvrpcpb::RawBatchScanResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.kvs.iter_mut().map(|pair| pair.error.take()))\n\n }\n\n}\n\n\n\nimpl HasError for kvrpcpb::PrewriteResponse {\n\n fn error(&mut self) -> Option<Error> {\n\n extract_errors(self.take_errors().into_iter().map(Some))\n\n }\n\n}\n\n\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 56, "score": 68725.8060185347 }, { 
"content": "// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse kvproto::kvrpcpb;\n\nuse tikv_client_common::Error;\n\n\n", "file_path": "tikv-client-store/src/errors.rs", "rank": 57, "score": 68724.27503032933 }, { "content": "type TimestampRequest = oneshot::Sender<Timestamp>;\n\n\n\n/// The timestamp oracle (TSO) which provides monotonically increasing timestamps.\n\n#[derive(Clone)]\n\npub(crate) struct TimestampOracle {\n\n /// The transmitter of a bounded channel which transports requests of getting a single\n\n /// timestamp to the TSO working thread. A bounded channel is used to prevent using\n\n /// too much memory unexpectedly.\n\n /// In the working thread, the `TimestampRequest`, which is actually a one channel sender,\n\n /// is used to send back the timestamp result.\n\n request_tx: mpsc::Sender<TimestampRequest>,\n\n}\n\n\n\nimpl TimestampOracle {\n\n pub(crate) fn new(cluster_id: u64, pd_client: &PdClient) -> Result<TimestampOracle> {\n\n let (request_tx, request_rx) = mpsc::channel(MAX_BATCH_SIZE);\n\n // FIXME: use tso_opt\n\n let (rpc_sender, rpc_receiver) = pd_client.tso()?;\n\n\n\n // Start a background thread to handle TSO requests and responses\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 58, "score": 64617.97704410757 }, { "content": "#[derive(new)]\n\nstruct TwoPhaseCommitter {\n\n mutations: Vec<kvrpcpb::Mutation>,\n\n start_version: u64,\n\n bg_worker: ThreadPool,\n\n rpc: Arc<PdRpcClient>,\n\n #[new(default)]\n\n committed: bool,\n\n #[new(default)]\n\n undetermined: bool,\n\n}\n\n\n\nimpl TwoPhaseCommitter {\n\n async fn commit(mut self) -> Result<()> {\n\n if self.mutations.is_empty() {\n\n self.committed = true;\n\n return Ok(());\n\n }\n\n self.prewrite().await?;\n\n match self.commit_primary().await {\n\n Ok(commit_version) => {\n", "file_path": "src/transaction/transaction.rs", "rank": 59, "score": 59239.78138709422 }, { "content": "pub trait HasLocks {\n\n fn take_locks(&mut self) -> 
Vec<kvrpcpb::LockInfo> {\n\n Vec::new()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::mock::MockPdClient;\n\n\n\n use futures::executor;\n\n\n\n #[test]\n\n fn test_resolve_lock_with_retry() {\n\n // Test resolve lock within retry limit\n\n fail::cfg(\"region-error\", \"9*return\").unwrap();\n\n let client = Arc::new(MockPdClient);\n\n let key: Key = vec![1].into();\n\n let region1 = MockPdClient::region1();\n", "file_path": "src/transaction/lock.rs", "rank": 60, "score": 51114.87470686478 }, { "content": "/// Permits easy mocking of rpc calls.\n\npub trait DispatchHook: KvRequest {\n\n fn dispatch_hook(\n\n &self,\n\n _opt: CallOption,\n\n ) -> Option<BoxFuture<'static, Result<Self::RpcResponse>>> {\n\n None\n\n }\n\n}\n\n\n\nimpl<T: KvRequest> DispatchHook for T {\n\n #[cfg(test)]\n\n default fn dispatch_hook(\n\n &self,\n\n _opt: CallOption,\n\n ) -> Option<BoxFuture<'static, Result<Self::RpcResponse>>> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 61, "score": 47497.25452811817 }, { "content": "pub trait KvRpcRequest: Default {\n\n fn set_context(&mut self, context: kvrpcpb::Context);\n\n}\n\n\n\nmacro_rules! impl_kv_rpc_request {\n\n ($name: ident) => {\n\n impl KvRpcRequest for kvrpcpb::$name {\n\n fn set_context(&mut self, context: kvrpcpb::Context) {\n\n self.set_context(context);\n\n }\n\n }\n\n };\n\n}\n\n\n\nimpl_kv_rpc_request!(RawGetRequest);\n\nimpl_kv_rpc_request!(RawBatchGetRequest);\n\nimpl_kv_rpc_request!(RawPutRequest);\n\nimpl_kv_rpc_request!(RawBatchPutRequest);\n\nimpl_kv_rpc_request!(RawDeleteRequest);\n\nimpl_kv_rpc_request!(RawBatchDeleteRequest);\n", "file_path": "src/request.rs", "rank": 62, "score": 47497.25452811817 }, { "content": "/// An extension crate to make using our combinator functions more ergonomic.\n\npub trait ClientFutureExt {\n\n /// This function is similar to `map_ok` combinator. 
Provide a function which\n\n /// is applied after the `self` future is resolved, only if that future\n\n /// resolves to `Ok`. Similar to `Result::and_then`, the supplied function\n\n /// must return a Result (c.f., `map_ok`, which returns the underlying type,\n\n /// `T`).\n\n ///\n\n /// Note that unlike `and_then`, the supplied function returns a resolved\n\n /// value, not a closure.\n\n fn ok_and_then<U, T, F, E>(self, func: F) -> OkAndThen<Self, F>\n\n where\n\n F: FnMut(U) -> Result<T, E>,\n\n Self: Future<Output = Result<U, E>> + Sized,\n\n {\n\n OkAndThen { future: self, func }\n\n }\n\n}\n\n\n\nimpl<T: TryFuture> ClientFutureExt for T {}\n", "file_path": "tikv-client-common/src/compat.rs", "rank": 63, "score": 46879.25444292762 }, { "content": "/// A convenience trait for converting ranges of borrowed types into a `BoundRange`.\n\npub trait ToOwnedRange {\n\n /// Transform a borrowed range of some form into an owned `BoundRange`.\n\n fn to_owned(self) -> BoundRange;\n\n}\n\n\n\nimpl<T: Into<Key> + Borrow<U>, U: ToOwned<Owned = T> + ?Sized> ToOwnedRange for Range<&U> {\n\n fn to_owned(self) -> BoundRange {\n\n From::from(Range {\n\n start: self.start.to_owned(),\n\n end: self.end.to_owned(),\n\n })\n\n }\n\n}\n\n\n\nimpl<T: Into<Key> + Borrow<U>, U: ToOwned<Owned = T> + ?Sized> ToOwnedRange for RangeFrom<&U> {\n\n fn to_owned(self) -> BoundRange {\n\n From::from(RangeFrom {\n\n start: self.start.to_owned(),\n\n })\n\n }\n", "file_path": "tikv-client-common/src/kv/bound_range.rs", "rank": 64, "score": 45970.808582868696 }, { "content": "pub trait Backoff: Clone + Send + 'static {\n\n // Returns the delay period for next retry. If the maximum retry count is hit returns None.\n\n fn next_delay_duration(&mut self) -> Option<Duration>;\n\n}\n\n\n\n// Exponential backoff means that the retry delay should multiply a constant\n\n// after each attempt, up to a maximum value. 
After each attempt, the new retry\n\n// delay should be:\n\n//\n\n// new_delay = min(max_delay, base_delay * 2 ** attempts)\n\n#[derive(Clone)]\n\npub struct NoJitterBackoff {\n\n current_attempts: u32,\n\n max_attempts: u32,\n\n current_delay_ms: u64,\n\n max_delay_ms: u64,\n\n}\n\n\n\nimpl NoJitterBackoff {\n\n pub const fn new(base_delay_ms: u64, max_delay_ms: u64, max_attempts: u32) -> Self {\n", "file_path": "src/backoff.rs", "rank": 65, "score": 45343.79078038966 }, { "content": "pub trait PdClient: Send + Sync + 'static {\n\n type KvClient: KvClient + Send + Sync + 'static;\n\n\n\n fn map_region_to_store(\n\n self: Arc<Self>,\n\n region: Region,\n\n ) -> BoxFuture<'static, Result<Store<Self::KvClient>>>;\n\n\n\n fn region_for_key(&self, key: &Key) -> BoxFuture<'static, Result<Region>>;\n\n\n\n fn region_for_id(&self, id: RegionId) -> BoxFuture<'static, Result<Region>>;\n\n\n\n fn get_timestamp(self: Arc<Self>) -> BoxFuture<'static, Result<Timestamp>>;\n\n\n\n fn store_for_key(\n\n self: Arc<Self>,\n\n key: &Key,\n\n ) -> BoxFuture<'static, Result<Store<Self::KvClient>>> {\n\n self.region_for_key(key)\n\n .and_then(move |region| self.map_region_to_store(region))\n", "file_path": "src/pd/client.rs", "rank": 66, "score": 43363.145863591104 }, { "content": "use crate::Region;\n\nuse std::time::Duration;\n\n\n\npub struct StoreBuilder {\n\n pub region: Region,\n\n pub address: String,\n\n pub timeout: Duration,\n\n}\n\n\n\nimpl StoreBuilder {\n\n pub fn new(region: Region, address: String, timeout: Duration) -> StoreBuilder {\n\n StoreBuilder {\n\n region,\n\n address,\n\n timeout,\n\n }\n\n }\n\n}\n", "file_path": "tikv-client-store/src/store_builder.rs", "rank": 67, "score": 42534.182666794455 }, { "content": "pub trait KvRequest: Sync + Send + 'static + Sized {\n\n type Result;\n\n type RpcResponse: HasError + HasLocks + Clone + Send + 'static;\n\n /// A single `KvRequest` can be divided into a number of RPC requests because the keys span\n\n /// several 
regions or a single RPC request is too large. Most of the fields in these requests\n\n /// share the same content while `KeyData`, which contains keys (and associated data if any),\n\n /// is the part which differs among the requests.\n\n type KeyData;\n\n const REQUEST_NAME: &'static str;\n\n const RPC_FN: RpcFnType<Self, Self::RpcResponse>;\n\n\n\n fn execute(self, pd_client: Arc<impl PdClient>) -> BoxFuture<'static, Result<Self::Result>> {\n\n Self::reduce(\n\n self.response_stream(pd_client)\n\n .and_then(|mut response| match response.error() {\n\n Some(e) => future::err(e),\n\n None => future::ok(response),\n\n })\n\n .map_ok(Self::map_result)\n\n .boxed(),\n", "file_path": "src/request.rs", "rank": 68, "score": 42493.18326849167 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse kvproto::kvrpcpb::KvPair;\n\nuse std::{\n\n collections::HashMap,\n\n sync::{Arc, RwLock},\n\n};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct KvStore {\n\n data: Arc<RwLock<HashMap<Vec<u8>, Vec<u8>>>>,\n\n}\n\n\n\nimpl Default for KvStore {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl KvStore {\n", "file_path": "mock-tikv/src/store.rs", "rank": 69, "score": 35570.94635045846 }, { "content": " pub fn new() -> KvStore {\n\n KvStore {\n\n data: Arc::new(RwLock::new(HashMap::new())),\n\n }\n\n }\n\n\n\n pub fn raw_get(&self, key: &[u8]) -> Option<Vec<u8>> {\n\n let data = self.data.read().unwrap();\n\n data.get(key).map(|v| v.to_vec())\n\n }\n\n\n\n pub fn raw_batch_get(&self, keys: Vec<Vec<u8>>) -> Vec<KvPair> {\n\n let data = self.data.read().unwrap();\n\n keys.into_iter()\n\n .filter_map(|key| {\n\n if data.contains_key(&key) {\n\n let mut pair = KvPair::default();\n\n pair.set_value(data.get(&key).unwrap().to_vec());\n\n pair.set_key(key);\n\n Some(pair)\n", "file_path": "mock-tikv/src/store.rs", "rank": 70, "score": 35562.906283851924 }, { "content": "\n\n pub fn raw_delete(&self, key: &[u8]) {\n\n let mut data = 
self.data.write().unwrap();\n\n data.remove(key);\n\n }\n\n\n\n pub fn raw_batch_delete(&self, keys: Vec<Vec<u8>>) {\n\n let mut data = self.data.write().unwrap();\n\n keys.iter().for_each(|k| {\n\n data.remove(k);\n\n });\n\n }\n\n}\n", "file_path": "mock-tikv/src/store.rs", "rank": 71, "score": 35560.06255857689 }, { "content": " } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n }\n\n\n\n pub fn raw_put(&self, key: Vec<u8>, value: Vec<u8>) {\n\n let mut data = self.data.write().unwrap();\n\n data.insert(key, value);\n\n }\n\n\n\n pub fn raw_batch_put(&self, pairs: Vec<KvPair>) {\n\n let mut data = self.data.write().unwrap();\n\n data.extend(\n\n pairs\n\n .into_iter()\n\n .map(|mut pair| (pair.take_key(), pair.take_value())),\n\n );\n\n }\n", "file_path": "mock-tikv/src/store.rs", "rank": 72, "score": 35558.89555866763 }, { "content": " /// ```rust\n\n /// # use tikv_client_common::Config;\n\n /// let config = Config::new(vec![\"192.168.0.100:2379\", \"192.168.0.101:2379\"])\n\n /// .with_security(\"root.ca\", \"internal.cert\", \"internal.key\");\n\n /// ```\n\n pub fn with_security(\n\n mut self,\n\n ca_path: impl Into<PathBuf>,\n\n cert_path: impl Into<PathBuf>,\n\n key_path: impl Into<PathBuf>,\n\n ) -> Self {\n\n self.ca_path = Some(ca_path.into());\n\n self.cert_path = Some(cert_path.into());\n\n self.key_path = Some(key_path.into());\n\n self\n\n }\n\n\n\n pub fn timeout(mut self, timeout: Duration) -> Self {\n\n self.timeout = timeout;\n\n self\n\n }\n\n}\n", "file_path": "tikv-client-common/src/config.rs", "rank": 73, "score": 34672.651249354894 }, { "content": " /// ```rust\n\n /// # use tikv_client_common::Config;\n\n /// let config = Config::new(vec![\"192.168.0.100:2379\", \"192.168.0.101:2379\"]);\n\n /// ```\n\n pub fn new(pd_endpoints: impl IntoIterator<Item = impl Into<String>>) -> Self {\n\n Config {\n\n pd_endpoints: pd_endpoints.into_iter().map(Into::into).collect(),\n\n ca_path: None,\n\n cert_path: None,\n\n key_path: None,\n\n timeout: 
DEFAULT_REQUEST_TIMEOUT,\n\n }\n\n }\n\n\n\n /// Set the certificate authority, certificate, and key locations for the\n\n /// [`Config`](Config).\n\n ///\n\n /// By default, TiKV connections do not utilize transport layer security. Enable it by setting\n\n /// these values.\n\n ///\n", "file_path": "tikv-client-common/src/config.rs", "rank": 74, "score": 34672.353015558365 }, { "content": "/// TiKV does not currently offer encrypted storage (or encryption-at-rest).\n\n#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]\n\n#[serde(default)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub struct Config {\n\n pub pd_endpoints: Vec<String>,\n\n pub ca_path: Option<PathBuf>,\n\n pub cert_path: Option<PathBuf>,\n\n pub key_path: Option<PathBuf>,\n\n pub timeout: Duration,\n\n}\n\n\n\nconst DEFAULT_REQUEST_TIMEOUT: Duration = Duration::from_secs(2);\n\n\n\nimpl Config {\n\n /// Create a new [`Config`](Config) which coordinates with the given PD endpoints.\n\n ///\n\n /// It's important to **include more than one PD endpoint** (include all, if possible!)\n\n /// This helps avoid having a *single point of failure*.\n\n ///\n", "file_path": "tikv-client-common/src/config.rs", "rank": 75, "score": 34670.542461642624 }, { "content": "// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse std::{path::PathBuf, time::Duration};\n\n\n\n/// The configuration for either a `raw::Client` or a `transaction::Client`.\n\n///\n\n/// Because TiKV is managed by a [PD](https://github.com/pingcap/pd/) cluster, the endpoints for PD\n\n/// must be provided, **not** the TiKV nodes.\n\n///\n\n/// It's important to **include more than one PD endpoint** (include all, if possible!)\n\n/// This helps avoid having a *single point of failure*.\n\n///\n\n/// By default, this client will use an insecure connection over instead of one protected by\n\n/// Transport Layer Security (TLS). 
Your deployment may have chosen to rely on security measures\n\n/// such as a private network, or a VPN layer to provid secure transmission.\n\n///\n\n/// To use a TLS secured connection, use the `with_security` function to set the required\n\n/// parameters.\n\n///\n", "file_path": "tikv-client-common/src/config.rs", "rank": 76, "score": 34655.29126296659 }, { "content": " thread::spawn(move || {\n\n block_on(run_tso(\n\n cluster_id,\n\n rpc_sender.sink_err_into(),\n\n rpc_receiver.err_into(),\n\n request_rx,\n\n ))\n\n });\n\n\n\n Ok(TimestampOracle { request_tx })\n\n }\n\n\n\n pub(crate) async fn get_timestamp(mut self) -> Result<Timestamp> {\n\n let (request, response) = oneshot::channel();\n\n self.request_tx\n\n .send(request)\n\n .await\n\n .map_err(|_| Error::internal_error(\"TimestampRequest channel is closed\"))?;\n\n Ok(response.await?)\n\n }\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 77, "score": 34512.8433806994 }, { "content": "}\n\n\n\nasync fn run_tso(\n\n cluster_id: u64,\n\n mut rpc_sender: impl Sink<(TsoRequest, WriteFlags), Error = Error> + Unpin,\n\n mut rpc_receiver: impl Stream<Item = Result<TsoResponse>> + Unpin,\n\n request_rx: mpsc::Receiver<TimestampRequest>,\n\n) {\n\n // The `TimestampRequest`s which are waiting for the responses from the PD server\n\n let pending_requests = Rc::new(RefCell::new(VecDeque::with_capacity(MAX_PENDING_COUNT)));\n\n\n\n // When there are too many pending requests, the `send_request` future will refuse to fetch\n\n // more requests from the bounded channel. This waker is used to wake up the sending future\n\n // if the queue containing pending requests is no longer full.\n\n let sending_future_waker = Rc::new(AtomicWaker::new());\n\n\n\n pin_mut!(request_rx);\n\n let mut request_stream = TsoRequestStream {\n\n cluster_id,\n\n request_rx,\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 78, "score": 34505.11457451827 }, { "content": "//! 
A timestamp returned from the timestamp oracle.\n\n//!\n\n//! The version used in transactions can be converted from a timestamp.\n\n//! The lower 18 (PHYSICAL_SHIFT_BITS) bits are the logical part of the timestamp.\n\n//! The higher bits of the version are the physical part of the timestamp.\n\n\n\npub use kvproto::pdpb::Timestamp;\n\nuse std::convert::TryInto;\n\n\n\nconst PHYSICAL_SHIFT_BITS: i64 = 18;\n\nconst LOGICAL_MASK: i64 = (1 << PHYSICAL_SHIFT_BITS) - 1;\n\n\n", "file_path": "tikv-client-common/src/timestamp.rs", "rank": 79, "score": 34502.53874850417 }, { "content": "use grpcio::WriteFlags;\n\nuse kvproto::pdpb::*;\n\nuse std::{cell::RefCell, collections::VecDeque, pin::Pin, rc::Rc, thread};\n\nuse tikv_client_common::{Error, Result};\n\n\n\n/// It is an empirical value.\n\nconst MAX_BATCH_SIZE: usize = 64;\n\n\n\n/// TODO: This value should be adjustable.\n\nconst MAX_PENDING_COUNT: usize = 1 << 16;\n\n\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 80, "score": 34500.87057917636 }, { "content": " \"PD gives different number of timestamps than expected\",\n\n ));\n\n }\n\n\n\n for request in requests {\n\n offset -= 1;\n\n let ts = Timestamp {\n\n physical: tail_ts.physical,\n\n logical: tail_ts.logical - offset as i64,\n\n };\n\n let _ = request.send(ts);\n\n }\n\n } else {\n\n return Err(Error::internal_error(\n\n \"PD gives more TsoResponse than expected\",\n\n ));\n\n };\n\n Ok(())\n\n}\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 81, "score": 34498.69343359164 }, { "content": " pending_requests: pending_requests.clone(),\n\n self_waker: sending_future_waker.clone(),\n\n }\n\n .map(Ok);\n\n\n\n let send_requests = rpc_sender.send_all(&mut request_stream);\n\n\n\n let receive_and_handle_responses = async move {\n\n while let Some(Ok(resp)) = rpc_receiver.next().await {\n\n let mut pending_requests = pending_requests.borrow_mut();\n\n\n\n // Wake up the sending future blocked by too many pending requests as we are 
consuming\n\n // some of them here.\n\n if pending_requests.len() == MAX_PENDING_COUNT {\n\n sending_future_waker.wake();\n\n }\n\n\n\n allocate_timestamps(&resp, &mut pending_requests)?;\n\n }\n\n // TODO: distinguish between unexpected stream termination and expected end of test\n\n info!(\"TSO stream terminated\");\n\n Ok(())\n\n };\n\n\n\n let (send_res, recv_res): (_, Result<()>) = join!(send_requests, receive_and_handle_responses);\n\n info!(\"TSO send termination: {:?}\", send_res);\n\n info!(\"TSO receive termination: {:?}\", recv_res);\n\n}\n\n\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 82, "score": 34498.46254140279 }, { "content": "// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\n//! This module is the low-level mechanisms for getting timestamps from a PD\n\n//! cluster. It should be used via the `get_timestamp` API in `PdClient`.\n\n//!\n\n//! Once a `TimestampOracle` is created, there will be two futures running in a background working\n\n//! thread created automatically. The `get_timestamp` method creates a oneshot channel whose\n\n//! transmitter is served as a `TimestampRequest`. `TimestampRequest`s are sent to the working\n\n//! thread through a bounded multi-producer, single-consumer channel. Every time the first future\n\n//! is polled, it tries to exhaust the channel to get as many requests as possible and sends a\n\n//! single `TsoRequest` to the PD server. The other future receives `TsoResponse`s from the PD\n\n//! 
server and allocates timestamps for the requests.\n\n\n\nuse futures::{\n\n channel::{mpsc, oneshot},\n\n executor::block_on,\n\n join, pin_mut,\n\n prelude::*,\n\n task::{AtomicWaker, Context, Poll},\n\n};\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 83, "score": 34498.0636444723 }, { "content": " Poll::Ready(None) => return Poll::Ready(None),\n\n Poll::Pending => break,\n\n }\n\n }\n\n\n\n if !requests.is_empty() {\n\n let req = TsoRequest {\n\n header: Some(RequestHeader {\n\n cluster_id: self.cluster_id,\n\n }),\n\n count: requests.len() as u32,\n\n };\n\n\n\n let request_group = RequestGroup {\n\n tso_request: req.clone(),\n\n requests,\n\n };\n\n pending_requests.push_back(request_group);\n\n\n\n let write_flags = WriteFlags::default().buffer_hint(false);\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 84, "score": 34490.68755660072 }, { "content": " Poll::Ready(Some((req, write_flags)))\n\n } else {\n\n // Set the waker to the context, then the stream can be waked up after the pending queue\n\n // is no longer full.\n\n self.self_waker.register(cx.waker());\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n", "file_path": "tikv-client-pd/src/timestamp.rs", "rank": 85, "score": 34490.68755660072 }, { "content": "impl Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n Display::fmt(&self.inner, f)\n\n }\n\n}\n\n\n\nimpl Error {\n\n pub fn kind(&self) -> &ErrorKind {\n\n self.inner.get_context()\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn unimplemented() -> Self {\n\n Error::from(ErrorKind::Unimplemented)\n\n }\n\n\n\n pub fn region_for_key_not_found(key: Vec<u8>) -> Self {\n\n Error::from(ErrorKind::RegionForKeyNotFound { key })\n\n }\n\n\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 86, "score": 34458.337937577344 }, { "content": "\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Error::from(ErrorKind::Io(err))\n\n }\n\n}\n\n\n\nimpl From<grpcio::Error> for 
Error {\n\n fn from(err: grpcio::Error) -> Self {\n\n Error::from(ErrorKind::Grpc(err))\n\n }\n\n}\n\n\n\nimpl From<futures::channel::oneshot::Canceled> for Error {\n\n fn from(err: futures::channel::oneshot::Canceled) -> Self {\n\n Error::from(ErrorKind::Canceled(err))\n\n }\n\n}\n\n\n\nimpl From<kvproto::kvrpcpb::KeyError> for Error {\n\n fn from(err: kvproto::kvrpcpb::KeyError) -> Self {\n\n Error::from(ErrorKind::KeyError(err))\n\n }\n\n}\n\n\n\n/// A result holding an [`Error`](Error).\n\npub type Result<T> = result::Result<T, Error>;\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 87, "score": 34457.75886736263 }, { "content": " pub fn region_error(error: kvproto::errorpb::Error) -> Self {\n\n Error::from(ErrorKind::RegionError(error))\n\n }\n\n\n\n pub fn region_not_found(region_id: u64) -> Self {\n\n Error::from(ErrorKind::RegionNotFound { region_id })\n\n }\n\n\n\n pub fn leader_not_found(region_id: u64) -> Self {\n\n Error::from(ErrorKind::LeaderNotFound { region_id })\n\n }\n\n\n\n pub fn invalid_key_range() -> Self {\n\n Error::from(ErrorKind::InvalidKeyRange)\n\n }\n\n\n\n pub fn max_scan_limit_exceeded(limit: u32, max_limit: u32) -> Self {\n\n Error::from(ErrorKind::MaxScanLimitExceeded { limit, max_limit })\n\n }\n\n\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 88, "score": 34453.00611841118 }, { "content": " pub fn kv_error(message: String) -> Self {\n\n Error::from(ErrorKind::KvError { message })\n\n }\n\n\n\n pub fn internal_error(message: impl Into<String>) -> Self {\n\n Error::from(ErrorKind::InternalError {\n\n message: message.into(),\n\n })\n\n }\n\n\n\n pub fn multiple_errors(errors: Vec<Error>) -> Self {\n\n Error::from(ErrorKind::MultipleErrors(errors))\n\n }\n\n\n\n pub fn undetermined_error(error: Error) -> Self {\n\n Error::from(ErrorKind::UndeterminedError(error))\n\n }\n\n}\n\n\n\nimpl From<errorpb::Error> for Error {\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 89, "score": 
34452.61207711862 }, { "content": "// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse failure::{Backtrace, Context, Fail};\n\nuse kvproto::errorpb;\n\nuse std::{\n\n fmt::{self, Display},\n\n result,\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Box<Context<ErrorKind>>,\n\n}\n\n\n\n/// An error originating from the TiKV client or dependencies.\n\n#[derive(Debug, Fail)]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum ErrorKind {\n\n /// Wraps a `std::io::Error`.\n\n #[fail(display = \"IO error: {}\", _0)]\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 90, "score": 34449.84167060134 }, { "content": " #[fail(display = \"Leader of region {} is not found\", region_id)]\n\n LeaderNotFound { region_id: u64 },\n\n /// Whether the transaction is committed or not is undetermined\n\n #[fail(display = \"Whether the transaction is committed or not is undetermined\")]\n\n UndeterminedError(#[fail(cause)] Error),\n\n /// Invalid key range to scan. Only left bounded intervals are supported.\n\n #[fail(display = \"Only left bounded intervals are supported\")]\n\n InvalidKeyRange,\n\n /// Cannot set an empty value\n\n #[fail(display = \"Cannot set an empty value\")]\n\n EmptyValue,\n\n /// Scan limit exceeds the maximum\n\n #[fail(display = \"Limit {} exceeds max scan limit {}\", limit, max_limit)]\n\n MaxScanLimitExceeded { limit: u32, max_limit: u32 },\n\n /// Wraps `kvproto::kvrpcpb::KeyError`\n\n #[fail(display = \"{:?}\", _0)]\n\n KeyError(kvproto::kvrpcpb::KeyError),\n\n /// A string error returned by TiKV server\n\n #[fail(display = \"Kv error. 
{}\", message)]\n\n KvError { message: String },\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 91, "score": 34448.91598828679 }, { "content": " Io(#[fail(cause)] std::io::Error),\n\n /// Wraps a `grpcio::Error`.\n\n #[fail(display = \"gRPC error: {}\", _0)]\n\n Grpc(#[fail(cause)] grpcio::Error),\n\n /// Represents that a futures oneshot channel was cancelled.\n\n #[fail(display = \"A futures oneshot channel was canceled. {}\", _0)]\n\n Canceled(#[fail(cause)] futures::channel::oneshot::Canceled),\n\n /// Feature is not implemented.\n\n #[fail(display = \"Unimplemented feature\")]\n\n Unimplemented,\n\n /// No region is found for the given key.\n\n #[fail(display = \"Region is not found for key: {:?}\", key)]\n\n RegionForKeyNotFound { key: Vec<u8> },\n\n /// Errors caused by changed region information\n\n #[fail(display = \"Region error: {:?}\", _0)]\n\n RegionError(kvproto::errorpb::Error),\n\n /// No region is found for the given id.\n\n #[fail(display = \"Region {} is not found\", region_id)]\n\n RegionNotFound { region_id: u64 },\n\n /// No region is found for the given id.\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 92, "score": 34444.9159081805 }, { "content": " fn from(e: errorpb::Error) -> Error {\n\n Error::region_error(e)\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n Error {\n\n inner: Box::new(Context::new(kind)),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Context<ErrorKind>> for Error {\n\n fn from(inner: Context<ErrorKind>) -> Error {\n\n Error {\n\n inner: Box::new(inner),\n\n }\n\n }\n\n}\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 93, "score": 34443.698342288815 }, { "content": " #[fail(display = \"{}\", message)]\n\n InternalError { message: String },\n\n /// Multiple errors\n\n #[fail(display = \"Multiple errors: {:?}\", _0)]\n\n MultipleErrors(Vec<Error>),\n\n /// Invalid ColumnFamily\n\n #[fail(display = \"Unsupported column family {}\", _0)]\n\n 
ColumnFamilyError(String),\n\n}\n\n\n\nimpl Fail for Error {\n\n fn cause(&self) -> Option<&dyn Fail> {\n\n self.inner.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.inner.backtrace()\n\n }\n\n}\n\n\n", "file_path": "tikv-client-common/src/errors.rs", "rank": 94, "score": 34443.11916554209 }, { "content": "// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\n#[macro_use]\n\nextern crate log;\n\n\n\nmod errors;\n\npub mod region;\n\nmod store_builder;\n\n\n\npub use crate::errors::{HasError, HasRegionError};\n\n#[doc(inline)]\n\npub use crate::region::{Region, RegionId, RegionVerId, StoreId};\n\n#[doc(inline)]\n\npub use crate::store_builder::StoreBuilder;\n\npub use kvproto::tikvpb::TikvClient;\n\n\n\nuse derive_new::new;\n\nuse futures::{future::BoxFuture, prelude::*};\n\nuse grpcio::{CallOption, Environment};\n\nuse std::{sync::Arc, time::Duration};\n\nuse tikv_client_common::{security::SecurityManager, stats::tikv_stats, ErrorKind, Result};\n\n\n\n/// A trait for connecting to TiKV stores.\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 95, "score": 34308.33699809596 }, { "content": "#[derive(new)]\n\npub struct Store<Client: KvClient> {\n\n pub region: Region,\n\n pub client: Client,\n\n timeout: Duration,\n\n}\n\n\n\nimpl<Client: KvClient> Store<Client> {\n\n pub fn from_builder<T>(builder: StoreBuilder, connect: Arc<T>) -> Result<Store<Client>>\n\n where\n\n Client: KvClient + Clone + Send + Sync + 'static,\n\n T: KvConnect<KvClient = Client>,\n\n {\n\n info!(\"connect to tikv endpoint: {:?}\", &builder.address);\n\n let client = connect.connect(builder.address.as_str())?;\n\n Ok(Store::new(builder.region, client, builder.timeout))\n\n }\n\n\n\n pub fn call_options(&self) -> CallOption {\n\n CallOption::default().timeout(self.timeout)\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 96, "score": 34297.667611530036 }, { "content": " }\n\n\n\n pub fn dispatch<Resp, RpcFuture>(\n\n &self,\n\n 
request_name: &'static str,\n\n fut: ::grpcio::Result<RpcFuture>,\n\n ) -> BoxFuture<'static, Result<Resp>>\n\n where\n\n RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>,\n\n Resp: HasError + Sized + Clone + Send + 'static,\n\n RpcFuture: Send + 'static,\n\n {\n\n self.client.dispatch(request_name, fut)\n\n }\n\n}\n\n\n\nasync fn map_errors_and_trace<Resp, RpcFuture>(\n\n request_name: &'static str,\n\n fut: ::grpcio::Result<RpcFuture>,\n\n) -> Result<Resp>\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 97, "score": 34294.218261570255 }, { "content": "where\n\n RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>,\n\n Resp: HasError + Sized + Clone + Send + 'static,\n\n{\n\n let res = match fut {\n\n Ok(f) => f.await,\n\n Err(e) => Err(e),\n\n };\n\n\n\n let context = tikv_stats(request_name);\n\n context.done(res.map_err(|e| ErrorKind::Grpc(e).into()))\n\n}\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 98, "score": 34292.10278563741 }, { "content": "\n\nimpl KvClient for KvRpcClient {\n\n fn dispatch<Resp, RpcFuture>(\n\n &self,\n\n request_name: &'static str,\n\n fut: ::grpcio::Result<RpcFuture>,\n\n ) -> BoxFuture<'static, Result<Resp>>\n\n where\n\n RpcFuture: Future<Output = std::result::Result<Resp, ::grpcio::Error>>,\n\n Resp: HasError + Sized + Clone + Send + 'static,\n\n RpcFuture: Send + 'static,\n\n {\n\n map_errors_and_trace(request_name, fut).boxed()\n\n }\n\n\n\n fn get_rpc_client(&self) -> Arc<TikvClient> {\n\n self.rpc_client.clone()\n\n }\n\n}\n\n\n", "file_path": "tikv-client-store/src/lib.rs", "rank": 99, "score": 34291.5111316441 } ]
Rust
2021/05_hydrothermal-venture/src/main.rs
macisamuele/adventofcode
34d8994493446f951afc4584d6da7f83e85fd8f8
use helpers::input_lines; use scan_fmt::scan_fmt; use std::collections::HashMap; use std::str::FromStr; const INPUT: &str = include_str!("../input.txt"); #[derive(Debug, PartialEq, Eq, Hash)] struct Point { x: usize, y: usize, } #[derive(Debug, PartialEq, Eq, Hash)] struct Line { point1: Point, point2: Point, } impl FromStr for Line { type Err = anyhow::Error; fn from_str(line: &str) -> Result<Self, Self::Err> { let (x1, y1, x2, y2) = scan_fmt!(line, "{},{} -> {},{}", usize, usize, usize, usize)?; Ok(Self { point1: Point { x: x1, y: y1 }, point2: Point { x: x2, y: y2 }, }) } } impl Line { fn to_horizontal_points(&self) -> impl Iterator<Item = Point> { let y: usize = self.point1.y; let (min_x, max_x): (usize, usize) = if self.point1.y == self.point2.y { if self.point1.x < self.point2.x { (self.point1.x, self.point2.x + 1) } else { (self.point2.x, self.point1.x + 1) } } else { (0, 0) }; (min_x..max_x).map(move |x| Point { x, y }) } fn to_vertical_points(&self) -> impl Iterator<Item = Point> { let x: usize = self.point1.x; let (min_y, max_y): (usize, usize) = if self.point1.x == self.point2.x { if self.point1.y < self.point2.y { (self.point1.y, self.point2.y + 1) } else { (self.point2.y, self.point1.y + 1) } } else { (0, 0) }; (min_y..max_y).map(move |y| Point { x, y }) } fn to_diagonal_points(&self) -> impl Iterator<Item = Point> { let (min_x, max_x): (usize, usize) = if self.point1.x < self.point2.x { (self.point1.x, self.point2.x + 1) } else { (self.point2.x, self.point1.x + 1) }; let (min_y, max_y): (usize, usize) = if self.point1.y < self.point2.y { (self.point1.y, self.point2.y + 1) } else { (self.point2.y, self.point1.y + 1) }; let (is_x_negative_increment, is_y_negative_increment, points_in_line) = if max_x - min_x == max_y - min_y { ( self.point1.x > self.point2.x, self.point1.y > self.point2.y, max_x - min_x, ) } else { (false, false, 0) }; let point1_x = self.point1.x; let point1_y = self.point1.y; (0..points_in_line).map(move |point_number| Point { x: if 
is_x_negative_increment { point1_x - point_number } else { point1_x + point_number }, y: if is_y_negative_increment { point1_y - point_number } else { point1_y + point_number }, }) } } fn register_point(sparse_grid: &mut HashMap<Point, usize>, key: Point) { let current_value = sparse_grid.get(&key).map_or(0, |value| *value); sparse_grid.insert(key, current_value + 1); } fn part01(lines: &[Line]) -> usize { let mut sparse_grid = HashMap::new(); for line in lines { for point in line.to_horizontal_points().chain(line.to_vertical_points()) { register_point(&mut sparse_grid, point); } } sparse_grid.values().filter(|count| **count >= 2).count() } fn part02(lines: &[Line]) -> usize { let mut sparse_grid = HashMap::new(); for line in lines { for point in line .to_horizontal_points() .chain(line.to_vertical_points()) .chain(line.to_diagonal_points()) { register_point(&mut sparse_grid, point); } } sparse_grid.values().filter(|count| **count >= 2).count() } fn main() -> anyhow::Result<()> { let lines: Vec<Line> = input_lines(INPUT)? .iter() .map(|line| line.parse()) .collect::<Result<_, _>>()?; println!("Part 1: {}", part01(&lines)); println!("Part 2: {}", part02(&lines)); Ok(()) }
use helpers::input_lines; use scan_fmt::scan_fmt; use std::collections::HashMap; use std::str::FromStr; const INPUT: &str = include_str!("../input.txt"); #[derive(Debug, PartialEq, Eq, Hash)] struct Point { x: usize, y: usize, } #[derive(Debug, PartialEq, Eq, Hash)] struct Line { point1: Point, point2: Point, } impl FromStr for Line { type Err = anyhow::Error;
} impl Line { fn to_horizontal_points(&self) -> impl Iterator<Item = Point> { let y: usize = self.point1.y; let (min_x, max_x): (usize, usize) = if self.point1.y == self.point2.y { if self.point1.x < self.point2.x { (self.point1.x, self.point2.x + 1) } else { (self.point2.x, self.point1.x + 1) } } else { (0, 0) }; (min_x..max_x).map(move |x| Point { x, y }) } fn to_vertical_points(&self) -> impl Iterator<Item = Point> { let x: usize = self.point1.x; let (min_y, max_y): (usize, usize) = if self.point1.x == self.point2.x { if self.point1.y < self.point2.y { (self.point1.y, self.point2.y + 1) } else { (self.point2.y, self.point1.y + 1) } } else { (0, 0) }; (min_y..max_y).map(move |y| Point { x, y }) } fn to_diagonal_points(&self) -> impl Iterator<Item = Point> { let (min_x, max_x): (usize, usize) = if self.point1.x < self.point2.x { (self.point1.x, self.point2.x + 1) } else { (self.point2.x, self.point1.x + 1) }; let (min_y, max_y): (usize, usize) = if self.point1.y < self.point2.y { (self.point1.y, self.point2.y + 1) } else { (self.point2.y, self.point1.y + 1) }; let (is_x_negative_increment, is_y_negative_increment, points_in_line) = if max_x - min_x == max_y - min_y { ( self.point1.x > self.point2.x, self.point1.y > self.point2.y, max_x - min_x, ) } else { (false, false, 0) }; let point1_x = self.point1.x; let point1_y = self.point1.y; (0..points_in_line).map(move |point_number| Point { x: if is_x_negative_increment { point1_x - point_number } else { point1_x + point_number }, y: if is_y_negative_increment { point1_y - point_number } else { point1_y + point_number }, }) } } fn register_point(sparse_grid: &mut HashMap<Point, usize>, key: Point) { let current_value = sparse_grid.get(&key).map_or(0, |value| *value); sparse_grid.insert(key, current_value + 1); } fn part01(lines: &[Line]) -> usize { let mut sparse_grid = HashMap::new(); for line in lines { for point in line.to_horizontal_points().chain(line.to_vertical_points()) { register_point(&mut sparse_grid, 
point); } } sparse_grid.values().filter(|count| **count >= 2).count() } fn part02(lines: &[Line]) -> usize { let mut sparse_grid = HashMap::new(); for line in lines { for point in line .to_horizontal_points() .chain(line.to_vertical_points()) .chain(line.to_diagonal_points()) { register_point(&mut sparse_grid, point); } } sparse_grid.values().filter(|count| **count >= 2).count() } fn main() -> anyhow::Result<()> { let lines: Vec<Line> = input_lines(INPUT)? .iter() .map(|line| line.parse()) .collect::<Result<_, _>>()?; println!("Part 1: {}", part01(&lines)); println!("Part 2: {}", part02(&lines)); Ok(()) }
fn from_str(line: &str) -> Result<Self, Self::Err> { let (x1, y1, x2, y2) = scan_fmt!(line, "{},{} -> {},{}", usize, usize, usize, usize)?; Ok(Self { point1: Point { x: x1, y: y1 }, point2: Point { x: x2, y: y2 }, }) }
function_block-full_function
[ { "content": "type Point = (usize, usize);\n\n\n", "file_path": "2021/15_chiton/src/main.rs", "rank": 0, "score": 208321.5886531291 }, { "content": "fn part02(line: &str) -> usize {\n\n let mut game = GameTable::new(\n\n line.as_bytes()\n\n .iter()\n\n .map(|b| (b - b'0') as usize)\n\n .collect::<Vec<_>>()\n\n .as_slice(),\n\n 1_000_000,\n\n );\n\n\n\n game.play(10_000_000);\n\n\n\n game.iter(1).skip(1).take(2).product()\n\n}\n\n\n", "file_path": "2020/23_crab-cups/src/main.rs", "rank": 1, "score": 190194.6895700844 }, { "content": "fn part01(input: &str) -> usize {\n\n find_start(input, 4)\n\n}\n\n\n", "file_path": "2022/06_tuning-trouble/src/main.rs", "rank": 2, "score": 189353.01464357652 }, { "content": "fn part02(input: &str) -> usize {\n\n find_start(input, 14)\n\n}\n\n\n", "file_path": "2022/06_tuning-trouble/src/main.rs", "rank": 3, "score": 189353.01464357652 }, { "content": "fn find_start(input: &str, sequence_length: usize) -> usize {\n\n input\n\n .as_bytes()\n\n .windows(sequence_length)\n\n .enumerate()\n\n .find(|(_, chars)| {\n\n (0..(sequence_length - 1))\n\n .flat_map(|index1| {\n\n (index1 + 1..sequence_length).map(move |index2| (index1, index2))\n\n })\n\n .all(|(index1, index2)| chars[index1] != chars[index2])\n\n })\n\n .map_or(0, |(idx, _)| idx)\n\n + sequence_length\n\n}\n\n\n", "file_path": "2022/06_tuning-trouble/src/main.rs", "rank": 4, "score": 184536.08420694803 }, { "content": "/// # Errors\n\n///\n\n/// Will return `Err` if the `filename` provided in the CLI argumens does not exist\n\n/// or the user does not have permission to read it.\n\npub fn input_lines(input: &str) -> anyhow::Result<Vec<String>> {\n\n let mut res: Vec<String> = match Opts::parse().input_filename.as_deref() {\n\n Some(path) => {\n\n eprintln!(\"Loading input from custom file: {}\", &path);\n\n read_lines(path)?.collect()\n\n }\n\n None => input.split('\\n').map(ToString::to_string).collect(),\n\n };\n\n\n\n if res[res.len() - 1].is_empty() {\n\n 
res.remove(res.len() - 1);\n\n }\n\n Ok(res)\n\n}\n", "file_path": "helpers/src/lib.rs", "rank": 5, "score": 180345.8576424376 }, { "content": "#[derive(Clone, Eq, Hash, PartialEq)]\n\nstruct Point {\n\n x: i64,\n\n y: i64,\n\n z: i64,\n\n w: i64,\n\n}\n\n\n\nimpl std::fmt::Debug for Point {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"[x={}, y={}, z={}, w={}]\",\n\n self.x, self.y, self.z, self.w\n\n )\n\n }\n\n}\n\n\n\nimpl Point {\n\n #[allow(clippy::needless_lifetimes)]\n\n fn neighbours<'a>(&'a self) -> impl Iterator<Item = Point> + 'a {\n", "file_path": "2020/17_conway-cubes/src/main.rs", "rank": 7, "score": 164557.5492696818 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\nstruct Point {\n\n x: i64,\n\n y: i64,\n\n}\n\n\n\nimpl std::ops::Add for Point {\n\n type Output = Self;\n\n fn add(self, rhs: Self) -> Self {\n\n Self {\n\n x: self.x + rhs.x,\n\n y: self.y + rhs.y,\n\n }\n\n }\n\n}\n\n\n\nimpl std::ops::Sub for Point {\n\n type Output = Self;\n\n fn sub(self, rhs: Self) -> Self {\n\n Self {\n\n x: self.x - rhs.x,\n", "file_path": "2020/24_lobby-layout/src/main.rs", "rank": 8, "score": 164557.3980580591 }, { "content": "#[derive(Debug)]\n\nstruct Point {\n\n east: i64,\n\n north: i64,\n\n}\n\n\n\nimpl Point {\n\n fn rotate(&mut self, degrees: i64) {\n\n let rotation_in_quarters = degree_to_quarters(degrees);\n\n match rotation_in_quarters {\n\n 0 => {\n\n // (x, y) => (x, y)\n\n }\n\n 1 => {\n\n // (x, y) => (y, -x)\n\n std::mem::swap(&mut self.east, &mut self.north);\n\n self.east *= -1;\n\n }\n\n 2 => {\n\n // (x, y) => (-x, -y)\n\n self.east *= -1;\n", "file_path": "2020/12_rain-risk/src/main.rs", "rank": 9, "score": 164547.60213872825 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Input {\n\n supply_stacks: SupplyStacks,\n\n moves: Vec<Move>,\n\n}\n\n\n\nimpl TryFrom<&[String]> for Input {\n\n type Error = anyhow::Error;\n\n fn try_from(lines: &[String]) -> Result<Self, 
Self::Error> {\n\n let (idx, _) = lines\n\n .iter()\n\n .enumerate()\n\n .find(|(_, line)| line.is_empty())\n\n .ok_or_else(|| anyhow::anyhow!(\"Expected empty line, but it is missing\"))?;\n\n Ok(Self {\n\n supply_stacks: SupplyStacks::try_from(&lines[..idx])?,\n\n moves: lines[(idx + 1)..]\n\n .iter()\n\n .map(|line| line.parse())\n\n .collect::<Result<_, _>>()?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "2022/05_supply-stacks/src/main.rs", "rank": 11, "score": 163409.16499618354 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n messages: Vec<String>,\n\n rules: HashMap<usize, Rule>,\n\n}\n\n\n\nimpl std::fmt::Display for Input {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for (index, rule) in &self.rules {\n\n writeln!(f, \"{}: {}\", index, rule)?;\n\n }\n\n writeln!(f)?;\n\n for message in &self.messages {\n\n writeln!(f, \"{}\", message)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Clone for Input {\n\n fn clone(&self) -> Self {\n", "file_path": "2020/19_monster-messages/src/main.rs", "rank": 12, "score": 163409.16499618354 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Input {\n\n monkeys: BTreeMap<usize, Monkey>,\n\n}\n\n\n\nimpl TryFrom<&Vec<String>> for Input {\n\n type Error = anyhow::Error;\n\n fn try_from(lines: &Vec<String>) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n monkeys: (0..lines.len())\n\n .step_by(7)\n\n .map(|monkey_start| Monkey::try_from(&lines[monkey_start..monkey_start + 6]))\n\n .collect::<Result<Vec<_>, _>>()?\n\n .into_iter()\n\n .map(|monkey| (monkey.id, monkey))\n\n .collect(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "2022/11_monkey-in-the-middle/src/main.rs", "rank": 13, "score": 163409.16499618354 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n image_enhancement_algorithm: ImageEnhancementAlgorithm,\n\n base_image: Image,\n\n}\n\n\n\nimpl TryFrom<Vec<String>> for Input {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: Vec<String>) -> Result<Self, Self::Error> {\n\n 
anyhow::ensure!(lines.len() > 3);\n\n Ok(Self {\n\n image_enhancement_algorithm: lines[0].parse()?,\n\n base_image: Image::try_from(&lines[2..])?,\n\n })\n\n }\n\n}\n\n\n\nimpl ImageEnhancementAlgorithm {\n\n fn is_light(&self, position: usize) -> bool {\n\n debug_assert!(position < 512);\n", "file_path": "2021/20_trench-map/src/main.rs", "rank": 14, "score": 163409.16499618354 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n template: Vec<char>,\n\n insertion_rules: HashMap<(char, char), char>,\n\n}\n\n\n\nimpl TryFrom<Vec<String>> for Input {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: Vec<String>) -> Result<Self, Self::Error> {\n\n anyhow::ensure!(lines.len() > 2);\n\n let template = lines[0].chars().collect();\n\n let insertion_rules = lines[2..]\n\n .iter()\n\n .map(|line| {\n\n if let Some((source, destination)) = line.split_once(\" -> \") {\n\n let bytes_source: Vec<char> = source.chars().collect();\n\n anyhow::ensure!(\n\n bytes_source.len() == 2,\n\n \"source ({source}) is expected to have only 2 elements\",\n\n source = source\n", "file_path": "2021/14_extended-polymerization/src/main.rs", "rank": 15, "score": 163409.16499618354 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n scanner_id_to_rotation_to_scanner: HashMap<ScannerId, HashMap<Rotation, Scanner>>,\n\n}\n\n\n\nimpl TryFrom<Vec<String>> for Input {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: Vec<String>) -> Result<Self, Self::Error> {\n\n let read_scanners: Vec<Scanner> = std::iter::once(0)\n\n .chain(lines.iter().enumerate().filter_map(|(line_number, line)| {\n\n if line.is_empty() {\n\n Some(line_number)\n\n } else {\n\n None\n\n }\n\n }))\n\n .chain(std::iter::once(lines.len()))\n\n .collect::<Vec<_>>()\n\n .windows(2)\n\n .map(|pair| {\n", "file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 16, "score": 163409.16499618354 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n start: Position,\n\n end: Position,\n\n n_rows: usize,\n\n 
n_columns: usize,\n\n map: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl TryFrom<&Vec<String>> for Input {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: &Vec<String>) -> Result<Self, Self::Error> {\n\n if lines.is_empty() {\n\n Err(anyhow::anyhow!(\"Expected to have at least one row\"))\n\n } else if lines\n\n .iter()\n\n .skip(1)\n\n .any(|line| lines[0].len() != line.len())\n\n {\n\n Err(anyhow::anyhow!(\n", "file_path": "2022/12_hill-climbing-algorithm/src/main.rs", "rank": 18, "score": 161614.76511174836 }, { "content": "struct Input {\n\n cmds: Vec<Cmd>,\n\n}\n\nimpl TryFrom<&[String]> for Input {\n\n type Error = anyhow::Error;\n\n fn try_from(lines: &[String]) -> Result<Self, Self::Error> {\n\n let mut lines_iter = lines.iter().map(String::as_str).peekable();\n\n let mut cmds: Vec<Cmd> = vec![];\n\n while lines_iter.peek().is_some() {\n\n let cmd = Cmd::from_shell_content(&mut lines_iter, cmds.last().map(Cmd::cwd))?;\n\n cmds.push(cmd);\n\n }\n\n Ok(Self { cmds })\n\n }\n\n}\n\nimpl Input {\n\n fn directory_to_size(&self) -> BTreeMap<&Path, usize> {\n\n let mut directory_to_size: BTreeMap<&Path, usize> =\n\n self.cmds.iter().map(|cmd| (cmd.cwd(), 0)).collect();\n\n\n", "file_path": "2022/07_no-space-left-on-device/src/main.rs", "rank": 19, "score": 161614.76511174836 }, { "content": "#[derive(Debug)]\n\nstruct Input<'a> {\n\n field_to_ranges: HashMap<&'a str, Vec<RangeInclusive<usize>>>,\n\n nearby_tickets: Vec<Ticket>,\n\n ticket: Ticket,\n\n}\n\n\n\nimpl<'a> From<&'a Vec<String>> for Input<'a> {\n\n fn from(lines: &'a Vec<String>) -> Self {\n\n let mut lines_iter = lines.iter();\n\n\n\n let field_to_ranges = lines_iter\n\n .by_ref()\n\n .take_while(|line| !line.is_empty())\n\n .map(|line| {\n\n let line_parts: Vec<_> = line.split(\": \").collect();\n\n let field = line_parts[0];\n\n let ranges = line_parts[1]\n\n .split(\" or \")\n\n .map(|range| {\n\n let (start, end) = scan_fmt!(range, \"{}-{}\", usize, usize).unwrap();\n", "file_path": 
"2020/16_ticket-translation/src/main.rs", "rank": 20, "score": 159150.54287603975 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Input<'a> {\n\n allergen_to_ingredients: HashMap<&'a str, HashSet<&'a str>>,\n\n ingredients: Vec<&'a str>,\n\n}\n\n\n\nimpl<'a> From<&Vec<&'a str>> for Input<'a> {\n\n fn from(lines: &Vec<&'a str>) -> Self {\n\n let mut allergen_to_ingredients: HashMap<&'a str, HashSet<&'a str>> = HashMap::new();\n\n let mut all_ingredients = Vec::new();\n\n\n\n for line in lines {\n\n let (column_index, _) = line\n\n .as_bytes()\n\n .iter()\n\n .enumerate()\n\n .find(|(_, character)| *character == &b'(')\n\n .unwrap();\n\n let current_ingredients: HashSet<_> = line[..column_index]\n\n .split(' ')\n\n .filter(|ingredient| !ingredient.is_empty())\n", "file_path": "2020/21_allergen-assessment/src/main.rs", "rank": 21, "score": 159150.54287603975 }, { "content": "type BingoNumber = u8; // Using u8 as numbers in the input are in the range 0-99\n\n\n", "file_path": "2021/04_giant-squid/src/main.rs", "rank": 22, "score": 157801.174963204 }, { "content": "fn part01(line: &str) -> String {\n\n let mut game = GameTable::new(\n\n line.as_bytes()\n\n .iter()\n\n .map(|b| (b - b'0') as usize)\n\n .collect::<Vec<_>>()\n\n .as_slice(),\n\n 9,\n\n );\n\n\n\n game.play(100);\n\n\n\n game.cups_in_string(1)[1..].to_string()\n\n}\n\n\n", "file_path": "2020/23_crab-cups/src/main.rs", "rank": 23, "score": 149561.94672182022 }, { "content": "type BagRules<'a> = Graph<&'a str>;\n\n\n", "file_path": "2020/07_handy-haversacks/src/main.rs", "rank": 24, "score": 146552.6711704515 }, { "content": "#[derive(Debug, Default)]\n\nstruct DeterministicDice<const N: u8> {\n\n value: u8,\n\n roll_count: u16,\n\n}\n\n\n\nimpl<const N: u8> Dice for DeterministicDice<N> {\n\n fn roll(&mut self) -> u8 {\n\n self.roll_count += 1;\n\n let result = self.value + 1;\n\n self.value = (self.value + 1) % N;\n\n result\n\n }\n\n\n\n fn roll_count(&self) -> u16 {\n\n self.roll_count\n\n }\n\n\n\n fn 
extraction_3_rolls_statistics() -> HashMap<u16, usize> {\n\n let mut dice = Self::default();\n\n let mut extractions_histogram: HashMap<(u8, u8, u8), usize> = HashMap::new();\n", "file_path": "2021/21_dirac-dice/src/main.rs", "rank": 25, "score": 146258.41547678533 }, { "content": "#[derive(Debug, Default)]\n\nstruct RandomDice<const N: u8> {\n\n roll_count: u16,\n\n}\n\n\n\nimpl<const N: u8> Dice for RandomDice<N> {\n\n fn roll(&mut self) -> u8 {\n\n self.roll_count += 1;\n\n rand::random::<u8>() % N + 1\n\n }\n\n\n\n fn roll_count(&self) -> u16 {\n\n self.roll_count\n\n }\n\n\n\n fn extraction_3_rolls_statistics() -> HashMap<u16, usize> {\n\n (1..=N)\n\n .flat_map(|d1| {\n\n (1..=N).flat_map(move |d2| {\n\n (1..=N).map(move |d3| u16::from(d1) + u16::from(d2) + u16::from(d3))\n\n })\n\n })\n\n .fold(HashMap::new(), |mut map, value| {\n\n *map.entry(value).or_default() += 1;\n\n map\n\n })\n\n }\n\n}\n\n\n", "file_path": "2021/21_dirac-dice/src/main.rs", "rank": 26, "score": 146258.41547678533 }, { "content": "fn part01(input: &Input) -> usize {\n\n input\n\n .messages\n\n .iter()\n\n .filter(|message| input.is_valid(0, message))\n\n .count()\n\n}\n\n\n", "file_path": "2020/19_monster-messages/src/main.rs", "rank": 27, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n let mut cloned_input: Input = input.clone();\n\n\n\n cloned_input.rules.extend(\n\n [\"8: 42 | 42 8\", \"11: 42 31 | 42 11 31\"]\n\n .iter()\n\n .map(|rule_str| Rule::rule_id_and_rule(rule_str)),\n\n );\n\n\n\n cloned_input\n\n .messages\n\n .iter()\n\n .filter(|message| cloned_input.is_valid(0, message))\n\n .count()\n\n}\n\n\n", "file_path": "2020/19_monster-messages/src/main.rs", "rank": 28, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n fn update_possible_fields_per_column<'a>(\n\n ticket: &Ticket,\n\n input: &Input,\n\n possible_fields_per_column: &mut BTreeMap<usize, HashSet<&'a str>>,\n\n certanly_allocated_field_names: &mut 
HashSet<&'a str>,\n\n ) {\n\n if let Some(possible_columns_for_fields) =\n\n ticket.possible_columns_for_fields(&input.field_to_ranges)\n\n {\n\n for (general_possible_fields, possible_field_for_ticket) in possible_fields_per_column\n\n .values_mut()\n\n .zip(possible_columns_for_fields.iter())\n\n {\n\n general_possible_fields.retain(|value| possible_field_for_ticket.contains(value));\n\n if general_possible_fields.len() == 1 {\n\n certanly_allocated_field_names\n\n .insert(*general_possible_fields.iter().next().unwrap());\n\n }\n\n }\n", "file_path": "2020/16_ticket-translation/src/main.rs", "rank": 29, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n let mut processed_image = input.base_image.clone();\n\n processed_image.apply_image_enhancement_algorithm(&input.image_enhancement_algorithm, 50);\n\n processed_image.light_pixels_count()\n\n}\n\n\n\nimpl std::fmt::Display for Image {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let ranges = &self.ranges;\n\n for row_index in ranges.min_row..=ranges.max_row {\n\n for column_index in ranges.min_column..=ranges.max_column {\n\n write!(\n\n f,\n\n \"{}\",\n\n if self.is_light(row_index, column_index) {\n\n '#'\n\n } else {\n\n '.'\n\n }\n\n )?;\n\n }\n\n writeln!(f)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "2021/20_trench-map/src/main.rs", "rank": 30, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n let final_occurrences = input.occurrences_after_iterations(40);\n\n\n\n let min = final_occurrences.values().min().unwrap_or(&0);\n\n let max = final_occurrences.values().max().unwrap_or(&usize::MAX);\n\n\n\n max - min\n\n}\n\n\n", "file_path": "2021/14_extended-polymerization/src/main.rs", "rank": 31, "score": 133111.6637042059 }, { "content": "fn part01(input: &Input) -> usize {\n\n input\n\n .try_merge_all_scanners()\n\n .map_or(0, |(merged_scanner, _)| merged_scanner.visible_beacons())\n\n}\n\n\n", 
"file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 32, "score": 133111.6637042059 }, { "content": "fn part01(input: &Input) -> usize {\n\n let final_occurrences = input.occurrences_after_iterations(10);\n\n\n\n let min = final_occurrences.values().min().unwrap_or(&0);\n\n let max = final_occurrences.values().max().unwrap_or(&usize::MAX);\n\n\n\n max - min\n\n}\n\n\n", "file_path": "2021/14_extended-polymerization/src/main.rs", "rank": 33, "score": 133111.6637042059 }, { "content": "fn part01(input: &Input) -> usize {\n\n input\n\n .nearby_tickets\n\n .iter()\n\n .map(|ticket| {\n\n ticket\n\n .invalid_field_values(&input.field_to_ranges)\n\n .iter()\n\n .sum::<usize>()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "2020/16_ticket-translation/src/main.rs", "rank": 34, "score": 133111.6637042059 }, { "content": "fn part01(input: &Input) -> usize {\n\n let mut processed_image = input.base_image.clone();\n\n processed_image.apply_image_enhancement_algorithm(&input.image_enhancement_algorithm, 2);\n\n processed_image.light_pixels_count()\n\n}\n\n\n", "file_path": "2021/20_trench-map/src/main.rs", "rank": 35, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n input\n\n .try_merge_all_scanners()\n\n .map_or(0, |(_, scanner_id_to_offset)| {\n\n let offsets: HashSet<Point3D> = scanner_id_to_offset.values().cloned().collect();\n\n pair_of_values(&offsets)\n\n .map(|(offset1, offset2)| offset1.manhattan_distance(offset2))\n\n .max()\n\n .unwrap_or(0)\n\n })\n\n}\n\n\n", "file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 36, "score": 133111.6637042059 }, { "content": "fn part01(input: &Input) -> usize {\n\n let ingredients_with_allergene: HashSet<_> =\n\n input.allergen_to_ingredients.values().flatten().collect();\n\n\n\n let ingredients_with_no_allergene_count = input\n\n .ingredients\n\n .iter()\n\n .filter(|ingredient| !ingredients_with_allergene.contains(ingredient))\n\n .count();\n\n\n\n 
ingredients_with_no_allergene_count\n\n}\n\n\n", "file_path": "2020/21_allergen-assessment/src/main.rs", "rank": 37, "score": 133111.6637042059 }, { "content": "fn part02(input: &Input) -> usize {\n\n let directory_to_size = input.directory_to_size();\n\n let space_needed_for_update = 30_000_000;\n\n let disk_size = 70_000_000;\n\n let space_to_release = directory_to_size[&Path::new(\"/\")] + space_needed_for_update - disk_size;\n\n directory_to_size\n\n .values()\n\n .filter(|size| size >= &&space_to_release)\n\n .min()\n\n .copied()\n\n .unwrap_or(0)\n\n}\n\n\n", "file_path": "2022/07_no-space-left-on-device/src/main.rs", "rank": 38, "score": 131711.10540014462 }, { "content": "fn part01(input: &Input) -> usize {\n\n input\n\n .directory_to_size()\n\n .values()\n\n .filter(|size| size < &&100_000)\n\n .sum()\n\n}\n\n\n", "file_path": "2022/07_no-space-left-on-device/src/main.rs", "rank": 39, "score": 131711.10540014462 }, { "content": "fn part01(input: &Input) -> usize {\n\n shortest_path_length(input, &input.start, &input.end).unwrap_or(0)\n\n}\n\n\n", "file_path": "2022/12_hill-climbing-algorithm/src/main.rs", "rank": 40, "score": 131711.10540014462 }, { "content": "fn part02(input: &Input) -> usize {\n\n input\n\n .map\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(row_id, row)| {\n\n row.iter()\n\n .enumerate()\n\n .map(move |(column_id, cell)| (row_id, column_id, cell))\n\n })\n\n .filter_map(|(row_id, column_id, cell)| {\n\n if *cell == b'a' {\n\n Some(Position {\n\n row: row_id,\n\n column: column_id,\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n .filter_map(|start_position| shortest_path_length(input, &start_position, &input.end))\n\n .min()\n\n .unwrap_or(0)\n\n}\n\n\n", "file_path": "2022/12_hill-climbing-algorithm/src/main.rs", "rank": 41, "score": 131711.10540014462 }, { "content": "fn part01(input: &[usize]) -> usize {\n\n input\n\n .windows(2)\n\n .map(|elements| usize::from(elements.last().unwrap() > elements.first().unwrap()))\n\n 
.sum()\n\n}\n\n\n", "file_path": "2021/01_sonar-sweep/src/main.rs", "rank": 44, "score": 124163.54655046045 }, { "content": "fn part01(input: &[usize]) -> usize {\n\n game(input, 2_020)\n\n}\n\n\n", "file_path": "2020/15_rambunctious-recitation/src/main.rs", "rank": 45, "score": 124163.54655046045 }, { "content": "fn part02(input: &[usize]) -> usize {\n\n input\n\n .windows(4)\n\n .map(|elements| usize::from(elements.last().unwrap() > elements.first().unwrap()))\n\n .sum()\n\n}\n\n\n", "file_path": "2021/01_sonar-sweep/src/main.rs", "rank": 46, "score": 124163.54655046045 }, { "content": "fn part02(input: &[usize]) -> usize {\n\n game(input, 30_000_000)\n\n}\n\n\n", "file_path": "2020/15_rambunctious-recitation/src/main.rs", "rank": 47, "score": 124163.54655046045 }, { "content": "fn game(input: &[usize], turns: usize) -> usize {\n\n if turns < input.len() {\n\n input[turns]\n\n } else {\n\n let mut values: Vec<usize> = vec![0; turns];\n\n let mut value_to_turns: HashMap<usize, CircularBuffer<usize>> = HashMap::new();\n\n\n\n macro_rules! 
play {\n\n ($turn:expr, $value: expr) => {\n\n values[$turn] = $value;\n\n value_to_turns\n\n .entry($value)\n\n .or_insert_with(|| CircularBuffer::new(2))\n\n .add($turn);\n\n };\n\n }\n\n\n\n for (turn, value) in input.iter().enumerate() {\n\n play!(turn, *value);\n\n }\n", "file_path": "2020/15_rambunctious-recitation/src/main.rs", "rank": 48, "score": 122960.69602155156 }, { "content": "// Considering the input we should need around 10 bits to represent the number\n\n// Even assuming translations we should be able to make it work within 16 bits\n\n// Doing this reduces the memory of each point from\n\n// 24 bytes (with isize) to 6 bytes (with i16)\n\ntype PointInt = i16;\n\n\n", "file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 49, "score": 121910.22561167256 }, { "content": "fn part01(input: &[InputEntry]) -> usize {\n\n input\n\n .iter()\n\n .filter_map(|entry| entry.digits().ok())\n\n .map(|digits| {\n\n digits\n\n .iter()\n\n .filter(|digit| matches!(digit, 1 | 4 | 7 | 8))\n\n .count()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "2021/08_seven-segment-search/src/main.rs", "rank": 50, "score": 121547.75944330488 }, { "content": "fn part02(input: &[InputEntry]) -> usize {\n\n input\n\n .iter()\n\n .filter_map(|entry| entry.to_number().ok())\n\n .sum()\n\n}\n\n\n", "file_path": "2021/08_seven-segment-search/src/main.rs", "rank": 51, "score": 121547.75944330488 }, { "content": "#[derive(Debug)]\n\nstruct InputEntry {\n\n one_representation: Vec<char>,\n\n four_representation: Vec<char>,\n\n four_displays: [Vec<char>; 4],\n\n}\n\n\n\nimpl FromStr for InputEntry {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if let Some((combinations, displays)) = s.split_once('|') {\n\n const EMPTY: Vec<char> = vec![];\n\n let (one_representation, four_representation) = {\n\n let (maybe_one_representation, maybe_four_representation) =\n\n combinations.split_whitespace().fold(\n\n (None, None),\n\n move 
|(maybe_one_representation, maybe_four_representation), segments| {\n\n let values: Vec<char> = segments.chars().collect();\n\n\n\n match values.len() {\n", "file_path": "2021/08_seven-segment-search/src/main.rs", "rank": 52, "score": 121382.30933189527 }, { "content": "fn evaluate_operation_part02(operation_str: &str) -> usize {\n\n fn eval_rec(operation: &mut Operation) -> usize {\n\n let mut stack = LinkedList::<Token>::new();\n\n\n\n for token in &mut operation.tokens {\n\n // Resolve sub operations\n\n if let Token::SubOperation(sub_operation) = token {\n\n *token = Token::Literal(eval_rec(sub_operation));\n\n }\n\n }\n\n\n\n for token in &operation.tokens {\n\n // Resolve additions (have precedence)\n\n match token {\n\n Token::Literal(n) => {\n\n if let Some(last_token) = stack.back() {\n\n if last_token == &Token::Add {\n\n stack.pop_back(); // Operator\n\n if let Some(Token::Literal(previous_n)) = stack.pop_back() {\n\n stack.push_back(Token::Literal(previous_n + n));\n", "file_path": "2020/18_operation-order/src/main.rs", "rank": 53, "score": 121166.84600459505 }, { "content": "fn evaluate_operation_part01(operation_str: &str) -> usize {\n\n fn eval_rec(iter: &mut dyn Iterator<Item = &Token>) -> usize {\n\n let mut current_result = 0;\n\n let mut algebric_operation = AlgebricOperation::Add;\n\n\n\n for token in iter {\n\n match token {\n\n Token::Literal(n) => match algebric_operation {\n\n AlgebricOperation::Add => {\n\n current_result += n;\n\n }\n\n AlgebricOperation::Mul => {\n\n current_result *= n;\n\n }\n\n },\n\n Token::Add => {\n\n algebric_operation = AlgebricOperation::Add;\n\n }\n\n Token::Mul => {\n\n algebric_operation = AlgebricOperation::Mul;\n", "file_path": "2020/18_operation-order/src/main.rs", "rank": 54, "score": 121166.84600459505 }, { "content": "type CellId = usize;\n\n\n", "file_path": "2021/12_passage-pathing/src/main.rs", "rank": 55, "score": 120977.33563537356 }, { "content": "fn shortest_path_length(input: &Input, start: 
&Position, end: &Position) -> Option<usize> {\n\n dijkstra(\n\n /* start */ start,\n\n /* successors */\n\n |position| {\n\n let mut successors = vec![];\n\n if position.row > 0\n\n && input.map[position.row - 1][position.column]\n\n <= input.map[position.row][position.column] + 1\n\n {\n\n // Up is possible\n\n successors.push((\n\n Position {\n\n row: position.row - 1,\n\n column: position.column,\n\n },\n\n 1,\n\n ));\n\n }\n\n if position.row < input.n_rows - 1\n", "file_path": "2022/12_hill-climbing-algorithm/src/main.rs", "rank": 56, "score": 117321.67224822905 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct SparseGrid(HashMap<isize, HashSet<isize>>);\n\n\n\nimpl SparseGrid {\n\n fn rows_range(&self) -> RangeInclusive<isize> {\n\n let min_row = *self\n\n .0\n\n .keys()\n\n .min()\n\n .expect(\"Expected to have at least one row\");\n\n let max_row = *self\n\n .0\n\n .keys()\n\n .max()\n\n .expect(\"Expected to have at least one row\");\n\n min_row..=max_row\n\n }\n\n\n\n fn columns_range(&self) -> RangeInclusive<isize> {\n\n let min_columns = *self\n\n .0\n", "file_path": "2021/13_transparent-origami/src/main.rs", "rank": 57, "score": 117314.25010571301 }, { "content": "#[derive(Debug)]\n\nstruct Range(RangeInclusive<usize>);\n\nimpl Deref for Range {\n\n type Target = RangeInclusive<usize>;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\nimpl From<RangeInclusive<usize>> for Range {\n\n fn from(value: RangeInclusive<usize>) -> Self {\n\n Self(value)\n\n }\n\n}\n\nimpl Range {\n\n fn fully_includes(&self, other: &Range) -> bool {\n\n self.contains(other.start()) && self.contains(other.end())\n\n }\n\n\n\n fn overlaps(&self, other: &Range) -> bool {\n\n !(self.end() < other.start() || self.start() > other.end())\n\n }\n\n}\n\n\n", "file_path": "2022/04_camp-cleanup/src/main.rs", "rank": 58, "score": 115151.10888357894 }, { "content": "fn value_in_every_loop(subject: usize) -> impl Iterator<Item = usize> {\n\n 
std::iter::successors(Some(1), move |value| Some((value * subject) % 20_201_227))\n\n}\n\n\n", "file_path": "2020/25_combo-breaker/src/main.rs", "rank": 59, "score": 114258.03547540553 }, { "content": "fn read_lines(filename: &str) -> std::io::Result<Box<dyn Iterator<Item = String>>> {\n\n Ok(Box::new(\n\n BufReader::new(File::open(filename)?)\n\n .lines()\n\n .into_iter()\n\n .filter_map(Result::ok),\n\n ))\n\n}\n\n\n", "file_path": "helpers/src/lib.rs", "rank": 60, "score": 97432.74297347374 }, { "content": "/// Retrieve all the unordered pair of values within a given set.\n\n///\n\n/// NOTES:\n\n/// * The following pairs are considered as the same: `(A, B)` is equivalent to `(B, A)`\n\n/// * Having values in a set ensures that the pair `(A, A)` is not possible\n\nfn pair_of_values<T: PartialOrd>(values: &HashSet<T>) -> impl Iterator<Item = (&T, &T)> {\n\n values.iter().flat_map(move |value1| {\n\n values.iter().filter_map(move |value2| {\n\n if value1 > value2 {\n\n Some((value1, value2))\n\n } else {\n\n None\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 61, "score": 94176.96553555112 }, { "content": "fn part01(input: &Input) -> u64 {\n\n let inspections: BTreeSet<_> = number_of_inspections(input, 20, 3)\n\n .values()\n\n .copied()\n\n .collect();\n\n inspections.iter().rev().take(2).product()\n\n}\n\n\n", "file_path": "2022/11_monkey-in-the-middle/src/main.rs", "rank": 62, "score": 92478.92085594175 }, { "content": "fn part02(input: &Input) -> String {\n\n let mut cloned_input = input.clone();\n\n\n\n // BTreeMap to avoid explicit sorting <- as it does not change complexity\n\n let mut allergen_to_ingredient: BTreeMap<&str, &str> = BTreeMap::new();\n\n let mut used_ingredients: HashSet<&str> = HashSet::new();\n\n\n\n while allergen_to_ingredient.len() != cloned_input.allergen_to_ingredients.len() {\n\n for (allergen, ingredients) in &cloned_input.allergen_to_ingredients {\n\n if ingredients.len() == 1 && 
!allergen_to_ingredient.contains_key(allergen) {\n\n used_ingredients.extend(ingredients.iter());\n\n\n\n allergen_to_ingredient.insert(allergen, ingredients.iter().next().unwrap());\n\n }\n\n }\n\n\n\n for ingredients in &mut cloned_input.allergen_to_ingredients.values_mut() {\n\n ingredients.retain(|ingredient| !used_ingredients.contains(ingredient));\n\n }\n\n }\n\n\n\n allergen_to_ingredient\n\n .values()\n\n .copied()\n\n .collect::<Vec<_>>()\n\n .join(\",\")\n\n}\n\n\n", "file_path": "2020/21_allergen-assessment/src/main.rs", "rank": 63, "score": 92478.92085594175 }, { "content": "fn part02(input: &Input) -> u64 {\n\n let inspections: BTreeSet<_> = number_of_inspections(input, 10000, 1)\n\n .values()\n\n .copied()\n\n .collect();\n\n\n\n inspections.iter().rev().take(2).product()\n\n}\n\n\n", "file_path": "2022/11_monkey-in-the-middle/src/main.rs", "rank": 64, "score": 92478.92085594175 }, { "content": "fn part02(inputs: &[usize]) -> usize {\n\n calculate_o2_value(inputs, 0) * calculate_co2_value(inputs, 0)\n\n}\n\n\n", "file_path": "2021/03_binary-diagnostic/src/main.rs", "rank": 65, "score": 91470.99733296926 }, { "content": "fn part01(inputs: &[usize]) -> usize {\n\n let rate = inputs.iter().fold(Rate::default(), |mut rate, value| {\n\n rate.add_value(*value);\n\n rate\n\n });\n\n rate.gamma_rate() * rate.epsilon_rate()\n\n}\n\n\n", "file_path": "2021/03_binary-diagnostic/src/main.rs", "rank": 66, "score": 91470.99733296926 }, { "content": "fn part01(arrival_time: usize, line_numbers: &[Option<usize>]) -> usize {\n\n let earliest_departure_to_line_number: HashMap<usize, &usize> = line_numbers\n\n .iter()\n\n .filter_map(|maybe_line_number| {\n\n maybe_line_number.as_ref().map(|line_number| {\n\n match arrival_time.checked_rem(*line_number) {\n\n Some(0) => (arrival_time, line_number),\n\n Some(value) => (arrival_time - value + line_number, line_number),\n\n None => unreachable!(\"Not possible value\"),\n\n }\n\n })\n\n })\n\n .collect();\n\n 
earliest_departure_to_line_number\n\n .keys()\n\n .min()\n\n .map_or(0, |earliest_departure| {\n\n let line_number = *earliest_departure_to_line_number[earliest_departure];\n\n (earliest_departure - arrival_time) * line_number\n\n })\n\n}\n\n\n", "file_path": "2020/13_shuttle-search/src/main.rs", "rank": 67, "score": 89889.1025941042 }, { "content": "fn calculate_o2_value(inputs: &[usize], bit_index: usize) -> usize {\n\n if inputs.len() == 1 {\n\n inputs[0]\n\n } else {\n\n let (inputs_with_0_bit, inputs_with_1_bit) = inputs.iter().fold(\n\n (vec![], vec![]),\n\n |(mut inputs_with_0_bit, mut inputs_with_1_bit), value| {\n\n if (*value) & 1 << (NUMBER_OF_BITS - 1 - bit_index) == 0 {\n\n inputs_with_0_bit.push(*value);\n\n } else {\n\n inputs_with_1_bit.push(*value);\n\n }\n\n (inputs_with_0_bit, inputs_with_1_bit)\n\n },\n\n );\n\n\n\n if inputs_with_0_bit.len() > inputs_with_1_bit.len() {\n\n calculate_o2_value(&inputs_with_0_bit, bit_index + 1)\n\n } else {\n\n calculate_o2_value(&inputs_with_1_bit, bit_index + 1)\n\n }\n\n }\n\n}\n\n\n", "file_path": "2021/03_binary-diagnostic/src/main.rs", "rank": 68, "score": 89763.07599346753 }, { "content": "fn calculate_co2_value(inputs: &[usize], bit_index: usize) -> usize {\n\n if inputs.len() == 1 {\n\n inputs[0]\n\n } else {\n\n let (inputs_with_0_bit, inputs_with_1_bit) = inputs.iter().fold(\n\n (vec![], vec![]),\n\n |(mut inputs_with_0_bit, mut inputs_with_1_bit), value| {\n\n if (*value) & 1 << (NUMBER_OF_BITS - 1 - bit_index) == 0 {\n\n inputs_with_0_bit.push(*value);\n\n } else {\n\n inputs_with_1_bit.push(*value);\n\n }\n\n (inputs_with_0_bit, inputs_with_1_bit)\n\n },\n\n );\n\n\n\n if inputs_with_0_bit.len() > inputs_with_1_bit.len() {\n\n calculate_co2_value(&inputs_with_1_bit, bit_index + 1)\n\n } else {\n\n calculate_co2_value(&inputs_with_0_bit, bit_index + 1)\n\n }\n\n }\n\n}\n\n\n", "file_path": "2021/03_binary-diagnostic/src/main.rs", "rank": 69, "score": 89763.07599346753 }, { "content": "fn part02(mut 
input: Input) -> anyhow::Result<String> {\n\n for move_ in input.moves {\n\n input.supply_stacks.apply_crate_mover_9001(&move_)?;\n\n }\n\n Ok(input\n\n .supply_stacks\n\n .stacks\n\n .iter()\n\n .filter_map(VecDeque::back)\n\n .collect())\n\n}\n\n\n", "file_path": "2022/05_supply-stacks/src/main.rs", "rank": 70, "score": 87048.18817725425 }, { "content": "fn part01(mut input: Input) -> anyhow::Result<String> {\n\n for move_ in input.moves {\n\n input.supply_stacks.apply_crate_mover_9000(&move_)?;\n\n }\n\n Ok(input\n\n .supply_stacks\n\n .stacks\n\n .iter()\n\n .filter_map(VecDeque::back)\n\n .collect())\n\n}\n\n\n", "file_path": "2022/05_supply-stacks/src/main.rs", "rank": 71, "score": 87048.18817725425 }, { "content": "fn part01(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/23_amphipod/src/main.rs", "rank": 72, "score": 83649.94128754122 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/23_amphipod/src/main.rs", "rank": 73, "score": 83649.94128754122 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Grid {\n\n inner: Vec<Vec<u8>>,\n\n rows: usize,\n\n columns: usize,\n\n}\n\n\n\nimpl From<Vec<String>> for Grid {\n\n fn from(value: Vec<String>) -> Self {\n\n let inner: Vec<Vec<u8>> = value\n\n .iter()\n\n .map(|line| {\n\n line.chars()\n\n .map(|character| (character as u8) - b'0')\n\n .collect()\n\n })\n\n .collect();\n\n let rows = inner.len();\n\n let columns = inner.get(0).map_or(0, Vec::len);\n\n Self {\n\n inner,\n", "file_path": "2021/15_chiton/src/main.rs", "rank": 74, "score": 82009.25620365994 }, { "content": "#[derive(Parser, Debug)]\n\nstruct Opts {\n\n /// Sets a custom input file for the assignment. 
Passing no value implies the usage of the \"official\" assignment input.\n\n // #[clap(short, long)]\n\n input_filename: Option<String>,\n\n}\n\n\n", "file_path": "helpers/src/lib.rs", "rank": 75, "score": 82009.25620365994 }, { "content": "fn part01(lines: &[String]) -> usize {\n\n let pub_key_1: usize = lines[0].parse().unwrap();\n\n let pub_key_2: usize = lines[1].parse().unwrap();\n\n\n\n let loop_size = value_in_every_loop(7)\n\n .take_while(|value| value != &pub_key_1)\n\n .count();\n\n\n\n value_in_every_loop(pub_key_2).nth(loop_size).unwrap()\n\n}\n\n\n", "file_path": "2020/25_combo-breaker/src/main.rs", "rank": 76, "score": 81999.49356542644 }, { "content": "fn part01(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/25_sea-cucumber/src/main.rs", "rank": 77, "score": 81999.49356542644 }, { "content": "fn part02(lines: &[String]) -> usize {\n\n let (_, score) = RecursiveCombat::from(lines).play();\n\n score\n\n}\n\n\n", "file_path": "2020/22_crab-combat/src/main.rs", "rank": 78, "score": 81999.49356542644 }, { "content": "fn part01(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2022/13_distress-signal/src/main.rs", "rank": 79, "score": 81999.49356542644 }, { "content": "fn part01(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2020/20_jurassic-jigsaw/src/main.rs", "rank": 80, "score": 81999.49356542644 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/25_sea-cucumber/src/main.rs", "rank": 81, "score": 81999.49356542644 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2020/20_jurassic-jigsaw/src/main.rs", "rank": 82, "score": 81999.49356542644 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2022/13_distress-signal/src/main.rs", "rank": 83, "score": 81999.49356542644 }, { "content": "fn part02(lines: &[String]) -> usize {\n\n lines\n\n .iter()\n\n .map(|line| 
evaluate_operation_part02(line))\n\n .sum()\n\n}\n\n\n", "file_path": "2020/18_operation-order/src/main.rs", "rank": 84, "score": 81999.49356542644 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/22_reactor-reboot/src/main.rs", "rank": 85, "score": 81999.49356542644 }, { "content": "fn part02(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2020/25_combo-breaker/src/main.rs", "rank": 86, "score": 81999.49356542644 }, { "content": "fn part01(_lines: &[String]) -> usize {\n\n 0\n\n}\n\n\n", "file_path": "2021/22_reactor-reboot/src/main.rs", "rank": 87, "score": 81999.49356542644 }, { "content": "fn part02(lines: &[String]) -> usize {\n\n lines\n\n .iter()\n\n .filter(|line| {\n\n let (first_index, second_index, character, password) =\n\n scan_fmt!(line, \"{}-{} {}: {}\", usize, usize, char, String).unwrap();\n\n PasswordPolicyPart2 {\n\n indexes: [first_index, second_index],\n\n character,\n\n }\n\n .is_password_good(&password)\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "2020/02_password-philosophy/src/main.rs", "rank": 88, "score": 81999.49356542644 }, { "content": "fn part01(lines: &[String]) -> usize {\n\n lines\n\n .iter()\n\n .filter(|line| {\n\n let (min, max, character, password) =\n\n scan_fmt!(line, \"{}-{} {}: {}\", usize, usize, char, String).unwrap();\n\n PasswordPolicyPart1 {\n\n min,\n\n max,\n\n character,\n\n }\n\n .is_password_good(&password)\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "2020/02_password-philosophy/src/main.rs", "rank": 89, "score": 81999.49356542644 }, { "content": "fn part01(lines: &[String]) -> usize {\n\n let (_, score) = CrabCombat::from(lines).play();\n\n score\n\n}\n\n\n", "file_path": "2020/22_crab-combat/src/main.rs", "rank": 90, "score": 81999.49356542644 }, { "content": "fn part01(lines: &[String]) -> usize {\n\n lines\n\n .iter()\n\n .map(|line| evaluate_operation_part01(line))\n\n .sum()\n\n}\n\n\n", "file_path": "2020/18_operation-order/src/main.rs", 
"rank": 91, "score": 81999.49356542644 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct Scanner {\n\n id: ScannerId,\n\n beacons: HashSet<Point3D>,\n\n point_distances: HashMap<(Point3D, Point3D), Point3D>,\n\n known_distances: HashSet<Point3D>,\n\n}\n\n\n\nimpl TryFrom<&[String]> for Scanner {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: &[String]) -> Result<Self, Self::Error> {\n\n anyhow::ensure!(lines.len() > 1);\n\n let id = ScannerId(scan_fmt!(&lines[0], \"--- scanner {} ---\", u8)?);\n\n let beacons: HashSet<Point3D> = lines[1..]\n\n .iter()\n\n .map(|line| line.parse())\n\n .collect::<Result<_, _>>()?;\n\n\n\n Ok(Self::new(id, beacons))\n\n }\n", "file_path": "2021/19_beacon-scanner/src/main.rs", "rank": 92, "score": 81082.0573267905 }, { "content": "#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]\n\nstruct Position {\n\n row: i32,\n\n column: i32,\n\n}\n\n\n\nimpl Position {\n\n fn apply(&mut self, direction: Direction) {\n\n match direction {\n\n Direction::Up => self.row -= 1,\n\n Direction::Right => self.column += 1,\n\n Direction::Down => self.row += 1,\n\n Direction::Left => self.column -= 1,\n\n Direction::UpRight => {\n\n self.apply(Direction::Up);\n\n self.apply(Direction::Right);\n\n }\n\n Direction::UpLeft => {\n\n self.apply(Direction::Up);\n\n self.apply(Direction::Left);\n\n }\n", "file_path": "2022/09_rope-bridge/src/main.rs", "rank": 93, "score": 81081.90633254332 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nstruct Operation {\n\n tokens: Vec<Token>,\n\n}\n\n\n\nimpl FromStr for Operation {\n\n type Err = ();\n\n\n\n fn from_str(value: &str) -> Result<Self, Self::Err> {\n\n Ok(Self::from(&mut value.chars().peekable()))\n\n }\n\n}\n\n\n\nimpl<I: Iterator<Item = char>> From<&mut Peekable<I>> for Operation {\n\n fn from(value: &mut Peekable<I>) -> Self {\n\n struct ExpressionInteration<'a, T: Iterator<Item = char>> {\n\n input_characters: &'a mut Peekable<T>,\n\n }\n\n\n\n impl<T: 
Iterator<Item = char>> Iterator for ExpressionInteration<'_, T> {\n\n type Item = Token;\n", "file_path": "2020/18_operation-order/src/main.rs", "rank": 94, "score": 81081.36165213841 }, { "content": "#[derive(Debug)]\n\nstruct Rucksack {\n\n first_compartment: BTreeSet<u8>,\n\n second_compartment: BTreeSet<u8>,\n\n}\n\n\n\nimpl FromStr for Rucksack {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if s.len() % 2 == 0 {\n\n let first_compartment = s[..s.len() / 2].bytes().collect();\n\n let second_compartment = s[s.len() / 2..].bytes().collect();\n\n Ok(Self {\n\n first_compartment,\n\n second_compartment,\n\n })\n\n } else {\n\n Err(anyhow::anyhow!(\n\n \"Not even number of elements ({}) in {}\",\n\n s.len(),\n\n s\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "2022/03_rucksack-reorganization/src/main.rs", "rank": 95, "score": 81076.69519224131 }, { "content": "// Game is compressed as much as possible such that it would not cause\n\n// memory problems as we will store many\n\nstruct Game {\n\n is_player1_round: bool,\n\n // Positions are between 1 and 10, hence 8 bits are sufficient\n\n player1_position: u8,\n\n player2_position: u8,\n\n // Scores are up to 1000 for part 1 and 21 for part2,\n\n // hence 11 bits are sufficient with some margin\n\n // as there are no 11 bits integers we're using the closer\n\n // one (16 bits)\n\n player1_score: u16,\n\n player2_score: u16,\n\n}\n\n\n\nimpl TryFrom<Vec<String>> for Game {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(lines: Vec<String>) -> Result<Self, Self::Error> {\n\n anyhow::ensure!(lines.len() == 2);\n\n let player1_position = scan_fmt!(&lines[0], \"Player 1 starting position: {}\", u8)?;\n\n let player2_position = scan_fmt!(&lines[1], \"Player 2 starting position: {}\", u8)?;\n", "file_path": "2021/21_dirac-dice/src/main.rs", "rank": 96, "score": 81076.69519224131 }, { "content": "#[derive(Debug)]\n\nstruct Passport {\n\n fields: HashMap<String, 
String>,\n\n}\n\n\n\nimpl Passport {\n\n const REQUIRED_FIELDS: [&'static str; 7] = [\"byr\", \"iyr\", \"eyr\", \"hgt\", \"hcl\", \"ecl\", \"pid\"];\n\n\n\n fn is_field_valid(&self, field_name: &str) -> Option<()> {\n\n fn is_in_range(value: i32, min: i32, max: i32) -> Option<()> {\n\n if value >= min && value <= max {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n }\n\n fn is_integer_in_range(value: &str, min: i32, max: i32) -> Option<()> {\n\n is_in_range(value.parse::<i32>().ok()?, min, max)\n\n }\n\n\n\n let field_value = self.fields.get(field_name)?;\n", "file_path": "2020/04_passport-processing/src/main.rs", "rank": 97, "score": 81076.69519224131 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Move {\n\n quantity: usize,\n\n src: usize,\n\n dst: usize,\n\n}\n\nimpl FromStr for Move {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let (quantity, src, dst) = scan_fmt!(s, \"move {} from {} to {}\", usize, usize, usize)?;\n\n Ok(Self { quantity, src, dst })\n\n }\n\n}\n\n\n", "file_path": "2022/05_supply-stacks/src/main.rs", "rank": 98, "score": 81076.69519224131 }, { "content": "#[derive(Debug)]\n\nstruct Assignment {\n\n first: Range,\n\n second: Range,\n\n}\n\nimpl FromStr for Assignment {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let (first_start, first_end, second_start, second_end) =\n\n scan_fmt!(s, \"{}-{},{}-{}\", usize, usize, usize, usize)?;\n\n Ok(Self {\n\n first: (first_start..=first_end).into(),\n\n second: (second_start..=second_end).into(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "2022/04_camp-cleanup/src/main.rs", "rank": 99, "score": 81076.69519224131 } ]
Rust
src/connectivity/network/tests/dhcp_interop/dhcp_validity/src/lib.rs
EnderNightLord-ChromeBook/fuchsia-pine64-pinephone
05e2c059b57b6217089090a0315971d1735ecf57
use { anyhow::{format_err, Context as _, Error}, fidl_fuchsia_net as fnet, fidl_fuchsia_net_dhcpv6 as fdhcpv6, fidl_fuchsia_net_interfaces as finterfaces, fidl_fuchsia_net_name as fnetname, fidl_fuchsia_net_stack as fstack, fidl_fuchsia_net_stack_ext::FidlReturn as _, fidl_fuchsia_netemul_guest::{ CommandListenerMarker, GuestDiscoveryMarker, GuestInteractionMarker, }, fuchsia_async::TimeoutExt as _, fuchsia_component::client, netemul_guest_lib::wait_for_command_completion, std::time::Duration, }; pub async fn configure_dhcp_server(guest_name: &str, command_to_run: &str) -> Result<(), Error> { let mut env = vec![]; let guest_discovery_service = client::connect_to_service::<GuestDiscoveryMarker>()?; let (gis, gis_ch) = fidl::endpoints::create_proxy::<GuestInteractionMarker>()?; let () = guest_discovery_service.get_guest(None, guest_name, gis_ch)?; let (client_proxy, server_end) = fidl::endpoints::create_proxy::<CommandListenerMarker>()?; gis.execute_command(command_to_run, &mut env.iter_mut(), None, None, None, server_end)?; wait_for_command_completion(client_proxy.take_event_stream(), None).await } pub async fn verify_v4_addr_present(addr: fnet::IpAddress, timeout: Duration) -> Result<(), Error> { let interface_state = client::connect_to_service::<finterfaces::StateMarker>()?; let mut if_map = std::collections::HashMap::new(); fidl_fuchsia_net_interfaces_ext::wait_interface( fidl_fuchsia_net_interfaces_ext::event_stream_from_state(&interface_state)?, &mut if_map, |if_map| { if_map .iter() .filter_map(|(_, properties)| properties.addresses.as_ref()) .flatten() .find_map(|a| if a.addr?.addr == addr { Some(()) } else { None }) }, ) .on_timeout(timeout, || Err(anyhow::anyhow!("timed out"))) .await .map_err(|e| { e.context(format!( "DHCPv4 client got unexpected addresses: {}, address missing: {:?}", if_map.iter().fold(String::from("addresses present:"), |s, (id, properties)| { s + &format!(" {:?}: {:?}", id, properties.addresses) }), addr )) }) } pub async fn 
verify_v6_dns_servers( interface_id: u64, want_dns_servers: Vec<fnetname::DnsServer_>, ) -> Result<(), Error> { let stack = client::connect_to_service::<fstack::StackMarker>() .context("connecting to stack service")?; let info = stack .get_interface_info(interface_id) .await .squash_result() .context("getting interface info")?; let addr = info .properties .addresses .into_iter() .find_map(|addr: fnet::Subnet| match addr.addr { fnet::IpAddress::Ipv4(_addr) => None, fnet::IpAddress::Ipv6(addr) => { if addr.addr[..8] == [0xfe, 0x80, 0, 0, 0, 0, 0, 0] { Some(addr) } else { None } } }) .ok_or(format_err!("no addresses found to start DHCPv6 client on"))?; let provider = client::connect_to_service::<fdhcpv6::ClientProviderMarker>() .context("connecting to DHCPv6 client")?; let (client_end, server_end) = fidl::endpoints::create_endpoints::<fdhcpv6::ClientMarker>() .context("creating DHCPv6 client channel")?; let () = provider.new_client( fdhcpv6::NewClientParams { interface_id: Some(interface_id), address: Some(fnet::Ipv6SocketAddress { address: addr, port: fdhcpv6::DEFAULT_CLIENT_PORT, zone_index: interface_id, }), models: Some(fdhcpv6::OperationalModels { stateless: Some(fdhcpv6::Stateless { options_to_request: Some(vec![fdhcpv6::RequestableOptionCode::DnsServers]), }), }), }, server_end, )?; let client_proxy = client_end.into_proxy().context("getting client proxy from channel")?; let got_dns_servers = client_proxy.watch_servers().await.context("watching DNS servers")?; if got_dns_servers == want_dns_servers { Ok(()) } else { Err(format_err!( "DHCPv6 client received unexpected DNS servers:\ngot dns servers: {:?}\n, want dns servers: {:?}\n", got_dns_servers, want_dns_servers )) } }
use { anyhow::{format_err, Context as _, Error}, fidl_fuchsia_net as fnet, fidl_fuchsia_net_dhcpv6 as fdhcpv6, fidl_fuchsia_net_interfaces as finterfaces, fidl_fuchsia_net_name as fnetname, fidl_fuchsia_net_stack as fstack, fidl_fuchsia_net_stack_ext::FidlReturn as _, fidl_fuchsia_netemul_guest::{ CommandListenerMarker, GuestDiscoveryMarker, GuestInteractionMarker, }, fuchsia_async::TimeoutExt as _, fuchsia_component::client, netemul_guest_lib::wait_for_command_completion, std::time::Duration, }; pub async fn configure_dhcp_server(guest_name: &str, command_to_run: &str) -> Result<(), Error> { let mut env = vec![]; let guest_discovery_service = client::connect_to_service::<GuestDiscoveryMarker>()?; let (gis, gis_ch) = fidl::endpoints::create_proxy::<GuestInteractionMarker>()?; let () = guest_discovery_service.get_guest(None, guest_name, gis_ch)?; let (client_proxy, server_end) = fidl::endpoints::create_proxy::<CommandListenerMarker>()?; gis.execute_command(command_to_run, &mut env.iter_mut(), None, None, None, server_end)?; wait_for_command_completion(client_proxy.take_event_stream(), None).await }
pub async fn verify_v6_dns_servers( interface_id: u64, want_dns_servers: Vec<fnetname::DnsServer_>, ) -> Result<(), Error> { let stack = client::connect_to_service::<fstack::StackMarker>() .context("connecting to stack service")?; let info = stack .get_interface_info(interface_id) .await .squash_result() .context("getting interface info")?; let addr = info .properties .addresses .into_iter() .find_map(|addr: fnet::Subnet| match addr.addr { fnet::IpAddress::Ipv4(_addr) => None, fnet::IpAddress::Ipv6(addr) => { if addr.addr[..8] == [0xfe, 0x80, 0, 0, 0, 0, 0, 0] { Some(addr) } else { None } } }) .ok_or(format_err!("no addresses found to start DHCPv6 client on"))?; let provider = client::connect_to_service::<fdhcpv6::ClientProviderMarker>() .context("connecting to DHCPv6 client")?; let (client_end, server_end) = fidl::endpoints::create_endpoints::<fdhcpv6::ClientMarker>() .context("creating DHCPv6 client channel")?; let () = provider.new_client( fdhcpv6::NewClientParams { interface_id: Some(interface_id), address: Some(fnet::Ipv6SocketAddress { address: addr, port: fdhcpv6::DEFAULT_CLIENT_PORT, zone_index: interface_id, }), models: Some(fdhcpv6::OperationalModels { stateless: Some(fdhcpv6::Stateless { options_to_request: Some(vec![fdhcpv6::RequestableOptionCode::DnsServers]), }), }), }, server_end, )?; let client_proxy = client_end.into_proxy().context("getting client proxy from channel")?; let got_dns_servers = client_proxy.watch_servers().await.context("watching DNS servers")?; if got_dns_servers == want_dns_servers { Ok(()) } else { Err(format_err!( "DHCPv6 client received unexpected DNS servers:\ngot dns servers: {:?}\n, want dns servers: {:?}\n", got_dns_servers, want_dns_servers )) } }
pub async fn verify_v4_addr_present(addr: fnet::IpAddress, timeout: Duration) -> Result<(), Error> { let interface_state = client::connect_to_service::<finterfaces::StateMarker>()?; let mut if_map = std::collections::HashMap::new(); fidl_fuchsia_net_interfaces_ext::wait_interface( fidl_fuchsia_net_interfaces_ext::event_stream_from_state(&interface_state)?, &mut if_map, |if_map| { if_map .iter() .filter_map(|(_, properties)| properties.addresses.as_ref()) .flatten() .find_map(|a| if a.addr?.addr == addr { Some(()) } else { None }) }, ) .on_timeout(timeout, || Err(anyhow::anyhow!("timed out"))) .await .map_err(|e| { e.context(format!( "DHCPv4 client got unexpected addresses: {}, address missing: {:?}", if_map.iter().fold(String::from("addresses present:"), |s, (id, properties)| { s + &format!(" {:?}: {:?}", id, properties.addresses) }), addr )) }) }
function_block-full_function
[]
Rust
src/csv2json/mod.rs
perzanko/csv2json
aae4c50856f73fc0f88450dd2f38ad8fc6ffaeae
extern crate serde_json; use std::collections::HashMap; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::process; use std::time::Instant; type Lines = Vec<String>; type Nav = String; type NavKeys = Vec<String>; type Args = Vec<String>; type Rows = Vec<HashMap<String, String>>; type Hash = HashMap<String, String>; pub fn new() { let start_time = Instant::now(); let (input_file, output_file) = parse_args(&env::args().collect::<Args>()); if is_str_empty(&input_file) || is_str_empty(&output_file) { show_help(); process::exit(0x0100); } let nav_with_lines: (Nav, Lines) = read_file(&input_file).unwrap(); let nav: NavKeys = fetch_keys(&nav_with_lines.0); let rows: Rows = generate_rows(&nav_with_lines.1, &nav); let json = serde_json::to_string(&rows).unwrap(); match write_file(&output_file, &json) { Ok(()) => println!("Ok, done! - {}", output_file), Err(err) => println!("Something went wrong. {}", err), }; println!("Rows: {}", rows.len()); println!("Elapsed: {} ms", get_elapsed_time(start_time)); } fn parse_args(args: &Args) -> (String, String) { let mut input_file = String::new(); let mut output_file = String::new(); let mut args: Args = args.clone(); args.remove(0); args.chunks(2).for_each(|x| match x[0].as_ref() { "--input" => { input_file = x[1].clone(); } "-i" => { input_file = x[1].clone(); } "--output" => { output_file = x[1].clone(); } "-o" => { output_file = x[1].clone(); } _ => {} }); (input_file, output_file) } fn read_file(file_path: &String) -> Result<(Nav, Lines), io::Error> { let file = File::open(file_path)?; let buf_reader = io::BufReader::new(file); let mut lines = Vec::new(); let mut nav = String::new(); for (i, line) in buf_reader.lines().enumerate() { let line = line.unwrap(); if i == 0 { nav = line } else { lines.push(line) } } Ok((nav, lines)) } fn write_file(file_path: &String, data: &String) -> Result<(), io::Error> { let mut file = File::create(file_path)?; file.write_all(&data.as_bytes())?; Ok(()) } fn 
fetch_keys(keys_str: &String) -> NavKeys { let mut i: i16 = -1; keys_str .split(",") .collect::<Vec<&str>>() .into_iter() .map(|key| { i += 1; String::from(key) }) .collect() } fn generate_rows(lines: &Lines, nav: &NavKeys) -> Vec<Hash> { lines .into_iter() .map(|line| { let mut hash: HashMap<String, String> = HashMap::new(); let mut i = 0; let nav = nav; line.split(",") .collect::<Vec<&str>>() .into_iter() .for_each(|x| { i += 1; hash.insert(nav[i - 1].clone(), String::from(x)); }); return hash; }) .collect() } fn show_help() { print!( " Created by perzanko ---- .o88b. .d8888. db db .d888b. d88b .d8888. .d88b. d8b db d8P Y8 88' YP 88 88 VP `8D `8P' 88' YP .8P Y8. 888o 88 8P `8bo. Y8 8P odD' 88 `8bo. 88 88 88V8o 88 8b `Y8b. `8b d8' .88' 88 `Y8b. 88 88 88 V8o88 Y8b d8 db 8D `8bd8' j88. db. 88 db 8D `8b d8' 88 V888 `Y88P' `8888Y' YP 888888D Y8888P `8888Y' `Y88P' VP V8P This tool provides simple and efficient csv to json conversion. Usage: csv2json --input [path] --output [path] -h, --help print this help -i, --input input path of CSV file -o, --output output path of converted JSON " ) } fn is_str_empty(text: &String) -> bool { if text.trim().len() == 0 { true } else { false } } fn get_elapsed_time(start_time: Instant) -> String { let x = start_time.elapsed(); ((x.as_secs() * 1_000) + (x.subsec_nanos() / 1_000_000) as u64).to_string() }
extern crate serde_json; use std::collections::HashMap; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::process; use std::time::Instant; type Lines = Vec<String>; type Nav = String; type NavKeys = Vec<String>; type Args = Vec<String>; type Rows = Vec<HashMap<String, String>>; type Hash = HashMap<String, String>; pub fn new() { let start_time = Instant::now(); let (input_file, output_file) = parse_args(&env::args().collect::<Args>()); if is_str_empty(&input_file) || is_str_empty(&output_file) { show_help(); process::exit(0x0100); } let nav_with_lines: (Nav, Lines) = read_file(&input_file).unwrap(); let nav: NavKeys = fetch_keys(&nav_with_lines.0); let rows: Rows = generate_rows(&nav_with_lines.1, &nav); let json = serde_json::to_string(&rows).unwrap(); match write_file(&output_file, &json) { Ok(()) => println!("Ok, done! - {}", output_file), Err(err) => println!("Something went wrong. {}", err), }; println!("Rows: {}", rows.len()); println!("Elapsed: {} ms", get_elapsed_time(start_time)); } fn parse_args(args: &Args) -> (String, String) {
fn read_file(file_path: &String) -> Result<(Nav, Lines), io::Error> { let file = File::open(file_path)?; let buf_reader = io::BufReader::new(file); let mut lines = Vec::new(); let mut nav = String::new(); for (i, line) in buf_reader.lines().enumerate() { let line = line.unwrap(); if i == 0 { nav = line } else { lines.push(line) } } Ok((nav, lines)) } fn write_file(file_path: &String, data: &String) -> Result<(), io::Error> { let mut file = File::create(file_path)?; file.write_all(&data.as_bytes())?; Ok(()) } fn fetch_keys(keys_str: &String) -> NavKeys { let mut i: i16 = -1; keys_str .split(",") .collect::<Vec<&str>>() .into_iter() .map(|key| { i += 1; String::from(key) }) .collect() } fn generate_rows(lines: &Lines, nav: &NavKeys) -> Vec<Hash> { lines .into_iter() .map(|line| { let mut hash: HashMap<String, String> = HashMap::new(); let mut i = 0; let nav = nav; line.split(",") .collect::<Vec<&str>>() .into_iter() .for_each(|x| { i += 1; hash.insert(nav[i - 1].clone(), String::from(x)); }); return hash; }) .collect() } fn show_help() { print!( " Created by perzanko ---- .o88b. .d8888. db db .d888b. d88b .d8888. .d88b. d8b db d8P Y8 88' YP 88 88 VP `8D `8P' 88' YP .8P Y8. 888o 88 8P `8bo. Y8 8P odD' 88 `8bo. 88 88 88V8o 88 8b `Y8b. `8b d8' .88' 88 `Y8b. 88 88 88 V8o88 Y8b d8 db 8D `8bd8' j88. db. 88 db 8D `8b d8' 88 V888 `Y88P' `8888Y' YP 888888D Y8888P `8888Y' `Y88P' VP V8P This tool provides simple and efficient csv to json conversion. Usage: csv2json --input [path] --output [path] -h, --help print this help -i, --input input path of CSV file -o, --output output path of converted JSON " ) } fn is_str_empty(text: &String) -> bool { if text.trim().len() == 0 { true } else { false } } fn get_elapsed_time(start_time: Instant) -> String { let x = start_time.elapsed(); ((x.as_secs() * 1_000) + (x.subsec_nanos() / 1_000_000) as u64).to_string() }
let mut input_file = String::new(); let mut output_file = String::new(); let mut args: Args = args.clone(); args.remove(0); args.chunks(2).for_each(|x| match x[0].as_ref() { "--input" => { input_file = x[1].clone(); } "-i" => { input_file = x[1].clone(); } "--output" => { output_file = x[1].clone(); } "-o" => { output_file = x[1].clone(); } _ => {} }); (input_file, output_file) }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n csv2json::new();\n\n}\n", "file_path": "src/main.rs", "rank": 15, "score": 25552.07465452124 }, { "content": "# csv2json\n\n\n\nThis tool provides simple and efficient csv to json converting, written in Rust.\n\n\n\n## Getting Started\n\n\n\nYou need to have the Rust and Cargo installed. Please follow this [instruction](https://www.rust-lang.org/tools/install)\n\n\n\n\n\n### Installing\n\n\n\nIf you want to run the `csv2json`, please run in the project directory:\n\n\n\n```\n\ncargo build --release\n\n```\n\n\n\nAfter this command, you should have a compiled binary already in directory `./target/release/`\n\n\n\nNow please run:\n\n```\n\ntarget/release/csv2json -i [input] -o [output]\n\n```\n\nand that's it!\n\n\n\n## Example\n\n\n\n<img src=\"https://raw.githubusercontent.com/perzanko/csv2json/master/example/example.png\" width=\"500\">\n\n\n\n## Authors\n\n\n\n* **Kacper Perzankowski** - [perzanko](https://github.com/perzanko)\n\n\n\n## License\n\n\n\nThis project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details\n", "file_path": "README.md", "rank": 17, "score": 0.22498081427576033 } ]
Rust
python/src/lib.rs
kngwyu/rogue-gym
00de77e6c9f3d2b9ed602f93abb5526c13791ca9
#[macro_use] extern crate failure; extern crate ndarray; extern crate numpy; extern crate pyo3; extern crate rect_iter; extern crate rogue_gym_core; #[cfg(unix)] extern crate rogue_gym_devui; mod fearures; mod state_impls; mod thread_impls; use fearures::{MessageFlagInner, StatusFlagInner}; use ndarray::{Array2, Axis, Zip}; use numpy::PyArray3; use pyo3::{ basic::{PyObjectProtocol, PyObjectReprProtocol, PyObjectStrProtocol}, exceptions::RuntimeError, prelude::*, }; use rect_iter::{Get2D, GetMut2D, RectRange}; use rogue_gym_core::character::player::Status; use rogue_gym_core::dungeon::{Positioned, X, Y}; use rogue_gym_core::{error::*, symbol, GameConfig, RunTime}; use state_impls::GameStateImpl; use std::collections::HashMap; use std::fmt::Display; use std::str::from_utf8_unchecked; use thread_impls::ThreadConductor; fn pyresult<T, E: Display>(result: Result<T, E>) -> PyResult<T> { pyresult_with(result, "Error in rogue-gym") } fn pyresult_with<T, E: Display>(result: Result<T, E>, msg: &str) -> PyResult<T> { result.map_err(|e| PyErr::new::<RuntimeError, _>(format!("{}: {}", msg, e))) } #[pyclass] #[derive(Clone, Debug, PartialEq)] struct PlayerState { map: Vec<Vec<u8>>, history: Array2<bool>, status: Status, symbols: u8, message: MessageFlagInner, is_terminal: bool, } impl PlayerState { fn new(w: X, h: Y, symbols: u8) -> Self { let (w, h) = (w.0 as usize, h.0 as usize); PlayerState { map: vec![vec![b' '; w]; h], history: Array2::from_elem([h, w], false), status: Status::default(), symbols, message: MessageFlagInner::new(), is_terminal: false, } } fn reset(&mut self, runtime: &RunTime) -> GameResult<()> { self.status = runtime.player_status(); self.draw_map(runtime)?; self.message = MessageFlagInner::new(); self.is_terminal = false; Ok(()) } fn draw_map(&mut self, runtime: &RunTime) -> GameResult<()> { self.history = runtime.history(&self.status).unwrap(); runtime.draw_screen(|Positioned(cd, tile)| -> GameResult<()> { *self .map .try_get_mut_p(cd) .into_chained(|| "in 
python::GameState::react")? = tile.to_byte(); Ok(()) }) } fn dungeon_str(&self) -> impl Iterator<Item = &str> { self.map.iter().map(|v| unsafe { from_utf8_unchecked(v) }) } fn gray_image_with_offset<'py>( &self, py: Python<'py>, offset: usize, ) -> PyResult<&'py PyArray3<f32>> { let (h, w) = (self.map.len(), self.map[0].len()); let py_array = PyArray3::zeros(py, [1 + offset, h, w], false); RectRange::zero_start(w, h) .unwrap() .into_iter() .for_each(|(x, y)| unsafe { let symbol = symbol::tile_to_sym(*self.map.get_xy(x, y)).unwrap(); *py_array.uget_mut([0, y, x]) = f32::from(symbol) / f32::from(self.symbols); }); Ok(py_array) } fn symbol_image_with_offset<'py>( &self, py: Python<'py>, offset: usize, ) -> PyResult<&'py PyArray3<f32>> { let (h, w) = (self.map.len(), self.map[0].len()); let channels = usize::from(self.symbols); let py_array = PyArray3::zeros(py, [channels + offset, h, w], false); pyresult(symbol::construct_symbol_map( &self.map, h, w, self.symbols - 1, |idx| unsafe { py_array.uget_mut(idx) }, ))?; Ok(py_array) } fn copy_hist(&self, py_array: &PyArray3<f32>, offset: usize) { let mut array = py_array.as_array_mut(); let hist_array = array.index_axis_mut(Axis(0), usize::from(offset)); Zip::from(hist_array).and(&self.history).apply(|p, &r| { *p = if r { 1.0 } else { 0.0 }; }); } } impl<'p> PyObjectReprProtocol<'p> for PlayerState { type Success = String; type Result = PyResult<String>; } impl<'p> PyObjectStrProtocol<'p> for PlayerState { type Success = String; type Result = PyResult<String>; } impl<'p> PyObjectProtocol<'p> for PlayerState { fn __repr__(&'p self) -> <Self as PyObjectReprProtocol>::Result { let mut dungeon = self.dungeon_str().fold(String::new(), |mut res, s| { res.push_str(s); res.push('\n'); res }); dungeon.push_str(&format!("{}", self.status)); Ok(dungeon) } fn __str__(&'p self) -> <Self as PyObjectStrProtocol>::Result { self.__repr__() } } #[pymethods] impl PlayerState { #[getter] fn status(&self) -> PyResult<HashMap<String, u32>> { 
Ok(self .status .to_dict_vec() .into_iter() .map(|(s, v)| (s.to_owned(), v)) .collect()) } #[getter] fn dungeon(&self) -> PyResult<Vec<String>> { Ok(self.dungeon_str().map(|s| s.to_string()).collect()) } #[getter] fn dungeon_level(&self) -> PyResult<u32> { Ok(self.status.dungeon_level) } #[getter] fn gold(&self) -> PyResult<u32> { Ok(self.status.gold) } #[getter] fn symbols(&self) -> PyResult<usize> { Ok(usize::from(self.symbols)) } #[getter] fn is_terminal(&self) -> PyResult<bool> { Ok(self.is_terminal) } fn status_vec(&self, flag: u32) -> Vec<i32> { let flag = StatusFlagInner(flag); flag.to_vector(&self.status) } fn gray_image(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.gray_image_with_offset(py, flag.len())?; flag.copy_status(&self.status, 1, &mut array.as_array_mut()); Ok(array) } fn gray_image_with_hist(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.gray_image_with_offset(py, flag.len() + 1)?; let offset = flag.copy_status(&self.status, 1, &mut array.as_array_mut()); self.copy_hist(&array, offset); Ok(array) } fn symbol_image(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.symbol_image_with_offset(py, flag.len())?; flag.copy_status( &self.status, usize::from(self.symbols), &mut array.as_array_mut(), ); Ok(array) } fn symbol_image_with_hist(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.symbol_image_with_offset(py, flag.len() + 1)?; let offset = flag.copy_status( &self.status, usize::from(self.symbols), &mut array.as_array_mut(), ); self.copy_hist(&array, offset); Ok(array) } } #[pyclass] 
struct GameState { inner: GameStateImpl, config: GameConfig, } #[pymethods] impl GameState { #[new] fn __new__(obj: &PyRawObject, max_steps: usize, config_str: Option<String>) -> PyResult<()> { let config = if let Some(cfg) = config_str { pyresult_with(GameConfig::from_json(&cfg), "Failed to parse config")? } else { GameConfig::default() }; let inner = pyresult(GameStateImpl::new(config.clone(), max_steps))?; obj.init(GameState { inner, config }); Ok(()) } fn screen_size(&self) -> (i32, i32) { (self.config.height, self.config.width) } fn set_seed(&mut self, seed: u64) -> PyResult<()> { self.config.seed = Some(seed as u128); Ok(()) } fn reset(&mut self) -> PyResult<()> { pyresult(self.inner.reset(self.config.clone())) } fn prev(&self) -> PlayerState { self.inner.state() } fn react(&mut self, input: u8) -> PyResult<()> { pyresult(self.inner.react(input)) } fn dump_history(&self) -> PyResult<String> { pyresult_with( self.inner.runtime.saved_inputs_as_json(), "Error when getting history", ) } fn dump_config(&self) -> PyResult<String> { pyresult_with(self.config.to_json(), "Error when getting config") } fn symbols(&self) -> PyResult<usize> { Ok(self.inner.symbols()) } } #[pyclass] struct ParallelGameState { conductor: ThreadConductor, configs: Vec<GameConfig>, symbols: u8, } #[pymethods] impl ParallelGameState { #[new] fn __new__( obj: &PyRawObject, py: Python, max_steps: usize, configs: Vec<String>, ) -> PyResult<()> { let configs = { let mut res = vec![]; for cfg in configs { res.push(pyresult_with( GameConfig::from_json(&cfg), "Failed to parse config", )?); } res }; let symbols = configs[0] .symbol_max() .expect("Failed to get symbol max") .to_byte() + 1; let cloned = configs.clone(); let conductor = py.allow_threads(move || ThreadConductor::new(cloned, max_steps)); let conductor = pyresult(conductor)?; obj.init(ParallelGameState { conductor, configs, symbols, }); Ok(()) } fn screen_size(&self) -> (i32, i32) { (self.configs[0].height, self.configs[0].width) } fn 
symbols(&self) -> PyResult<usize> { Ok(usize::from(self.symbols)) } fn seed(&mut self, py: Python, seed: Vec<u128>) -> PyResult<()> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.seed(seed)); pyresult(res) } fn states(&mut self, py: Python) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.states()); pyresult(res) } fn step(&mut self, py: Python, input: Vec<u8>) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.step(input)); pyresult(res) } fn reset(&mut self, py: Python) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.reset()); pyresult(res) } fn close(&mut self, py: Python) -> PyResult<()> { let ParallelGameState { ref mut conductor, .. } = self; pyresult(py.allow_threads(move || conductor.close())) } } #[cfg(unix)] #[pyfunction] fn replay(game: &GameState, py: Python, interval_ms: u64) -> PyResult<()> { use rogue_gym_devui::show_replay; let inputs = game.inner.runtime.saved_inputs().to_vec(); let config = game.config.clone(); let res = py.allow_threads(move || show_replay(config, inputs, interval_ms)); pyresult(res) } #[cfg(unix)] #[pyfunction] fn play_cli(game: &GameState) -> PyResult<()> { use rogue_gym_devui::play_game; pyresult(play_game(game.config.clone(), false))?; Ok(()) } #[pymodule(_rogue_gym)] fn init_mod(_py: Python, m: &PyModule) -> PyResult<()> { m.add_class::<GameState>()?; m.add_class::<PlayerState>()?; m.add_class::<ParallelGameState>()?; #[cfg(unix)] m.add_wrapped(pyo3::wrap_pyfunction!(replay))?; #[cfg(unix)] m.add_wrapped(pyo3::wrap_pyfunction!(play_cli))?; Ok(()) }
#[macro_use] extern crate failure; extern crate ndarray; extern crate numpy; extern crate pyo3; extern crate rect_iter; exter
sage: MessageFlagInner, is_terminal: bool, } impl PlayerState { fn new(w: X, h: Y, symbols: u8) -> Self { let (w, h) = (w.0 as usize, h.0 as usize); PlayerState { map: vec![vec![b' '; w]; h], history: Array2::from_elem([h, w], false), status: Status::default(), symbols, message: MessageFlagInner::new(), is_terminal: false, } } fn reset(&mut self, runtime: &RunTime) -> GameResult<()> { self.status = runtime.player_status(); self.draw_map(runtime)?; self.message = MessageFlagInner::new(); self.is_terminal = false; Ok(()) } fn draw_map(&mut self, runtime: &RunTime) -> GameResult<()> { self.history = runtime.history(&self.status).unwrap(); runtime.draw_screen(|Positioned(cd, tile)| -> GameResult<()> { *self .map .try_get_mut_p(cd) .into_chained(|| "in python::GameState::react")? = tile.to_byte(); Ok(()) }) } fn dungeon_str(&self) -> impl Iterator<Item = &str> { self.map.iter().map(|v| unsafe { from_utf8_unchecked(v) }) } fn gray_image_with_offset<'py>( &self, py: Python<'py>, offset: usize, ) -> PyResult<&'py PyArray3<f32>> { let (h, w) = (self.map.len(), self.map[0].len()); let py_array = PyArray3::zeros(py, [1 + offset, h, w], false); RectRange::zero_start(w, h) .unwrap() .into_iter() .for_each(|(x, y)| unsafe { let symbol = symbol::tile_to_sym(*self.map.get_xy(x, y)).unwrap(); *py_array.uget_mut([0, y, x]) = f32::from(symbol) / f32::from(self.symbols); }); Ok(py_array) } fn symbol_image_with_offset<'py>( &self, py: Python<'py>, offset: usize, ) -> PyResult<&'py PyArray3<f32>> { let (h, w) = (self.map.len(), self.map[0].len()); let channels = usize::from(self.symbols); let py_array = PyArray3::zeros(py, [channels + offset, h, w], false); pyresult(symbol::construct_symbol_map( &self.map, h, w, self.symbols - 1, |idx| unsafe { py_array.uget_mut(idx) }, ))?; Ok(py_array) } fn copy_hist(&self, py_array: &PyArray3<f32>, offset: usize) { let mut array = py_array.as_array_mut(); let hist_array = array.index_axis_mut(Axis(0), usize::from(offset)); 
Zip::from(hist_array).and(&self.history).apply(|p, &r| { *p = if r { 1.0 } else { 0.0 }; }); } } impl<'p> PyObjectReprProtocol<'p> for PlayerState { type Success = String; type Result = PyResult<String>; } impl<'p> PyObjectStrProtocol<'p> for PlayerState { type Success = String; type Result = PyResult<String>; } impl<'p> PyObjectProtocol<'p> for PlayerState { fn __repr__(&'p self) -> <Self as PyObjectReprProtocol>::Result { let mut dungeon = self.dungeon_str().fold(String::new(), |mut res, s| { res.push_str(s); res.push('\n'); res }); dungeon.push_str(&format!("{}", self.status)); Ok(dungeon) } fn __str__(&'p self) -> <Self as PyObjectStrProtocol>::Result { self.__repr__() } } #[pymethods] impl PlayerState { #[getter] fn status(&self) -> PyResult<HashMap<String, u32>> { Ok(self .status .to_dict_vec() .into_iter() .map(|(s, v)| (s.to_owned(), v)) .collect()) } #[getter] fn dungeon(&self) -> PyResult<Vec<String>> { Ok(self.dungeon_str().map(|s| s.to_string()).collect()) } #[getter] fn dungeon_level(&self) -> PyResult<u32> { Ok(self.status.dungeon_level) } #[getter] fn gold(&self) -> PyResult<u32> { Ok(self.status.gold) } #[getter] fn symbols(&self) -> PyResult<usize> { Ok(usize::from(self.symbols)) } #[getter] fn is_terminal(&self) -> PyResult<bool> { Ok(self.is_terminal) } fn status_vec(&self, flag: u32) -> Vec<i32> { let flag = StatusFlagInner(flag); flag.to_vector(&self.status) } fn gray_image(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.gray_image_with_offset(py, flag.len())?; flag.copy_status(&self.status, 1, &mut array.as_array_mut()); Ok(array) } fn gray_image_with_hist(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.gray_image_with_offset(py, flag.len() + 1)?; let offset = flag.copy_status(&self.status, 1, &mut 
array.as_array_mut()); self.copy_hist(&array, offset); Ok(array) } fn symbol_image(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.symbol_image_with_offset(py, flag.len())?; flag.copy_status( &self.status, usize::from(self.symbols), &mut array.as_array_mut(), ); Ok(array) } fn symbol_image_with_hist(&self, flag: Option<u32>) -> PyResult<&PyArray3<f32>> { let (py, flag) = ( unsafe { Python::assume_gil_acquired() }, StatusFlagInner::from(flag), ); let array = self.symbol_image_with_offset(py, flag.len() + 1)?; let offset = flag.copy_status( &self.status, usize::from(self.symbols), &mut array.as_array_mut(), ); self.copy_hist(&array, offset); Ok(array) } } #[pyclass] struct GameState { inner: GameStateImpl, config: GameConfig, } #[pymethods] impl GameState { #[new] fn __new__(obj: &PyRawObject, max_steps: usize, config_str: Option<String>) -> PyResult<()> { let config = if let Some(cfg) = config_str { pyresult_with(GameConfig::from_json(&cfg), "Failed to parse config")? 
} else { GameConfig::default() }; let inner = pyresult(GameStateImpl::new(config.clone(), max_steps))?; obj.init(GameState { inner, config }); Ok(()) } fn screen_size(&self) -> (i32, i32) { (self.config.height, self.config.width) } fn set_seed(&mut self, seed: u64) -> PyResult<()> { self.config.seed = Some(seed as u128); Ok(()) } fn reset(&mut self) -> PyResult<()> { pyresult(self.inner.reset(self.config.clone())) } fn prev(&self) -> PlayerState { self.inner.state() } fn react(&mut self, input: u8) -> PyResult<()> { pyresult(self.inner.react(input)) } fn dump_history(&self) -> PyResult<String> { pyresult_with( self.inner.runtime.saved_inputs_as_json(), "Error when getting history", ) } fn dump_config(&self) -> PyResult<String> { pyresult_with(self.config.to_json(), "Error when getting config") } fn symbols(&self) -> PyResult<usize> { Ok(self.inner.symbols()) } } #[pyclass] struct ParallelGameState { conductor: ThreadConductor, configs: Vec<GameConfig>, symbols: u8, } #[pymethods] impl ParallelGameState { #[new] fn __new__( obj: &PyRawObject, py: Python, max_steps: usize, configs: Vec<String>, ) -> PyResult<()> { let configs = { let mut res = vec![]; for cfg in configs { res.push(pyresult_with( GameConfig::from_json(&cfg), "Failed to parse config", )?); } res }; let symbols = configs[0] .symbol_max() .expect("Failed to get symbol max") .to_byte() + 1; let cloned = configs.clone(); let conductor = py.allow_threads(move || ThreadConductor::new(cloned, max_steps)); let conductor = pyresult(conductor)?; obj.init(ParallelGameState { conductor, configs, symbols, }); Ok(()) } fn screen_size(&self) -> (i32, i32) { (self.configs[0].height, self.configs[0].width) } fn symbols(&self) -> PyResult<usize> { Ok(usize::from(self.symbols)) } fn seed(&mut self, py: Python, seed: Vec<u128>) -> PyResult<()> { let ParallelGameState { ref mut conductor, .. 
} = self; let res = py.allow_threads(move || conductor.seed(seed)); pyresult(res) } fn states(&mut self, py: Python) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.states()); pyresult(res) } fn step(&mut self, py: Python, input: Vec<u8>) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.step(input)); pyresult(res) } fn reset(&mut self, py: Python) -> PyResult<Vec<PlayerState>> { let ParallelGameState { ref mut conductor, .. } = self; let res = py.allow_threads(move || conductor.reset()); pyresult(res) } fn close(&mut self, py: Python) -> PyResult<()> { let ParallelGameState { ref mut conductor, .. } = self; pyresult(py.allow_threads(move || conductor.close())) } } #[cfg(unix)] #[pyfunction] fn replay(game: &GameState, py: Python, interval_ms: u64) -> PyResult<()> { use rogue_gym_devui::show_replay; let inputs = game.inner.runtime.saved_inputs().to_vec(); let config = game.config.clone(); let res = py.allow_threads(move || show_replay(config, inputs, interval_ms)); pyresult(res) } #[cfg(unix)] #[pyfunction] fn play_cli(game: &GameState) -> PyResult<()> { use rogue_gym_devui::play_game; pyresult(play_game(game.config.clone(), false))?; Ok(()) } #[pymodule(_rogue_gym)] fn init_mod(_py: Python, m: &PyModule) -> PyResult<()> { m.add_class::<GameState>()?; m.add_class::<PlayerState>()?; m.add_class::<ParallelGameState>()?; #[cfg(unix)] m.add_wrapped(pyo3::wrap_pyfunction!(replay))?; #[cfg(unix)] m.add_wrapped(pyo3::wrap_pyfunction!(play_cli))?; Ok(()) }
n crate rogue_gym_core; #[cfg(unix)] extern crate rogue_gym_devui; mod fearures; mod state_impls; mod thread_impls; use fearures::{MessageFlagInner, StatusFlagInner}; use ndarray::{Array2, Axis, Zip}; use numpy::PyArray3; use pyo3::{ basic::{PyObjectProtocol, PyObjectReprProtocol, PyObjectStrProtocol}, exceptions::RuntimeError, prelude::*, }; use rect_iter::{Get2D, GetMut2D, RectRange}; use rogue_gym_core::character::player::Status; use rogue_gym_core::dungeon::{Positioned, X, Y}; use rogue_gym_core::{error::*, symbol, GameConfig, RunTime}; use state_impls::GameStateImpl; use std::collections::HashMap; use std::fmt::Display; use std::str::from_utf8_unchecked; use thread_impls::ThreadConductor; fn pyresult<T, E: Display>(result: Result<T, E>) -> PyResult<T> { pyresult_with(result, "Error in rogue-gym") } fn pyresult_with<T, E: Display>(result: Result<T, E>, msg: &str) -> PyResult<T> { result.map_err(|e| PyErr::new::<RuntimeError, _>(format!("{}: {}", msg, e))) } #[pyclass] #[derive(Clone, Debug, PartialEq)] struct PlayerState { map: Vec<Vec<u8>>, history: Array2<bool>, status: Status, symbols: u8, mes
random
[ { "content": "#![cfg_attr(test, feature(test))]\n\n#[macro_use]\n\nextern crate bitflags;\n\n#[macro_use]\n\nextern crate derive_more;\n\n#[macro_use]\n\nextern crate enum_iterator;\n\n#[macro_use]\n\nextern crate failure;\n\nextern crate fixedbitset;\n\nextern crate ndarray;\n\nextern crate num_traits;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate rand;\n\nextern crate rand_xorshift;\n\nextern crate rect_iter;\n\nextern crate regex;\n\n#[macro_use]\n\nextern crate serde;\n", "file_path": "core/src/lib.rs", "rank": 1, "score": 15.898894763226801 }, { "content": "extern crate chrono;\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate failure;\n\nextern crate fern;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate rogue_gym_core;\n\nextern crate rogue_gym_uilib;\n\nextern crate termion;\n\nextern crate tuple_map;\n\n\n\npub mod error;\n\npub mod screen;\n\nuse error::*;\n\nuse rogue_gym_core::input::InputCode;\n\nuse rogue_gym_core::{GameConfig, RunTime};\n\nuse rogue_gym_uilib::{process_reaction, Screen, Transition};\n\nuse screen::{RawTerm, TermScreen};\n\nuse std::io;\n\nuse std::sync::mpsc;\n\nuse std::thread;\n\nuse std::time::Duration;\n\nuse termion::event::Key;\n\nuse termion::input::TermRead;\n\n\n", "file_path": "devui/src/lib.rs", "rank": 2, "score": 11.707334038556201 }, { "content": "extern crate chrono;\n\nextern crate clap;\n\nextern crate fern;\n\nextern crate log;\n\nextern crate rogue_gym_core;\n\nextern crate rogue_gym_devui;\n\nextern crate termion;\n\nextern crate tuple_map;\n\n\n\nuse std::fs::{File, OpenOptions};\n\nuse std::io::prelude::*;\n\n\n\nuse clap::ArgMatches;\n\nuse rogue_gym_core::{json_to_inputs, read_file, GameConfig};\n\nuse rogue_gym_devui::error::*;\n\nuse rogue_gym_devui::{play_game, show_replay};\n\n\n\nconst DEFAULT_INTERVAL_MS: u64 = 500;\n\n\n", "file_path": "devui/src/main.rs", "rank": 3, "score": 10.033597415295834 }, { "content": "extern crate serde_json;\n\nextern crate smallvec;\n\n#[cfg(unix)]\n\nextern 
crate termion;\n\n#[cfg(feature = \"bench\")]\n\nextern crate test;\n\nextern crate tuple_map;\n\n\n\nmod actions;\n\nuse std::fs::File;\n\nuse std::io::{self, Read};\n\npub mod character;\n\npub mod dungeon;\n\npub mod error;\n\nmod fenwick;\n\npub mod input;\n\npub mod item;\n\nmod rng;\n\nmod smallstr;\n\npub mod symbol;\n", "file_path": "core/src/lib.rs", "rank": 4, "score": 9.486697166988257 }, { "content": "#[macro_use]\n\nextern crate failure;\n\n\n\nmod draw;\n\nmod font;\n\nmod term_image;\n\nmod theme;\n\nuse self::draw::GifEncoder;\n\nuse clap::{self, ArgMatches};\n\nuse rogue_gym_core::{error::*, input::InputCode, json_to_inputs, read_file, GameConfig};\n\nconst UBUNTU_MONO: &[u8; 205748] = include_bytes!(\"../../data/fonts/UbuntuMono-R.ttf\");\n\nuse self::font::FontHandle;\n\nuse self::theme::Theme;\n\n\n", "file_path": "act2gif/src/main.rs", "rank": 5, "score": 7.784145151402287 }, { "content": "use crate::input::{InputCode, Key};\n\nuse failure::{self, Error, Fail};\n\nuse rect_iter::IndexError;\n\nuse serde_json::Error as JsonError;\n\nuse std::error::Error as StdError;\n\nuse std::fmt;\n\n\n\npub type GameResult<T> = Result<T, Error>;\n\n\n", "file_path": "core/src/error.rs", "rank": 6, "score": 6.019127580824188 }, { "content": "//! 
module for handling actions and do some operations related to multiple modules\n\nuse crate::character::{\n\n fight, player::PlayerEvent, Action, DamageReaction, Enemy, EnemyHandler, Player,\n\n};\n\nuse crate::dungeon::{Direction, Dungeon, DungeonPath};\n\nuse crate::error::*;\n\nuse crate::item::{itembox::Entry as ItemEntry, ItemHandler, ItemToken};\n\nuse crate::ui::UiState;\n\nuse crate::{GameInfo, GameMsg, Reaction};\n\nuse std::iter;\n\nuse std::rc::Rc;\n\n\n\npub(crate) fn process_action(\n\n action: Action,\n\n info: &mut GameInfo,\n\n dungeon: &mut dyn Dungeon,\n\n item: &mut ItemHandler,\n\n player: &mut Player,\n\n enemies: &mut EnemyHandler,\n\n) -> GameResult<(Option<UiState>, Vec<Reaction>)> {\n", "file_path": "core/src/actions.rs", "rank": 7, "score": 4.650219826819137 }, { "content": "use ndarray::{ArrayViewMut, Axis, Ix3};\n\nuse rogue_gym_core::character::player::Status;\n\nuse rogue_gym_core::GameMsg;\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub(crate) struct MessageFlagInner(pub u32);\n\n\n\n#[rustfmt::skip]\n\nimpl MessageFlagInner {\n\n const HIT_FROM: u32 = 0b000_000_001;\n\n const HIT_TO: u32 = 0b000_000_010;\n\n const MISS_TO: u32 = 0b000_000_100;\n\n const MISS_FROM: u32 = 0b000_001_000;\n\n const KILLED: u32 = 0b000_010_000;\n\n const SECRET_DOOR: u32 = 0b000_100_000;\n\n const NO_DOWNSTAIR: u32 = 0b001_000_000;\n\n}\n\n\n\nimpl MessageFlagInner {\n\n pub fn new() -> Self {\n", "file_path": "python/src/fearures.rs", "rank": 8, "score": 4.609263946658296 }, { "content": " self.steps = 0;\n\n Ok(())\n\n }\n\n pub(crate) fn state(&self) -> PlayerState {\n\n self.state.clone()\n\n }\n\n pub(crate) fn symbols(&self) -> usize {\n\n usize::from(self.state.symbols)\n\n }\n\n pub(crate) fn react(&mut self, input: u8) -> GameResult<()> {\n\n if self.steps > self.max_steps {\n\n return Ok(());\n\n }\n\n let res = self.runtime.react_to_key(Key::Char(input as char))?;\n\n self.state.message.reset();\n\n let mut dead = false;\n\n for 
reaction in res {\n\n match reaction {\n\n Reaction::Redraw => {\n\n self.state.draw_map(&self.runtime)?;\n", "file_path": "python/src/state_impls.rs", "rank": 9, "score": 4.1949236349545425 }, { "content": "use super::{clamp, DamageReaction, Defense, Dice, Exp, HitPoint, Level, Maxed, Strength};\n\nuse crate::dungeon::{Direction, DungeonPath};\n\nuse crate::error::GameResult;\n\nuse crate::item::{\n\n armor, food::Food, itembox::ItemBox, weapon, InitItem, Item, ItemHandler, ItemKind, ItemToken,\n\n};\n\nuse crate::rng::RngHandle;\n\nuse crate::tile::{Drawable, Tile};\n\nuse smallstr::SmallStr;\n\nuse std::{cmp, fmt};\n\nuse tuple_map::TupleMap2;\n\n\n\n/// Player configuration\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Config {\n\n #[serde(default, flatten)]\n\n pub level: Leveling,\n\n #[serde(default = \"default_hunger_time\")]\n\n pub hunger_time: u32,\n\n #[serde(default = \"default_init_hp\")]\n", "file_path": "core/src/character/player.rs", "rank": 10, "score": 4.184789431627651 }, { "content": "use rogue_gym_core::{\n\n error::GameResult,\n\n input::{Key, KeyMap},\n\n ui::{MordalKind, UiState},\n\n GameConfig, Reaction, RunTime,\n\n};\n\nuse PlayerState;\n\n\n\npub(crate) struct GameStateImpl {\n\n pub(crate) runtime: RunTime,\n\n state: PlayerState,\n\n steps: usize,\n\n max_steps: usize,\n\n}\n\n\n\nunsafe impl Send for GameStateImpl {}\n\n\n\nimpl GameStateImpl {\n\n pub(crate) fn new(config: GameConfig, max_steps: usize) -> GameResult<Self> {\n\n let symbols = config\n", "file_path": "python/src/state_impls.rs", "rank": 11, "score": 3.948243568655669 }, { "content": "use crate::font::FontHandle;\n\nuse crate::term_image::TermImage;\n\nuse crate::theme::Theme;\n\nuse image::gif::{DisposalMethod, Encoder};\n\nuse rogue_gym_core::{error::*, input::InputCode, GameConfig, Reaction};\n\nuse rogue_gym_uilib::process_reaction;\n\nuse std::fs::File;\n\nuse std::io::BufWriter;\n\n\n\npub struct GifEncoder<'a> {\n\n config: 
GameConfig,\n\n font: FontHandle<'a>,\n\n scale: u32,\n\n theme: Theme,\n\n interval: u32,\n\n}\n\n\n\nimpl<'a> GifEncoder<'a> {\n\n pub fn new(\n\n config: GameConfig,\n", "file_path": "act2gif/src/draw.rs", "rank": 12, "score": 3.883004190606571 }, { "content": " ItemKind::Armor(a) => name == a.name(),\n\n _ => false,\n\n })\n\n }\n\n Ok(())\n\n }\n\n pub fn strength(&self) -> Maxed<Strength> {\n\n self.status.strength\n\n }\n\n pub fn level(&self) -> Level {\n\n self.status.level\n\n }\n\n pub(crate) fn buttle(&mut self) {\n\n self.status.quiet = 0\n\n }\n\n pub(crate) fn turn_passed(&mut self, rng: &mut RngHandle) -> Vec<PlayerEvent> {\n\n let mut res = vec![];\n\n self.status.food_left -= 1;\n\n if self.status.food_left == 0 {\n\n return vec![PlayerEvent::Dead];\n", "file_path": "core/src/character/player.rs", "rank": 13, "score": 3.7095141943945813 }, { "content": " dam_plus: 0.into(),\n\n worth,\n\n launcher,\n\n };\n\n (weapon, attr, num.into())\n\n }\n\n fn name(&self) -> &str {\n\n self.name.as_ref()\n\n }\n\n fn worth(&self) -> crate::item::ItemNum {\n\n self.worth\n\n }\n\n}\n\n\n\npub(crate) fn rogue_init_weapons(vec: &mut Vec<InitItem>) {\n\n ((0, 0, 1, 1), (2, 0, 1, 0), (3, 25, 0, 0)).for_each(|(idx, num_plus, hit_plus, dam_plus)| {\n\n vec.push(InitItem::Weapon {\n\n name: BUILTIN_WEAPONS[idx].name.clone(),\n\n num_plus,\n\n hit_plus,\n", "file_path": "core/src/item/weapon.rs", "rank": 14, "score": 3.7095141943945813 }, { "content": " let enem = self.placed_enemies.remove(&place)?;\n\n enem.run();\n\n self.active_enemies.insert(place, enem);\n\n Some(())\n\n }\n\n pub(crate) fn remove_enemies(&mut self) {\n\n self.active_enemies = BTreeMap::new();\n\n self.placed_enemies = BTreeMap::new();\n\n }\n\n pub(crate) fn move_actives(\n\n &mut self,\n\n player_pos: &DungeonPath,\n\n gold_pos: Option<&DungeonPath>,\n\n dungeon: &mut dyn Dungeon,\n\n ) -> Vec<Attack> {\n\n let mut out = Vec::new();\n\n debug!(\n\n \"[EnemyHandler::move_actives] before: 
{:?}\",\n\n self.active_enemies\n\n );\n", "file_path": "core/src/character/enemies.rs", "rank": 15, "score": 3.502200799279783 }, { "content": "use super::handler::{Handler, ItemInner, ItemStat};\n\nuse super::{InitItem, Item, ItemAttr, ItemKind, ItemNum};\n\nuse crate::character::{Dice, HitPoint, Level};\n\nuse crate::rng::{Parcent, RngHandle};\n\nuse crate::SmallStr;\n\nuse std::fmt;\n\nuse std::ops::Range;\n\nuse tuple_map::TupleMap3;\n\n\n\n/// Weapon configuration\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Config {\n\n #[serde(default = \"default_weapons\")]\n\n pub weapons: Vec<Preset>,\n\n #[serde(default = \"default_cursed_rate\")]\n\n #[serde(skip_serializing_if = \"is_default_cursed_rate\")]\n\n pub cursed_rate: Parcent,\n\n #[serde(default = \"default_powerup_rate\")]\n\n #[serde(skip_serializing_if = \"is_default_powerup_rate\")]\n\n pub powerup_rate: Parcent,\n", "file_path": "core/src/item/weapon.rs", "rank": 16, "score": 3.480437568621258 }, { "content": " }\n\n if self.notify_hungry() {\n\n res.push(PlayerEvent::Hungry);\n\n }\n\n if self.heal(rng) {\n\n res.push(PlayerEvent::Healed);\n\n }\n\n res\n\n }\n\n pub(crate) fn get_damage(&mut self, damage: HitPoint) -> DamageReaction {\n\n self.status.hp.current = cmp::max(self.status.hp.current - damage, HitPoint(0));\n\n if self.status.hp.current == HitPoint(0) {\n\n DamageReaction::Death\n\n } else {\n\n DamageReaction::None\n\n }\n\n }\n\n pub(crate) fn level_up(&mut self, exp: Exp, rng: &mut RngHandle) -> bool {\n\n self.status.exp += exp;\n\n let diff = self\n", "file_path": "core/src/character/player.rs", "rank": 17, "score": 3.4444508543940455 }, { "content": " DamageReaction::None\n\n }\n\n }\n\n fn run(&self) {\n\n self.running.replace(true);\n\n }\n\n}\n\n\n\nimpl Drawable for Enemy {\n\n fn tile(&self) -> Tile {\n\n self.tile\n\n }\n\n}\n\n\n\npub(crate) struct Attack(Rc<Enemy>);\n\n\n\nimpl Attack {\n\n pub fn enemy(&self) -> &Enemy {\n\n 
self.0.as_ref()\n\n }\n", "file_path": "core/src/character/enemies.rs", "rank": 18, "score": 3.4136157483226337 }, { "content": "use super::{Damage, Defense, Dice, Enemy, HitPoint, Level, Player, Strength};\n\nuse crate::rng::{Parcent, RngHandle};\n\nuse item::ItemToken;\n\nuse std::iter;\n\n\n", "file_path": "core/src/character/fight.rs", "rank": 19, "score": 3.4136157483226337 }, { "content": " res.push(Reaction::UiTransition(mordal.clone()));\n\n return Ok(Some(mordal));\n\n }\n\n DamageReaction::None => {}\n\n }\n\n }\n\n None => {\n\n res.push(Reaction::Notify(GameMsg::MissFrom(\n\n at.enemy().name().to_owned(),\n\n )));\n\n }\n\n }\n\n }\n\n if did_hit {\n\n res.push(Reaction::StatusUpdated);\n\n }\n\n Ok(None)\n\n}\n\n\n\npub(crate) fn new_level(\n", "file_path": "core/src/actions.rs", "rank": 20, "score": 3.048303072370465 }, { "content": " rng.range(1..level - 6)\n\n } else {\n\n 0\n\n };\n\n if heal > 0 {\n\n self.status.hp.current += HitPoint(heal);\n\n self.status.hp.verify();\n\n self.status.quiet = 0;\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n fn notify_hungry(&mut self) -> bool {\n\n let hunger = self.config.hunger_time / 10;\n\n self.status.food_left == hunger || self.status.food_left == hunger * 2\n\n }\n\n}\n\n\n\npub(crate) enum PlayerEvent {\n", "file_path": "core/src/character/player.rs", "rank": 21, "score": 2.990122708858669 }, { "content": " };\n\n (armor, ItemAttr::empty(), 1.into())\n\n }\n\n}\n\n\n\nimpl ItemStat for ArmorStatus {\n\n type Item = Armor;\n\n fn appear_rate(&self) -> Parcent {\n\n self.appear_rate\n\n }\n\n fn build(self, _rng: &mut RngHandle) -> (Armor, ItemAttr, ItemNum) {\n\n self.build_inner()\n\n }\n\n fn name(&self) -> &str {\n\n self.name.as_ref()\n\n }\n\n fn worth(&self) -> crate::item::ItemNum {\n\n self.worth\n\n }\n\n}\n", "file_path": "core/src/item/armor.rs", "rank": 22, "score": 2.9618574837508596 }, { "content": " }\n\n pub fn from_path(p: &DungeonPath) -> Self {\n\n Address {\n\n level: p[0] as 
u32,\n\n cd: Coord::new(p[1], p[2]),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{Address, Coord, Direction, DungeonPath, MoveResult, TupleMap2};\n\n use crate::{GameConfig, RunTime};\n\n // tiny dungeon setting\n\n const CONFIG: &str = r#\"\n\n{\n\n \"width\": 32,\n\n \"height\": 16,\n\n \"seed\": 5,\n\n \"dungeon\": {\n", "file_path": "core/src/dungeon/rogue/mod.rs", "rank": 23, "score": 2.8801796392791568 }, { "content": " self.active_enemies.insert(next, enemy);\n\n }\n\n debug!(\n\n \"[EnemyHandler::move_actives] after: {:?}\",\n\n self.active_enemies\n\n );\n\n out\n\n }\n\n pub(crate) fn rng(&mut self) -> &mut RngHandle {\n\n &mut self.rng\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct StaticStatus {\n\n attack: &'static [Dice<HitPoint>],\n\n attr: EnemyAttr,\n\n defense: Defense,\n\n exp: Exp,\n\n gold: ItemNum,\n", "file_path": "core/src/character/enemies.rs", "rank": 24, "score": 2.778034777487774 }, { "content": " self.placed_enemies\n\n .get(&path)\n\n .or_else(|| self.active_enemies.get(&path))\n\n .map(Rc::clone)\n\n }\n\n pub fn activate_area<'a, F>(&mut self, is_in_activation_area: F)\n\n where\n\n F: Fn(&DungeonPath) -> bool,\n\n {\n\n let removes: Vec<_> = self\n\n .placed_enemies\n\n .iter()\n\n .filter(|(p, e)| is_in_activation_area(p) && e.is_mean())\n\n .map(|(p, _)| p.to_owned())\n\n .collect();\n\n for path in removes {\n\n self.activate(path);\n\n }\n\n }\n\n pub(crate) fn activate(&mut self, place: DungeonPath) -> Option<()> {\n", "file_path": "core/src/character/enemies.rs", "rank": 25, "score": 2.6828868556917773 }, { "content": "use fenwick::FenwickSet;\n\nuse num_traits::PrimInt;\n\nuse rand::{\n\n distributions::uniform::SampleUniform, thread_rng, Error as RndError, RngCore, SeedableRng,\n\n};\n\npub(crate) use rand::{seq::SliceRandom, Rng};\n\nuse rand_xorshift::XorShiftRng;\n\nuse std::cmp;\n\nuse std::mem;\n\nuse std::ops::{Bound, Range, RangeBounds};\n\n\n", "file_path": 
"core/src/rng.rs", "rank": 26, "score": 2.637715862046227 }, { "content": "}\n\n\n\nimpl Enemy {\n\n pub(crate) const STRENGTH: Strength = Strength(10);\n\n pub fn is_mean(&self) -> bool {\n\n self.attr.get().contains(EnemyAttr::MEAN)\n\n }\n\n pub fn is_greedy(&self) -> bool {\n\n self.attr.get().contains(EnemyAttr::GREEDY)\n\n }\n\n pub fn is_random(&self) -> bool {\n\n self.attr.get().contains(EnemyAttr::RANDOM)\n\n }\n\n pub fn is_confused(&self) -> bool {\n\n self.attr.get().contains(EnemyAttr::CONFUSED)\n\n }\n\n pub fn is_running(&self) -> bool {\n\n self.running.get()\n\n }\n\n pub fn level(&self) -> Level {\n", "file_path": "core/src/character/enemies.rs", "rank": 27, "score": 2.637715862046227 }, { "content": "use input::System;\n\n\n\n/// A representation of Ui transition\n\n#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]\n\npub enum UiState {\n\n Dungeon,\n\n Mordal(MordalKind),\n\n}\n\n\n\nimpl UiState {\n\n pub(crate) fn die(message: String) -> Self {\n\n UiState::Mordal(MordalKind::Grave(message.into_boxed_str()))\n\n }\n\n}\n\n\n\n/// mordals\n\n#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]\n\npub enum MordalKind {\n\n Grave(Box<str>),\n\n Inventory,\n", "file_path": "core/src/ui.rs", "rank": 28, "score": 2.594040744793051 }, { "content": "pub enum DamageReaction {\n\n Death,\n\n None,\n\n}\n\n\n\npub(crate) fn clamp<T: Ord>(value: T, min: T, max: T) -> T {\n\n use std::cmp;\n\n cmp::max(cmp::min(value, max), min)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn test_dice() {\n\n let dice = Dice::new(2, HitPoint(4));\n\n let mut rng = RngHandle::new();\n\n for _ in 0..100 {\n\n let hp = dice.random(&mut rng);\n\n assert!(2 <= hp.0 && hp.0 <= 8);\n\n }\n\n }\n\n}\n", "file_path": "core/src/character/mod.rs", "rank": 29, "score": 2.572741109898014 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub enum InitItem {\n\n Noinit(Item),\n\n Armor {\n\n name: SmallStr,\n\n 
def_plus: i32,\n\n },\n\n Weapon {\n\n name: SmallStr,\n\n num_plus: u32,\n\n hit_plus: i32,\n\n dam_plus: i32,\n\n },\n\n}\n\n\n\nimpl InitItem {\n\n pub(crate) fn initialize(self, handle: &mut ItemHandler) -> GameResult<ItemToken> {\n\n match self {\n\n InitItem::Noinit(item) => Ok(item),\n\n InitItem::Weapon {\n", "file_path": "core/src/item/mod.rs", "rank": 30, "score": 2.531174232343861 }, { "content": " font: FontHandle<'a>,\n\n scale: u32,\n\n theme: Theme,\n\n interval: u32,\n\n ) -> Self {\n\n GifEncoder {\n\n config,\n\n font,\n\n scale,\n\n theme,\n\n interval,\n\n }\n\n }\n\n pub fn exec(&mut self, inputs: Vec<InputCode>, filename: &str) -> GameResult<()> {\n\n let mut runtime = self.config.clone().build()?;\n\n let file = File::create(filename).into_chained(|| \"Failed to crate file\")?;\n\n let writer = BufWriter::new(file);\n\n let mut encoder = Encoder::new(writer);\n\n let mut term = TermImage::new(\n\n self.config.width.into(),\n", "file_path": "act2gif/src/draw.rs", "rank": 31, "score": 2.4909291617978293 }, { "content": " .symbol_max()\n\n .expect(\"Failed to get symbol max\")\n\n .to_byte()\n\n + 1;\n\n let mut runtime = config.build()?;\n\n runtime.keymap = KeyMap::ai();\n\n let (w, h) = runtime.screen_size();\n\n let mut state = PlayerState::new(w, h, symbols);\n\n state.reset(&mut runtime)?;\n\n Ok(GameStateImpl {\n\n runtime,\n\n state,\n\n steps: 0,\n\n max_steps,\n\n })\n\n }\n\n pub(crate) fn reset(&mut self, config: GameConfig) -> GameResult<()> {\n\n self.runtime = config.build()?;\n\n self.runtime.keymap = KeyMap::ai();\n\n self.state.reset(&mut self.runtime)?;\n", "file_path": "python/src/state_impls.rs", "rank": 32, "score": 2.451943835783881 }, { "content": "#[derive(Copy, Clone, Debug)]\n\npub(crate) struct StatusFlagInner(pub u32);\n\n\n\n#[rustfmt::skip]\n\nimpl StatusFlagInner {\n\n const DUNGEON_LEVEL: u32 = 0b000_000_001;\n\n const HP_CURRENT: u32 = 0b000_000_010;\n\n const HP_MAX: u32 = 0b000_000_100;\n\n const STR_CURRENT: 
u32 = 0b000_001_000;\n\n const STR_MAX: u32 = 0b000_010_000;\n\n const DEFENSE: u32 = 0b000_100_000;\n\n const PLAYER_LEVEL: u32 = 0b001_000_000;\n\n const EXP: u32 = 0b010_000_000;\n\n const HUNGER: u32 = 0b100_000_000;\n\n}\n\n\n\nimpl From<Option<u32>> for StatusFlagInner {\n\n fn from(u: Option<u32>) -> Self {\n\n StatusFlagInner(u.unwrap_or(0))\n\n }\n", "file_path": "python/src/fearures.rs", "rank": 33, "score": 2.3957014483477135 }, { "content": "use rogue_gym_core::{\n\n error::{GameResult, ResultExt2},\n\n GameConfig,\n\n};\n\nuse state_impls::GameStateImpl;\n\nuse std::sync::mpsc::{self, Receiver, SyncSender};\n\nuse std::thread;\n\nuse PlayerState;\n\n\n\npub(crate) struct ThreadConductor {\n\n receivers: Vec<Receiver<GameResult<PlayerState>>>,\n\n senders: Vec<SyncSender<Instruction>>,\n\n}\n\n\n\nimpl ThreadConductor {\n\n const SENDER_BOUND: usize = 4;\n\n pub fn new(configs: Vec<GameConfig>, max_steps: usize) -> GameResult<Self> {\n\n let mut receivers = vec![];\n\n let mut senders = vec![];\n\n for config in configs {\n", "file_path": "python/src/thread_impls.rs", "rank": 34, "score": 2.241458543606019 }, { "content": "use error::*;\n\nuse rogue_gym_core::dungeon::{Coord, X, Y};\n\nuse rogue_gym_uilib::Screen;\n\nuse std::collections::VecDeque;\n\nuse std::io::{self, Stdout, Write};\n\nuse termion::raw::{IntoRawMode, RawTerminal};\n\nuse termion::{clear, cursor, terminal_size};\n\nuse tuple_map::TupleMap2;\n\n\n\npub type RawTerm = RawTerminal<Stdout>;\n\n\n\n/// wrapper of stdout as rogue screen\n\npub struct TermScreen<T> {\n\n /// stdout\n\n term: T,\n\n has_notification: bool,\n\n width: u16,\n\n height: u16,\n\n pub(crate) pending_messages: VecDeque<String>,\n\n}\n", "file_path": "devui/src/screen.rs", "rank": 35, "score": 2.2255377411369883 }, { "content": "//! 
module for making and managing dangeon\n\nmod coord;\n\nmod field;\n\nmod rogue;\n\npub use self::coord::{Coord, Direction, Positioned, X, Y};\n\npub use self::field::{Cell, CellAttr, Field};\n\nuse character::player::Status as PlayerStatus;\n\nuse character::EnemyHandler;\n\nuse error::*;\n\nuse item::{ItemHandler, ItemToken};\n\nuse ndarray::Array2;\n\nuse smallvec::SmallVec;\n\nuse tile::Tile;\n\nuse {GameInfo, GameMsg, GlobalConfig};\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(tag = \"style\")]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum DungeonStyle {\n\n /// rogue 5.4.4 like dungeon\n", "file_path": "core/src/dungeon/mod.rs", "rank": 36, "score": 2.1606875066099085 }, { "content": "//! rogue floor\n\nuse super::{passages, rooms, Address, Config, Room, Surface};\n\nuse dungeon::{Cell, CellAttr, Coord, Direction, Field, Positioned, X, Y};\n\nuse enemies::EnemyHandler;\n\nuse enum_iterator::IntoEnumIterator;\n\nuse error::*;\n\nuse fenwick::FenwickSet;\n\nuse item::{ItemHandler, ItemToken};\n\nuse ndarray::Array2;\n\nuse rect_iter::{Get2D, GetMut2D};\n\nuse rng::RngHandle;\n\nuse std::collections::{HashMap, HashSet, VecDeque};\n\nuse GameMsg;\n\n\n\n/// representation of 'floor'\n\n#[derive(Clone, Debug, Default)]\n\npub struct Floor {\n\n /// rooms\n\n pub rooms: Vec<Room>,\n\n /// Coordinates of doors\n", "file_path": "core/src/dungeon/rogue/floor.rs", "rank": 37, "score": 2.1033702940584766 }, { "content": "use crate::font::{DrawInst, FontHandle};\n\nuse image::{gif::Frame, Pixel, Rgb, Rgba, RgbaImage};\n\nuse rect_iter::GetMut2D;\n\nuse rect_iter::RectRange;\n\nuse rogue_gym_core::dungeon::{Coord, X, Y};\n\nuse rogue_gym_core::error::{GameResult, ResultExt1};\n\nuse rogue_gym_uilib::Screen;\n\nuse rusttype::point;\n\nuse tuple_map::TupleMap2;\n\n\n\n#[derive(Debug, Fail)]\n\n#[fail(display = \"EncodeError\")]\n\npub struct EncodeError;\n\n\n\npub struct TermImage<'a: 'b, 'b> {\n\n buffer: RgbaImage,\n\n 
background: Rgba<u8>,\n\n fontcolor: Rgb<u8>,\n\n fontsize: u32,\n\n size: Coord,\n", "file_path": "act2gif/src/term_image.rs", "rank": 38, "score": 2.0779439551779473 }, { "content": "pub mod tile;\n\npub mod ui;\n\n\n\nuse character::{enemies, player, EnemyHandler, Player};\n\nuse dungeon::{Direction, Dungeon, DungeonStyle, Positioned, X, Y};\n\nuse error::*;\n\nuse input::{InputCode, Key, KeyMap};\n\nuse item::{ItemHandler, ItemKind};\n\nuse ndarray::Array2;\n\npub use smallstr::SmallStr;\n\nuse tile::{Drawable, Tile};\n\nuse ui::{MordalKind, MordalMsg, UiState};\n\n\n\n/// Game configuration\n\n/// it's inteded to construct from json\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct GameConfig {\n\n /// screen width\n\n #[serde(default = \"default_screen_width\")]\n\n #[serde(skip_serializing_if = \"is_default_width\")]\n", "file_path": "core/src/lib.rs", "rank": 39, "score": 2.0758371138296052 }, { "content": "pub mod floor;\n\npub mod maze;\n\npub mod passages;\n\npub mod rooms;\n\n\n\nuse self::floor::Floor;\n\npub use self::rooms::{Room, RoomKind};\n\nuse super::{Coord, Direction, Dungeon as DungeonTrait, DungeonPath, MoveResult, Positioned, X, Y};\n\nuse character::{player::Status as PlayerStatus, EnemyHandler};\n\nuse enum_iterator::IntoEnumIterator;\n\nuse error::*;\n\nuse item::{ItemHandler, ItemToken};\n\nuse ndarray::Array2;\n\nuse rect_iter::{Get2D, GetMut2D, RectRange};\n\nuse rng::RngHandle;\n\nuse std::collections::VecDeque;\n\nuse tile::{Drawable, Tile};\n\nuse tuple_map::TupleMap2;\n\nuse {GameInfo, GameMsg, GlobalConfig};\n\n\n", "file_path": "core/src/dungeon/rogue/mod.rs", "rank": 40, "score": 1.9725539522631452 }, { "content": "use super::{DamageReaction, Defense, Dice, Exp, HitPoint, Level, Strength};\n\nuse crate::{Drawable, SmallStr};\n\nuse dungeon::{Dungeon, DungeonPath, MoveResult};\n\nuse item::ItemNum;\n\nuse rng::{Parcent, RngHandle};\n\nuse smallvec::SmallVec;\n\nuse std::cell::Cell;\n\nuse 
std::collections::BTreeMap;\n\nuse std::ops::Range;\n\nuse std::rc::{Rc, Weak};\n\nuse tile::Tile;\n\n\n\npub type DiceVec<T> = SmallVec<[Dice<T>; 4]>;\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Config {\n\n #[serde(default = \"default_enemies\")]\n\n pub enemies: Vec<Preset>,\n\n #[serde(default = \"default_appear_rate_gold\")]\n\n #[serde(skip_serializing_if = \"is_default_appear_rate_gold\")]\n", "file_path": "core/src/character/enemies.rs", "rank": 41, "score": 1.9014067761517843 } ]
Rust
src/bin/2019_day20.rs
ibookstein/aoc
6beb8369f80d47eef95022ced9a9b9a9bccc7d99
use aoc::aoc_input::get_input; use itertools::iproduct; use std::collections::{HashMap, HashSet}; use std::convert::{TryFrom, TryInto}; use std::ops::Index; use std::str::FromStr; type Vertex = usize; type Label = (usize, usize); type LabelMap = HashMap<Label, Vertex>; type VertexSet = HashSet<Vertex>; type AdjacencyList = Vec<VertexSet>; #[derive(Debug, Copy, Clone, PartialEq, Eq)] enum BfsReply { Halt, Continue, } #[derive(Debug)] struct Graph { label_map: LabelMap, adj_list: AdjacencyList, } impl Graph { fn new() -> Graph { Graph { label_map: LabelMap::new(), adj_list: AdjacencyList::new(), } } fn get_or_insert_vertex(&mut self, label: &Label) -> Vertex { match self.label_map.get(label) { Some(v) => *v, None => { let new_vertex = self.adj_list.len(); self.adj_list.push(VertexSet::new()); self.label_map.insert(label.to_owned(), new_vertex); new_vertex } } } fn add_edge_by_labels(&mut self, from_label: &Label, to_label: &Label) { let from_vertex = self.get_or_insert_vertex(from_label); let to_vertex = self.get_or_insert_vertex(to_label); self.adj_list[from_vertex].insert(to_vertex); } fn add_bidirectional_edge_by_labels(&mut self, label1: &Label, label2: &Label) { self.add_edge_by_labels(label1, label2); self.add_edge_by_labels(label2, label1); } fn bfs_layers(&self, origin: Vertex, mut func: impl FnMut(usize, &VertexSet) -> BfsReply) { let mut visited = VertexSet::new(); visited.insert(origin); let mut current_layer = VertexSet::new(); current_layer.insert(origin); let mut depth = 0usize; while !current_layer.is_empty() { match func(depth, &current_layer) { BfsReply::Halt => return, BfsReply::Continue => (), } let new_layer: VertexSet = current_layer .iter() .flat_map(|v| &self.adj_list[*v]) .cloned() .filter(|v| !visited.contains(v)) .collect(); visited.extend(&new_layer); current_layer = new_layer; depth += 1; } } } #[derive(Debug)] struct AsciiGrid { grid: Vec<u8>, width: usize, } impl AsciiGrid { fn height(&self) -> usize { self.grid.len() / self.width } fn 
width(&self) -> usize { self.width } } impl Index<Label> for AsciiGrid { type Output = u8; fn index(&self, index: Label) -> &Self::Output { self.grid.index(self.width * index.1 + index.0) } } impl FromStr for AsciiGrid { type Err = &'static str; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut grid: Vec<u8> = Vec::new(); let mut width: Option<usize> = None; for line in s.lines() { if !line.is_ascii() { return Err("Non-ASCII line"); } if line.is_empty() { return Err("Empty line"); } if width.is_some() && width.unwrap() != line.len() { return Err("Non-uniform line length"); } width = Some(line.len()); grid.extend(line.bytes()); } if grid.len() == 0 { return Err("No lines"); } Ok(AsciiGrid { grid, width: width.unwrap(), }) } } #[derive(Debug)] struct Maze { graph: Graph, start: Vertex, end: Vertex, } impl TryFrom<&AsciiGrid> for Maze { type Error = &'static str; fn try_from(grid: &AsciiGrid) -> Result<Self, Self::Error> { let width = grid.width(); let height = grid.height(); let mid_x = width / 2; let mid_y = height / 2; let wall = '#' as u8; let empty = '.' 
as u8; let space = ' ' as u8; let tiles = [wall, empty]; let left_width = (2..width - 2) .take_while(|x| tiles.contains(&grid[(*x, mid_y)])) .count(); let right_width = (2..width - 2) .rev() .take_while(|x| tiles.contains(&grid[(*x, mid_y)])) .count(); let top_height = (2..height - 2) .take_while(|y| tiles.contains(&grid[(mid_x, *y)])) .count(); let bottom_height = (2..height - 2) .rev() .take_while(|y| tiles.contains(&grid[(mid_x, *y)])) .count(); let mut portals = HashMap::<[u8; 2], Vec<Label>>::new(); let outer_ys = 2..height - 2; let inner_ys = 4 + top_height..height - bottom_height - 4; let outer_xs = 2..width - 2; let inner_xs = 4 + left_width..width - right_width - 4; let leftbound1 = outer_ys.clone().map(|y| (0, y)); let leftbound2 = inner_ys.clone().map(|y| (width - right_width - 4, y)); for (x, y) in leftbound1.chain(leftbound2) { let key = [grid[(x, y)], grid[(x + 1, y)]]; if key[0] != space { portals.entry(key).or_default().push((x + 2, y)); } } let rightbound1 = inner_ys.map(|y| (2 + left_width, y)); let rightbound2 = outer_ys.map(|y| (width - 2, y)); for (x, y) in rightbound1.chain(rightbound2) { let key = [grid[(x, y)], grid[(x + 1, y)]]; if key[0] != space { portals.entry(key).or_default().push((x - 1, y)); } } let topbound1 = outer_xs.clone().map(|x| (x, 0)); let topbound2 = inner_xs.clone().map(|x| (x, height - bottom_height - 4)); for (x, y) in topbound1.chain(topbound2) { let key = [grid[(x, y)], grid[(x, y + 1)]]; if key[0] != space { portals.entry(key).or_default().push((x, y + 2)); } } let bottombound1 = inner_xs.map(|x| (x, 2 + top_height)); let bottombound2 = outer_xs.map(|x| (x, height - 2)); for (x, y) in bottombound1.chain(bottombound2) { let key = [grid[(x, y)], grid[(x, y + 1)]]; if key[0] != space { portals.entry(key).or_default().push((x, y - 1)); } } let start_portal = ['A' as u8; 2]; let end_portal = ['Z' as u8; 2]; let mut graph = Graph::new(); let mut start: Option<Vertex> = None; let mut end: Option<Vertex> = None; for (key, 
value) in portals.iter() { match value[..] { [point] => { if *key == start_portal { start = Some(graph.get_or_insert_vertex(&point)) } else if *key == end_portal { end = Some(graph.get_or_insert_vertex(&point)) } else { return Err("Bad portal"); } } [point1, point2] => graph.add_bidirectional_edge_by_labels(&point1, &point2), _ => { return Err("Bad portal"); } } } if start.is_none() { return Err("Start portal not found"); } if end.is_none() { return Err("End portal not found"); } for coord in iproduct!(2..width - 2, 2..height - 2) { if grid[coord] != empty { continue; } let (x, y) = coord; let adjacents = [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]; for adj in adjacents.iter().copied() { if grid[adj] != empty { continue; } graph.add_bidirectional_edge_by_labels(&coord, &adj); } } Ok(Maze { graph, start: start.unwrap(), end: end.unwrap(), }) } } impl Maze { fn start_end_distance(&self) -> usize { let mut result: Option<usize> = None; self.graph.bfs_layers(self.start, |depth, layer| { if layer.contains(&self.end) { result = Some(depth); BfsReply::Halt } else { BfsReply::Continue } }); result.unwrap() } } fn main() { let input = get_input(2019, 20); let grid: AsciiGrid = input.parse().unwrap(); let maze: Maze = (&grid).try_into().unwrap(); println!("Start-end distance: {}", maze.start_end_distance()); }
use aoc::aoc_input::get_input; use itertools::iproduct; use std::collections::{HashMap, HashSet}; use std::convert::{TryFrom, TryInto}; use std::ops::Index; use std::str::FromStr; type Vertex = usize; type Label = (usize, usize); type LabelMap = HashMap<Label, Vertex>; type VertexSet = HashSet<Vertex>; type AdjacencyList = Vec<VertexSet>; #[derive(Debug, Copy, Clone, PartialEq, Eq)] enum BfsReply { Halt, Continue, } #[derive(Debug)] struct Graph { label_map: LabelMap, adj_list: AdjacencyList, } impl Graph { fn new() -> Graph { Graph { label_map: LabelMap::new(), adj_list: AdjacencyList::new(), } } fn get_or_insert_vertex(&mut self, label: &Label) -> Vertex { match self.label_map.get(label) { Some(v) => *v, None => { let new_vertex = self.adj_list.len(); self.adj_list.push(VertexSet::new()); self.label_map.insert(label.to_owned(), new_vertex); new_vertex } } } fn add_edge_by_labels(&mut self, from_label: &Label, to_label: &Label) { let from_vertex = self.get_or_insert_vertex(from_label); let to_vertex = self.get_or_insert_vertex(to_label); self.adj_list[from_vertex].insert(to_vertex); } fn add_bidirectional_edge_by_labels(&mut self, label1: &Label, label2: &Label) { self.add_edge_by_labels(label1, label2); self.add_edge_by_labels(label2, label1); } fn bfs_layers(&self, origin: Vertex, mut func: impl FnMut(usize, &VertexSet) -> BfsReply) { let mut visited = VertexSet::new(); visited.insert(origin); let mut current_layer = VertexSet::new(); current_layer.insert(origin); let mut depth = 0usize; while !current_layer.is_e
nsert_vertex(&point)) } else if *key == end_portal { end = Some(graph.get_or_insert_vertex(&point)) } else { return Err("Bad portal"); } } [point1, point2] => graph.add_bidirectional_edge_by_labels(&point1, &point2), _ => { return Err("Bad portal"); } } } if start.is_none() { return Err("Start portal not found"); } if end.is_none() { return Err("End portal not found"); } for coord in iproduct!(2..width - 2, 2..height - 2) { if grid[coord] != empty { continue; } let (x, y) = coord; let adjacents = [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]; for adj in adjacents.iter().copied() { if grid[adj] != empty { continue; } graph.add_bidirectional_edge_by_labels(&coord, &adj); } } Ok(Maze { graph, start: start.unwrap(), end: end.unwrap(), }) } } impl Maze { fn start_end_distance(&self) -> usize { let mut result: Option<usize> = None; self.graph.bfs_layers(self.start, |depth, layer| { if layer.contains(&self.end) { result = Some(depth); BfsReply::Halt } else { BfsReply::Continue } }); result.unwrap() } } fn main() { let input = get_input(2019, 20); let grid: AsciiGrid = input.parse().unwrap(); let maze: Maze = (&grid).try_into().unwrap(); println!("Start-end distance: {}", maze.start_end_distance()); }
mpty() { match func(depth, &current_layer) { BfsReply::Halt => return, BfsReply::Continue => (), } let new_layer: VertexSet = current_layer .iter() .flat_map(|v| &self.adj_list[*v]) .cloned() .filter(|v| !visited.contains(v)) .collect(); visited.extend(&new_layer); current_layer = new_layer; depth += 1; } } } #[derive(Debug)] struct AsciiGrid { grid: Vec<u8>, width: usize, } impl AsciiGrid { fn height(&self) -> usize { self.grid.len() / self.width } fn width(&self) -> usize { self.width } } impl Index<Label> for AsciiGrid { type Output = u8; fn index(&self, index: Label) -> &Self::Output { self.grid.index(self.width * index.1 + index.0) } } impl FromStr for AsciiGrid { type Err = &'static str; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut grid: Vec<u8> = Vec::new(); let mut width: Option<usize> = None; for line in s.lines() { if !line.is_ascii() { return Err("Non-ASCII line"); } if line.is_empty() { return Err("Empty line"); } if width.is_some() && width.unwrap() != line.len() { return Err("Non-uniform line length"); } width = Some(line.len()); grid.extend(line.bytes()); } if grid.len() == 0 { return Err("No lines"); } Ok(AsciiGrid { grid, width: width.unwrap(), }) } } #[derive(Debug)] struct Maze { graph: Graph, start: Vertex, end: Vertex, } impl TryFrom<&AsciiGrid> for Maze { type Error = &'static str; fn try_from(grid: &AsciiGrid) -> Result<Self, Self::Error> { let width = grid.width(); let height = grid.height(); let mid_x = width / 2; let mid_y = height / 2; let wall = '#' as u8; let empty = '.' 
as u8; let space = ' ' as u8; let tiles = [wall, empty]; let left_width = (2..width - 2) .take_while(|x| tiles.contains(&grid[(*x, mid_y)])) .count(); let right_width = (2..width - 2) .rev() .take_while(|x| tiles.contains(&grid[(*x, mid_y)])) .count(); let top_height = (2..height - 2) .take_while(|y| tiles.contains(&grid[(mid_x, *y)])) .count(); let bottom_height = (2..height - 2) .rev() .take_while(|y| tiles.contains(&grid[(mid_x, *y)])) .count(); let mut portals = HashMap::<[u8; 2], Vec<Label>>::new(); let outer_ys = 2..height - 2; let inner_ys = 4 + top_height..height - bottom_height - 4; let outer_xs = 2..width - 2; let inner_xs = 4 + left_width..width - right_width - 4; let leftbound1 = outer_ys.clone().map(|y| (0, y)); let leftbound2 = inner_ys.clone().map(|y| (width - right_width - 4, y)); for (x, y) in leftbound1.chain(leftbound2) { let key = [grid[(x, y)], grid[(x + 1, y)]]; if key[0] != space { portals.entry(key).or_default().push((x + 2, y)); } } let rightbound1 = inner_ys.map(|y| (2 + left_width, y)); let rightbound2 = outer_ys.map(|y| (width - 2, y)); for (x, y) in rightbound1.chain(rightbound2) { let key = [grid[(x, y)], grid[(x + 1, y)]]; if key[0] != space { portals.entry(key).or_default().push((x - 1, y)); } } let topbound1 = outer_xs.clone().map(|x| (x, 0)); let topbound2 = inner_xs.clone().map(|x| (x, height - bottom_height - 4)); for (x, y) in topbound1.chain(topbound2) { let key = [grid[(x, y)], grid[(x, y + 1)]]; if key[0] != space { portals.entry(key).or_default().push((x, y + 2)); } } let bottombound1 = inner_xs.map(|x| (x, 2 + top_height)); let bottombound2 = outer_xs.map(|x| (x, height - 2)); for (x, y) in bottombound1.chain(bottombound2) { let key = [grid[(x, y)], grid[(x, y + 1)]]; if key[0] != space { portals.entry(key).or_default().push((x, y - 1)); } } let start_portal = ['A' as u8; 2]; let end_portal = ['Z' as u8; 2]; let mut graph = Graph::new(); let mut start: Option<Vertex> = None; let mut end: Option<Vertex> = None; for (key, 
value) in portals.iter() { match value[..] { [point] => { if *key == start_portal { start = Some(graph.get_or_i
random
[ { "content": "pub fn digits<T>(mut num: T, radix: T) -> impl Iterator<Item = T>\n\nwhere\n\n T: UnsignedDigits<T>,\n\n{\n\n let zero = T::zero();\n\n\n\n let mut divisor = T::one();\n\n while num >= divisor * radix {\n\n divisor *= radix;\n\n }\n\n\n\n std::iter::from_fn(move || {\n\n if divisor == zero {\n\n None\n\n } else {\n\n let v = num / divisor;\n\n num %= divisor;\n\n divisor /= radix;\n\n Some(v)\n\n }\n\n })\n\n}\n", "file_path": "src/digits.rs", "rank": 0, "score": 74380.28518279849 }, { "content": "pub fn new_stream_ref_from_iter(iter: impl IntoIterator<Item = isize>) -> StreamRef {\n\n let s = new_stream_ref();\n\n s.borrow_mut().extend(iter);\n\n s\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 1, "score": 69458.1229191116 }, { "content": "pub fn new_stream_ref() -> StreamRef {\n\n Rc::new(RefCell::new(Stream::new()))\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 2, "score": 55004.535479674865 }, { "content": "pub fn split_exact<'a, const N: usize>(\n\n s: &'a str,\n\n pat: &str,\n\n) -> Result<[&'a str; N], &'static str> {\n\n iter_consume_exact(s.split(pat))\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 3, "score": 51273.35896979222 }, { "content": "pub fn parse_prefix_and_split<'a, const N: usize>(\n\n s: &'a str,\n\n prefix: &str,\n\n split_pat: &str,\n\n) -> Result<[&'a str; N], &'static str> {\n\n if !s.starts_with(prefix) {\n\n return Err(\"Invalid prefix\");\n\n }\n\n\n\n iter_consume_exact(s[prefix.len()..].split(split_pat))\n\n}\n", "file_path": "src/parse.rs", "rank": 4, "score": 49678.24875399699 }, { "content": "pub fn new_stream_ref_from(value: isize) -> StreamRef {\n\n let s = new_stream_ref();\n\n s.borrow_mut().push_back(value);\n\n s\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 5, "score": 49283.4474934695 }, { "content": "pub fn iter_consume_exact<T, const N: usize>(\n\n mut iter: impl Iterator<Item = T>,\n\n) -> Result<[T; N], &'static str> {\n\n let mut data = MaybeUninit::<[T; N]>::uninit();\n\n let 
start: *mut T = unsafe { std::mem::transmute(&mut data) };\n\n\n\n for i in 0..N {\n\n let item = iter.next().ok_or(\"Insufficient items in iterator\")?;\n\n unsafe { start.offset(i as isize).write(item) }\n\n }\n\n\n\n if iter.next().is_some() {\n\n return Err(\"Too many items in iterator\");\n\n }\n\n\n\n Ok(unsafe { data.assume_init() })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 6, "score": 48209.54349869874 }, { "content": "fn parse_addressing_mode(digit: usize) -> IntcodeResult<AddressingMode> {\n\n match digit {\n\n 0 => Ok(AddressingMode::AbsoluteAddress),\n\n 1 => Ok(AddressingMode::Immediate),\n\n 2 => Ok(AddressingMode::BasePointerRelative),\n\n _ => Err(IntcodeError::InvalidAddressingMode),\n\n }\n\n}\n\n\n\nimpl IntcodeMachine {\n\n pub fn new_io(tape: Tape, input: StreamRef, output: StreamRef) -> Self {\n\n IntcodeMachine {\n\n tape,\n\n input,\n\n output,\n\n pc: 0,\n\n bp: 0,\n\n }\n\n }\n\n\n", "file_path": "src/intcode.rs", "rank": 7, "score": 47879.722550785606 }, { "content": "#[derive(Debug)]\n\nstruct Operand {\n\n mode: AddressingMode,\n\n value: isize,\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 8, "score": 47371.96394400817 }, { "content": "#[derive(Debug)]\n\nstruct Opcode {\n\n operation: Operation,\n\n operands: Vec<Operand>,\n\n}\n\n\n\npub type IntcodeResult<T> = Result<T, IntcodeError>;\n\npub type Tape = Vec<isize>;\n\npub type Stream = VecDeque<isize>;\n\npub type StreamRef = Rc<RefCell<Stream>>;\n\n\n\n#[derive(Debug)]\n\npub struct IntcodeMachine {\n\n tape: Tape,\n\n pub input: StreamRef,\n\n pub output: StreamRef,\n\n pc: isize,\n\n bp: isize,\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 9, "score": 47371.96394400817 }, { "content": "#[derive(Debug)]\n\nenum Operation {\n\n Add,\n\n Multiply,\n\n Input,\n\n Output,\n\n JumpTrue,\n\n JumpFalse,\n\n LessThan,\n\n Equals,\n\n AdjustBasePointer,\n\n Halt,\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 10, "score": 47371.96394400817 }, { "content": 
"#[derive(Debug)]\n\nenum AddressingMode {\n\n AbsoluteAddress,\n\n Immediate,\n\n BasePointerRelative,\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 11, "score": 46065.34506574787 }, { "content": "fn get_session_key() -> String {\n\n let mut path = std::env::current_exe().unwrap();\n\n path.pop();\n\n path.push(&SESSION_FILE_PATH.iter().cloned().collect::<PathBuf>());\n\n\n\n let mut content = String::new();\n\n let mut file = File::open(path).unwrap();\n\n file.read_to_string(&mut content).unwrap();\n\n content.trim().to_string()\n\n}\n\n\n", "file_path": "src/aoc_input.rs", "rank": 12, "score": 37939.39677563337 }, { "content": "pub fn parse_intcode_program(input: &str) -> Tape {\n\n input\n\n .trim()\n\n .split(',')\n\n .map(|s| s.parse().unwrap())\n\n .collect::<Tape>()\n\n}\n", "file_path": "src/intcode.rs", "rank": 13, "score": 31889.245873714608 }, { "content": "pub fn manhattan_distance(lhs: Coord, rhs: Coord) -> isize {\n\n (lhs.0 - rhs.0).abs() + (lhs.1 - rhs.1).abs()\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct CoordVec(Vec<isize>);\n\n\n\nimpl CoordVec {\n\n pub fn origin(n: usize) -> Self {\n\n Self(vec![0; n])\n\n }\n\n\n\n pub fn from_vec(v: Vec<isize>) -> Self {\n\n Self(v)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct DeltaVec(Vec<isize>);\n\n\n", "file_path": "src/coordinates.rs", "rank": 14, "score": 29256.0393932917 }, { "content": "pub fn div_round_nearest(dividend: isize, divisor: isize) -> isize {\n\n let sign = 1 - ((dividend < 0) ^ (divisor < 0)) as isize * 2;\n\n let (q, r) = dividend.div_rem(&divisor);\n\n q + (r.abs() > (divisor / 2).abs()) as isize * sign\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::assert_eq;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_div_round_nearest() {\n\n assert_eq!(div_round_nearest(8, 3), 3);\n\n assert_eq!(div_round_nearest(8, -3), -3);\n\n assert_eq!(div_round_nearest(-8, 3), -3);\n\n assert_eq!(div_round_nearest(-8, -3), 3);\n\n\n\n 
assert_eq!(div_round_nearest(10, 3), 3);\n\n assert_eq!(div_round_nearest(10, -3), -3);\n\n assert_eq!(div_round_nearest(-10, 3), -3);\n\n assert_eq!(div_round_nearest(-10, -3), 3);\n\n }\n\n}\n", "file_path": "src/num.rs", "rank": 15, "score": 28587.99471694339 }, { "content": "pub fn get_input(year: u16, day: u8) -> String {\n\n try_get_input(year, day).expect(\"Failed getting input\")\n\n}\n", "file_path": "src/aoc_input.rs", "rank": 16, "score": 28587.99471694339 }, { "content": "fn try_get_input(year: u16, day: u8) -> Result<String, Box<dyn Error>> {\n\n let mut path = PathBuf::new();\n\n path.push(std::env::current_exe()?.parent().unwrap());\n\n path.push(CACHE_DIR);\n\n let filename = format!(\"{}_{}.txt\", year, day);\n\n path.push(&filename);\n\n\n\n let _ = std::fs::create_dir(path.parent().unwrap());\n\n\n\n if path.exists() {\n\n println!(\"Cache hit for {}\", &filename);\n\n Ok(std::fs::read_to_string(path)?)\n\n } else {\n\n println!(\"Cache miss for {}\", &filename);\n\n let input_str = get_input_web(year, day)?;\n\n std::fs::write(path, &input_str)?;\n\n Ok(input_str)\n\n }\n\n}\n\n\n", "file_path": "src/aoc_input.rs", "rank": 17, "score": 24321.615318162214 }, { "content": "fn get_input_web(year: u16, day: u8) -> Result<String, Box<dyn Error>> {\n\n let url_str = format!(\"https://adventofcode.com/{}/day/{}/input\", year, day);\n\n let mut headers = HeaderMap::new();\n\n headers.insert(\n\n COOKIE,\n\n format!(\"session={}\", get_session_key()).parse().unwrap(),\n\n );\n\n let resp = Client::new().get(&url_str).headers(headers).send()?;\n\n Ok(resp.text()?)\n\n}\n\n\n", "file_path": "src/aoc_input.rs", "rank": 18, "score": 24321.615318162214 }, { "content": "use num_traits::{Signed, Zero};\n\nuse std::ops::{Add, AddAssign, Index, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub struct VecN<T, const N: usize>([T; N]);\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub struct 
Scalar<T>(T);\n\n\n\nimpl<T: Copy + Zero, const N: usize> VecN<T, N> {\n\n pub fn zero() -> Self {\n\n Self([T::zero(); N])\n\n }\n\n\n\n pub fn new(inner: [T; N]) -> Self {\n\n Self(inner)\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n", "file_path": "src/vec.rs", "rank": 19, "score": 10.586749144552034 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, EnumIter, IntoPrimitive, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Direction {\n\n Up = 0,\n\n Right = 1,\n\n Down = 2,\n\n Left = 3,\n\n}\n\n\n\nimpl Direction {\n\n pub fn turn(self, to: Turn) -> Self {\n\n let res = (u8::from(self) + u8::from(to)) % 4;\n\n Direction::try_from(res).unwrap()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumIter)]\n", "file_path": "src/coordinates.rs", "rank": 20, "score": 9.875298495522745 }, { "content": " fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.iter.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a, T> FusedIterator for GridIterMut<'a, T> {}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Grid<T> {\n\n grid: Vec<T>,\n\n width: usize,\n\n}\n\n\n\nimpl<T> Grid<T>\n\nwhere\n\n T: Default,\n\n{\n\n pub fn new(width: usize, height: usize) -> Self {\n\n let mut grid = Vec::new();\n\n grid.resize_with(width * height, Default::default);\n", "file_path": "src/grid.rs", "rank": 21, "score": 9.806360286958265 }, { "content": "use num_enum::{IntoPrimitive, TryFromPrimitive};\n\nuse std::convert::{From, TryFrom};\n\nuse std::ops::{Add, AddAssign, Mul};\n\nuse strum_macros::EnumIter;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, IntoPrimitive, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Turn {\n\n Front = 0,\n\n Right = 1,\n\n Back = 2,\n\n Left = 3,\n\n}\n\n\n\nimpl Mul<Turn> for isize {\n\n type Output = Turn;\n\n\n\n fn mul(self, rhs: Turn) -> Self::Output {\n\n let n = self.rem_euclid(4) as u8;\n\n Turn::try_from((n * u8::from(rhs)) % 4).unwrap()\n", "file_path": "src/coordinates.rs", "rank": 22, "score": 
8.521303942680099 }, { "content": " }\n\n\n\n pub fn values_mut(&mut self) -> impl Iterator<Item = &mut T> {\n\n self.grid.iter_mut()\n\n }\n\n\n\n fn index_for(&self, c: Coord) -> Option<usize> {\n\n let Coord(x, y) = c;\n\n if x < 0 || y < 0 {\n\n return None;\n\n }\n\n\n\n let (x, y) = (x as usize, y as usize);\n\n let (w, h) = (self.width(), self.height());\n\n if x < w && y < h {\n\n Some(x + w * y)\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": "src/grid.rs", "rank": 23, "score": 8.37954393010667 }, { "content": " }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.iter.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a, T> FusedIterator for GridIter<'a, T> {}\n\n\n\n#[derive(Debug)]\n\npub struct GridIterMut<'a, T: 'a> {\n\n iter: std::slice::IterMut<'a, T>,\n\n width: usize,\n\n coord: Coord,\n\n}\n\n\n\nimpl<'a, T> GridIterMut<'a, T> {\n\n pub fn new(grid: &'a mut Grid<T>) -> Self {\n\n Self {\n\n iter: grid.grid.iter_mut(),\n", "file_path": "src/grid.rs", "rank": 24, "score": 7.698520072640159 }, { "content": "impl Rotation {\n\n pub fn to_cw_count(&self) -> usize {\n\n match *self {\n\n Rotation::Cw0 => 0,\n\n Rotation::Cw90 => 1,\n\n Rotation::Cw180 => 2,\n\n Rotation::Cw270 => 3,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct GridIter<'a, T: 'a> {\n\n iter: std::slice::Iter<'a, T>,\n\n width: usize,\n\n coord: Coord,\n\n}\n\n\n\nimpl<'a, T> GridIter<'a, T> {\n\n pub fn new(grid: &'a Grid<T>) -> Self {\n", "file_path": "src/grid.rs", "rank": 25, "score": 7.504596086862804 }, { "content": " self.verify_addr(newbp)?;\n\n self.bp = newbp;\n\n }\n\n Operation::Halt => {\n\n self.pc = start_pc;\n\n return Ok(Some(StopStatus::Halted));\n\n }\n\n };\n\n\n\n Ok(None)\n\n }\n\n\n\n pub fn run(&mut self) -> IntcodeResult<StopStatus> {\n\n loop {\n\n match self.tick() {\n\n Ok(None) => continue,\n\n Ok(Some(status)) => return Ok(status),\n\n Err(e) => return Err(e),\n\n }\n\n }\n", "file_path": "src/intcode.rs", "rank": 26, "score": 
7.491738739159216 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct Delta(pub isize, pub isize);\n\n\n\nimpl Delta {\n\n pub fn turn(&self, t: Turn) -> Delta {\n\n match t {\n\n Turn::Front => *self,\n\n Turn::Right => Delta(-self.1, self.0),\n\n Turn::Back => Delta(-self.0, -self.1),\n\n Turn::Left => Delta(self.1, -self.0),\n\n }\n\n }\n\n}\n\n\n\nimpl Add<Delta> for Coord {\n\n type Output = Coord;\n\n\n\n fn add(self, rhs: Delta) -> Self::Output {\n\n Coord(self.0 + rhs.0, self.1 + rhs.1)\n\n }\n", "file_path": "src/coordinates.rs", "rank": 27, "score": 7.2686426117660705 }, { "content": "{\n\n pub fn count_eq(&self, item: &T) -> usize {\n\n self.values().filter(|&e| e == item).count()\n\n }\n\n}\n\n\n\nimpl<T> Grid<T>\n\nwhere\n\n T: Clone,\n\n{\n\n pub fn rotate_clockwise_inplace(&mut self) {\n\n let w = self.width() as isize;\n\n let h = self.height() as isize;\n\n let mut v = Vec::with_capacity(self.grid.len());\n\n\n\n for x in 0..w {\n\n for y in (0..h).rev() {\n\n v.push(self.get(Coord(x, y)).unwrap().clone());\n\n }\n\n }\n", "file_path": "src/grid.rs", "rank": 28, "score": 6.868597476277547 }, { "content": " fn add(self, rhs: VecN<T, N>) -> Self::Output {\n\n let mut res = self;\n\n res += rhs;\n\n res\n\n }\n\n}\n\n\n\nimpl<T: Copy + MulAssign<T>, const N: usize> Mul<VecN<T, N>> for Scalar<T> {\n\n type Output = VecN<T, N>;\n\n\n\n fn mul(self, rhs: VecN<T, N>) -> Self::Output {\n\n let mut res = rhs;\n\n for c in res.0.iter_mut() {\n\n *c *= self.0;\n\n }\n\n res\n\n }\n\n}\n\n\n\nimpl<T: Copy + Neg<Output = T>, const N: usize> Neg for VecN<T, N> {\n", "file_path": "src/vec.rs", "rank": 29, "score": 6.840618204187196 }, { "content": " width: grid.width,\n\n coord: Coord::origin(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T> Iterator for GridIterMut<'a, T> {\n\n type Item = (Coord, &'a mut T);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let res = (self.coord, self.iter.next()?);\n\n if self.coord.0 as usize == self.width - 
1 {\n\n self.coord.0 = 0;\n\n self.coord.1 += 1;\n\n } else {\n\n self.coord.0 += 1;\n\n }\n\n Some(res)\n\n }\n\n\n", "file_path": "src/grid.rs", "rank": 30, "score": 6.827607863781044 }, { "content": "pub enum Direction8 {\n\n North,\n\n NorthEast,\n\n East,\n\n SouthEast,\n\n South,\n\n SouthWest,\n\n West,\n\n NorthWest,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]\n\npub struct Coord(pub isize, pub isize);\n\n\n\nimpl Coord {\n\n pub fn origin() -> Coord {\n\n Coord(0, 0)\n\n }\n\n}\n\n\n", "file_path": "src/coordinates.rs", "rank": 31, "score": 6.804141944477849 }, { "content": " }\n\n}\n\n\n\nimpl<T, const N: usize> Index<usize> for VecN<T, N> {\n\n type Output = T;\n\n\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.0[index]\n\n }\n\n}\n\n\n\nimpl<T: Copy + SubAssign, const N: usize> SubAssign<VecN<T, N>> for VecN<T, N> {\n\n fn sub_assign(&mut self, rhs: VecN<T, N>) {\n\n for i in 0..self.0.len() {\n\n self.0[i] -= rhs.0[i];\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Copy + SubAssign, const N: usize> Sub<VecN<T, N>> for VecN<T, N> {\n", "file_path": "src/vec.rs", "rank": 32, "score": 6.668905254562867 }, { "content": "\n\n self.grid = v;\n\n self.width = h as usize;\n\n }\n\n\n\n pub fn rotate_inplace(&mut self, rotation: Rotation) {\n\n for _ in 0..rotation.to_cw_count() {\n\n self.rotate_clockwise_inplace();\n\n }\n\n }\n\n}\n\n\n\nimpl<T> FromStr for Grid<T>\n\nwhere\n\n T: TryFrom<char>,\n\n{\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut grid: Vec<T> = Vec::new();\n", "file_path": "src/grid.rs", "rank": 33, "score": 6.562678425892871 }, { "content": " Self { grid, width }\n\n }\n\n}\n\n\n\nimpl<T> Grid<T> {\n\n pub fn from_vec_and_width(grid: Vec<T>, width: usize) -> Self {\n\n assert_eq!(grid.len() % width, 0);\n\n Self { grid, width }\n\n }\n\n\n\n pub fn height(&self) -> usize {\n\n self.grid.len() / self.width\n\n }\n\n\n\n pub fn width(&self) -> usize {\n\n self.width\n\n 
}\n\n\n\n pub fn len(&self) -> usize {\n\n self.grid.len()\n", "file_path": "src/grid.rs", "rank": 34, "score": 6.419489443399563 }, { "content": " Self {\n\n iter: grid.grid.iter(),\n\n width: grid.width,\n\n coord: Coord::origin(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T> Iterator for GridIter<'a, T> {\n\n type Item = (Coord, &'a T);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let res = (self.coord, self.iter.next()?);\n\n if self.coord.0 as usize == self.width - 1 {\n\n self.coord.0 = 0;\n\n self.coord.1 += 1;\n\n } else {\n\n self.coord.0 += 1;\n\n }\n\n Some(res)\n", "file_path": "src/grid.rs", "rank": 35, "score": 6.247579937759485 }, { "content": " pub fn new(tape: Tape) -> Self {\n\n Self::new_io(tape, new_stream_ref(), new_stream_ref())\n\n }\n\n\n\n fn verify_addr(&mut self, addr: isize) -> IntcodeResult<usize> {\n\n if addr < 0 {\n\n return Err(IntcodeError::NegativeAddress);\n\n }\n\n\n\n let addr = addr as usize;\n\n if addr >= self.tape.len() {\n\n self.tape.resize(addr + 1, 0);\n\n }\n\n Ok(addr)\n\n }\n\n\n\n pub fn read_addr(&mut self, addr: isize) -> IntcodeResult<isize> {\n\n let addr = self.verify_addr(addr)?;\n\n Ok(self.tape[addr])\n\n }\n", "file_path": "src/intcode.rs", "rank": 36, "score": 6.211716997616638 }, { "content": "use crate::coordinates::Coord;\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::fmt::{Display, Write};\n\nuse std::iter::FusedIterator;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Axis {\n\n Horizontal,\n\n Vertical,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Rotation {\n\n Cw0,\n\n Cw90,\n\n Cw180,\n\n Cw270,\n\n}\n\n\n", "file_path": "src/grid.rs", "rank": 37, "score": 6.061292385928891 }, { "content": " type Output = VecN<T, N>;\n\n\n\n fn neg(self) -> Self::Output {\n\n let mut res = self;\n\n for c in res.0.iter_mut() {\n\n *c = -*c;\n\n }\n\n res\n\n }\n\n}\n\n\n\nimpl<T: Zero + Copy + Signed + AddAssign, const N: usize> VecN<T, N> {\n\n pub fn 
manhattan_norm(&self) -> T {\n\n let mut norm = T::zero();\n\n for c in self.0.iter() {\n\n norm += c.abs();\n\n }\n\n norm\n\n }\n\n}\n\n\n\npub type ISizeVec3 = VecN<isize, 3>;\n", "file_path": "src/vec.rs", "rank": 38, "score": 5.763859748593008 }, { "content": " type Output = VecN<T, N>;\n\n\n\n fn sub(self, rhs: VecN<T, N>) -> Self::Output {\n\n let mut res = self;\n\n res -= rhs;\n\n res\n\n }\n\n}\n\n\n\nimpl<T: Copy + AddAssign, const N: usize> AddAssign<VecN<T, N>> for VecN<T, N> {\n\n fn add_assign(&mut self, rhs: VecN<T, N>) {\n\n for i in 0..self.0.len() {\n\n self.0[i] += rhs.0[i];\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Copy + AddAssign, const N: usize> Add<VecN<T, N>> for VecN<T, N> {\n\n type Output = VecN<T, N>;\n\n\n", "file_path": "src/vec.rs", "rank": 39, "score": 5.752644100890617 }, { "content": " }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.len() != 0\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item = (Coord, &T)> {\n\n GridIter::new(self)\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = (Coord, &mut T)> {\n\n GridIterMut::new(self)\n\n }\n\n\n\n pub fn keys<'a>(&'a self) -> impl Iterator<Item = Coord> + 'a {\n\n self.iter().map(|(c, _)| c)\n\n }\n\n\n\n pub fn values(&self) -> impl Iterator<Item = &T> {\n\n self.grid.iter()\n", "file_path": "src/grid.rs", "rank": 40, "score": 5.714605922091131 }, { "content": "impl DeltaVec {\n\n pub fn from_vec(v: Vec<isize>) -> Self {\n\n Self(v)\n\n }\n\n}\n\n\n\nimpl<'a, 'b> Add<&'b DeltaVec> for &'a CoordVec {\n\n type Output = CoordVec;\n\n\n\n fn add(self, rhs: &'b DeltaVec) -> Self::Output {\n\n let n = self.0.len();\n\n assert_eq!(n, rhs.0.len());\n\n let res: Vec<_> = (0..n).map(|i| self.0[i] + rhs.0[i]).collect();\n\n CoordVec(res)\n\n }\n\n}\n\n\n\nimpl<'a> AddAssign<&'a DeltaVec> for CoordVec {\n\n fn add_assign(&mut self, rhs: &'a DeltaVec) {\n\n *self = &*self + rhs;\n", "file_path": "src/coordinates.rs", "rank": 41, "score": 5.2827307135229 }, { "content": " }\n\n\n\n pub 
fn run_to_completion(&mut self) -> IntcodeResult<()> {\n\n match self.run()? {\n\n StopStatus::Halted => Ok(()),\n\n _ => Err(IntcodeError::DidNotRunToCompletion),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 42, "score": 5.078721720067781 }, { "content": "}\n\n\n\nimpl Mul<Delta> for isize {\n\n type Output = Delta;\n\n\n\n fn mul(self, rhs: Delta) -> Self::Output {\n\n Delta(self * rhs.0, self * rhs.1)\n\n }\n\n}\n\n\n\nimpl From<Direction> for Delta {\n\n fn from(d: Direction) -> Self {\n\n match d {\n\n Direction::Up => Delta(0, -1),\n\n Direction::Down => Delta(0, 1),\n\n Direction::Left => Delta(-1, 0),\n\n Direction::Right => Delta(1, 0),\n\n }\n\n }\n\n}\n", "file_path": "src/coordinates.rs", "rank": 43, "score": 4.879918184897452 }, { "content": "}\n\n\n\nimpl AddAssign<Delta> for Coord {\n\n fn add_assign(&mut self, rhs: Delta) {\n\n *self = *self + rhs;\n\n }\n\n}\n\n\n\nimpl Add<Delta> for Delta {\n\n type Output = Delta;\n\n\n\n fn add(self, rhs: Delta) -> Self::Output {\n\n Delta(self.0 + rhs.0, self.1 + rhs.1)\n\n }\n\n}\n\n\n\nimpl AddAssign<Delta> for Delta {\n\n fn add_assign(&mut self, rhs: Delta) {\n\n *self = *self + rhs;\n\n }\n", "file_path": "src/coordinates.rs", "rank": 44, "score": 4.635715784778801 }, { "content": " }\n\n}\n\n\n\nimpl<'a, 'b> Add<&'b DeltaVec> for &'a DeltaVec {\n\n type Output = DeltaVec;\n\n\n\n fn add(self, rhs: &'b DeltaVec) -> Self::Output {\n\n let n = self.0.len();\n\n assert_eq!(n, rhs.0.len());\n\n let res: Vec<_> = (0..n).map(|i| self.0[i] + rhs.0[i]).collect();\n\n DeltaVec(res)\n\n }\n\n}\n\n\n\nimpl<'a> AddAssign<&'a DeltaVec> for DeltaVec {\n\n fn add_assign(&mut self, rhs: &'a DeltaVec) {\n\n *self = &*self + rhs;\n\n }\n\n}\n", "file_path": "src/coordinates.rs", "rank": 45, "score": 4.584280245192236 }, { "content": " let mut width: Option<usize> = None;\n\n\n\n for line in s.lines() {\n\n if line.is_empty() {\n\n return Err(\"Empty line\");\n\n }\n\n\n\n let mut cur_width = 0usize;\n\n 
for c in line.chars() {\n\n let item: T = c.try_into().or(Err(\"Failed parsing char\"))?;\n\n grid.push(item);\n\n cur_width += 1;\n\n }\n\n\n\n if width.is_some() && width.unwrap() != cur_width {\n\n return Err(\"Non-uniform line length\");\n\n }\n\n\n\n width = Some(cur_width);\n\n }\n", "file_path": "src/grid.rs", "rank": 46, "score": 4.280117930854635 }, { "content": "use crate::digits::digits;\n\nuse std::cell::RefCell;\n\nuse std::collections::VecDeque;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\npub enum IntcodeError {\n\n InvalidOpcodeOperation,\n\n NegativeOpcode,\n\n InvalidAddressingMode,\n\n NegativeAddress,\n\n InvalidStoreAddressingMode,\n\n DidNotRunToCompletion,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum StopStatus {\n\n Halted,\n\n BlockedOnInput,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/intcode.rs", "rank": 47, "score": 4.228034005327409 }, { "content": "\n\n let width = width.ok_or(\"No lines\")?;\n\n Ok(Grid { grid, width })\n\n }\n\n}\n\n\n\nimpl<T> Display for Grid<T>\n\nwhere\n\n T: Clone + Into<char>,\n\n{\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n for y in 0..self.height() as isize {\n\n for x in 0..self.width() as isize {\n\n let e = self.get(Coord(x, y)).unwrap();\n\n fmt.write_char(e.clone().into())?;\n\n }\n\n fmt.write_char('\\n')?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/grid.rs", "rank": 48, "score": 3.752852344434581 }, { "content": " digits.extend(vec![0; 5 - digits.len()]);\n\n\n\n let (operation, operand_count) = match 10 * digits[1] + digits[0] {\n\n 1 => (Operation::Add, 3),\n\n 2 => (Operation::Multiply, 3),\n\n 3 => (Operation::Input, 1),\n\n 4 => (Operation::Output, 1),\n\n 5 => (Operation::JumpTrue, 2),\n\n 6 => (Operation::JumpFalse, 2),\n\n 7 => (Operation::LessThan, 3),\n\n 8 => (Operation::Equals, 3),\n\n 9 => (Operation::AdjustBasePointer, 1),\n\n 99 => (Operation::Halt, 0),\n\n _ => return Err(IntcodeError::InvalidOpcodeOperation),\n\n };\n\n\n\n let mut 
operands = Vec::<Operand>::new();\n\n for i in 0..operand_count {\n\n let mode = parse_addressing_mode(digits[2 + i])?;\n\n let value = self.read_pc()?;\n", "file_path": "src/intcode.rs", "rank": 49, "score": 3.632918366731699 }, { "content": "\n\nimpl From<Direction8> for Delta {\n\n fn from(d: Direction8) -> Self {\n\n match d {\n\n Direction8::North => Delta(0, -1),\n\n Direction8::NorthEast => Delta(1, -1),\n\n Direction8::East => Delta(1, 0),\n\n Direction8::SouthEast => Delta(1, 1),\n\n Direction8::South => Delta(0, 1),\n\n Direction8::SouthWest => Delta(-1, 1),\n\n Direction8::West => Delta(-1, 0),\n\n Direction8::NorthWest => Delta(-1, -1),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/coordinates.rs", "rank": 50, "score": 3.509585613802387 }, { "content": " self.store(&opcode.operands[2], value)?;\n\n }\n\n Operation::Multiply => {\n\n let value = self.load(&opcode.operands[0])? * self.load(&opcode.operands[1])?;\n\n self.store(&opcode.operands[2], value)?;\n\n }\n\n Operation::Input => {\n\n let input = self.input.borrow_mut().pop_front();\n\n match input {\n\n Some(value) => self.store(&opcode.operands[0], value)?,\n\n None => {\n\n self.pc = start_pc;\n\n return Ok(Some(StopStatus::BlockedOnInput));\n\n }\n\n };\n\n }\n\n Operation::Output => {\n\n let value = self.load(&opcode.operands[0])?;\n\n self.output.borrow_mut().push_back(value);\n\n }\n", "file_path": "src/intcode.rs", "rank": 51, "score": 3.284004187769971 }, { "content": "\n\n pub fn get(&self, c: Coord) -> Option<&T> {\n\n let idx = self.index_for(c)?;\n\n self.grid.get(idx)\n\n }\n\n\n\n pub fn get_mut(&mut self, c: Coord) -> Option<&mut T> {\n\n let idx = self.index_for(c)?;\n\n self.grid.get_mut(idx)\n\n }\n\n\n\n pub fn flip_inplace(&mut self, axis: Axis) {\n\n let w = self.width() as isize;\n\n let h = self.height() as isize;\n\n match axis {\n\n Axis::Horizontal => {\n\n for y in 0..h / 2 {\n\n for x in 0..w {\n\n let i1 = self.index_for(Coord(x, y)).unwrap();\n\n let i2 = 
self.index_for(Coord(x, h - 1 - y)).unwrap();\n", "file_path": "src/grid.rs", "rank": 52, "score": 3.1911693120803633 }, { "content": "\n\n fn write_addr(&mut self, addr: isize, value: isize) -> IntcodeResult<()> {\n\n let addr = self.verify_addr(addr)?;\n\n self.tape[addr] = value;\n\n Ok(())\n\n }\n\n\n\n fn read_pc(&mut self) -> IntcodeResult<isize> {\n\n let value = self.read_addr(self.pc)?;\n\n self.pc += 1;\n\n Ok(value)\n\n }\n\n\n\n fn read_opcode(&mut self) -> IntcodeResult<Opcode> {\n\n let opcode = self.read_pc()?;\n\n if opcode < 0 {\n\n return Err(IntcodeError::NegativeOpcode);\n\n }\n\n\n\n let mut digits: Vec<_> = digits(opcode as usize, 10).collect();\n", "file_path": "src/intcode.rs", "rank": 53, "score": 2.8193152118319946 }, { "content": "use reqwest::blocking::Client;\n\nuse reqwest::header::{HeaderMap, COOKIE};\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::PathBuf;\n\n\n\nconst CACHE_DIR: &str = \"input_cache\";\n\nconst SESSION_FILE_PATH: &[&str] = &[\"..\", \"..\", \"session.txt\"];\n\n\n", "file_path": "src/aoc_input.rs", "rank": 54, "score": 2.776517134148186 }, { "content": " self.grid.swap(i1, i2);\n\n }\n\n }\n\n }\n\n Axis::Vertical => {\n\n for x in 0..w / 2 {\n\n for y in 0..h {\n\n let i1 = self.index_for(Coord(x, y)).unwrap();\n\n let i2 = self.index_for(Coord(w - 1 - x, y)).unwrap();\n\n self.grid.swap(i1, i2);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Grid<T>\n\nwhere\n\n T: std::cmp::PartialEq,\n", "file_path": "src/grid.rs", "rank": 55, "score": 2.646903793636519 }, { "content": " operands.push(Operand { mode, value });\n\n }\n\n\n\n Ok(Opcode {\n\n operation,\n\n operands,\n\n })\n\n }\n\n\n\n fn load(&mut self, op: &Operand) -> IntcodeResult<isize> {\n\n match op.mode {\n\n AddressingMode::AbsoluteAddress => Ok(self.read_addr(op.value)?),\n\n AddressingMode::Immediate => Ok(op.value),\n\n AddressingMode::BasePointerRelative => Ok(self.read_addr(self.bp + op.value)?),\n\n 
}\n\n }\n\n\n\n fn store(&mut self, op: &Operand, value: isize) -> IntcodeResult<()> {\n\n match op.mode {\n\n AddressingMode::AbsoluteAddress => Ok(self.write_addr(op.value, value)?),\n", "file_path": "src/intcode.rs", "rank": 56, "score": 2.5218095175661497 }, { "content": "use num_integer::Integer;\n\n\n", "file_path": "src/num.rs", "rank": 57, "score": 2.520612348795983 }, { "content": "use std::mem::MaybeUninit;\n\n\n", "file_path": "src/parse.rs", "rank": 58, "score": 2.441799427940077 }, { "content": "use num_traits::{sign::Unsigned, AsPrimitive, One, Zero};\n\nuse std::ops::{DivAssign, MulAssign, RemAssign};\n\n\n", "file_path": "src/digits.rs", "rank": 59, "score": 2.412630603248221 }, { "content": " AddressingMode::BasePointerRelative => Ok(self.write_addr(self.bp + op.value, value)?),\n\n AddressingMode::Immediate => Err(IntcodeError::InvalidStoreAddressingMode),\n\n }\n\n }\n\n\n\n fn jump_conditional(&mut self, condition: bool, target: isize) -> IntcodeResult<()> {\n\n if condition {\n\n self.verify_addr(target)?;\n\n self.pc = target;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn tick(&mut self) -> IntcodeResult<Option<StopStatus>> {\n\n let start_pc = self.pc;\n\n let opcode = self.read_opcode()?;\n\n\n\n match opcode.operation {\n\n Operation::Add => {\n\n let value = self.load(&opcode.operands[0])? + self.load(&opcode.operands[1])?;\n", "file_path": "src/intcode.rs", "rank": 60, "score": 2.058047099974774 } ]
Rust
src/cli/commands/color_commands.rs
dandycheung/pastel
3719824a56fb9eb92eb960068e513b95486756a7
use crate::colorspace::get_mixing_function; use crate::commands::prelude::*; use pastel::ColorblindnessType; use pastel::Fraction; fn clamp(lower: f64, upper: f64, x: f64) -> f64 { f64::max(f64::min(upper, x), lower) } macro_rules! color_command { ($cmd_name:ident, $config:ident, $matches:ident, $color:ident, $body:block) => { pub struct $cmd_name; impl ColorCommand for $cmd_name { fn run( &self, out: &mut Output, $matches: &ArgMatches, $config: &Config, $color: &Color, ) -> Result<()> { let output = $body; out.show_color($config, &output) } } }; } color_command!(SaturateCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.saturate(amount) }); color_command!(DesaturateCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.desaturate(amount) }); color_command!(LightenCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.lighten(amount) }); color_command!(DarkenCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.darken(amount) }); color_command!(RotateCommand, _config, matches, color, { let degrees = number_arg(matches, "degrees")?; color.rotate_hue(degrees) }); color_command!(ComplementCommand, _config, _matches, color, { color.complementary() }); color_command!(ToGrayCommand, _config, _matches, color, { color.to_gray() }); color_command!(TextColorCommand, _config, _matches, color, { color.text_color() }); color_command!(MixCommand, config, matches, color, { let mut print_spectrum = PrintSpectrum::Yes; let base = ColorArgIterator::from_color_arg( config, matches.value_of("base").expect("required argument"), &mut print_spectrum, )?; let fraction = Fraction::from(1.0 - number_arg(matches, "fraction")?); let mix = get_mixing_function(matches.value_of("colorspace").expect("required argument")); mix(&base, color, fraction) }); color_command!(ColorblindCommand, config, matches, color, { let cb_ty = matches.value_of("type").expect("required 
argument"); let cb_ty = cb_ty.to_lowercase(); let cb_ty = match cb_ty.as_ref() { "prot" => ColorblindnessType::Protanopia, "deuter" => ColorblindnessType::Deuteranopia, "trit" => ColorblindnessType::Tritanopia, &_ => { unreachable!("Unknown property"); } }; color.simulate_colorblindness(cb_ty) }); color_command!(SetCommand, config, matches, color, { let property = matches.value_of("property").expect("required argument"); let property = property.to_lowercase(); let property = property.as_ref(); let value = number_arg(matches, "value")?; match property { "red" | "green" | "blue" => { let mut rgba = color.to_rgba(); let value = clamp(0.0, 255.0, value) as u8; match property { "red" => { rgba.r = value; } "green" => { rgba.g = value; } "blue" => { rgba.b = value; } _ => unreachable!(), } Color::from_rgba(rgba.r, rgba.g, rgba.b, rgba.alpha) } "hsl-hue" | "hsl-saturation" | "hsl-lightness" => { let mut hsla = color.to_hsla(); match property { "hsl-hue" => { hsla.h = value; } "hsl-saturation" => { hsla.s = value; } "hsl-lightness" => { hsla.l = value; } _ => unreachable!(), } Color::from_hsla(hsla.h, hsla.s, hsla.l, hsla.alpha) } "lightness" | "lab-a" | "lab-b" => { let mut lab = color.to_lab(); match property { "lightness" => { lab.l = value; } "lab-a" => { lab.a = value; } "lab-b" => { lab.b = value; } _ => unreachable!(), } Color::from_lab(lab.l, lab.a, lab.b, lab.alpha) } "hue" | "chroma" => { let mut lch = color.to_lch(); match property { "hue" => { lch.h = value; } "chroma" => { lch.c = value; } _ => unreachable!(), } Color::from_lch(lch.l, lch.c, lch.h, lch.alpha) } &_ => { unreachable!("Unknown property"); } } });
use crate::colorspace::get_mixing_function; use crate::commands::prelude::*; use pastel::ColorblindnessType; use pastel::Fraction; fn clamp(lower: f64, upper: f64, x: f64) -> f64 { f64::max(f64::min(upper, x), lower) } macro_rules! color_command { ($cmd_name:ident, $config:ident, $matches:ident, $color:ident, $body:block) => { pub struct $cmd_name; impl ColorCommand for $cmd_name { fn run( &self, out: &mut Output, $matches: &ArgMatches, $config: &Config, $color: &Color,
match property { "hsl-hue" => { hsla.h = value; } "hsl-saturation" => { hsla.s = value; } "hsl-lightness" => { hsla.l = value; } _ => unreachable!(), } Color::from_hsla(hsla.h, hsla.s, hsla.l, hsla.alpha) } "lightness" | "lab-a" | "lab-b" => { let mut lab = color.to_lab(); match property { "lightness" => { lab.l = value; } "lab-a" => { lab.a = value; } "lab-b" => { lab.b = value; } _ => unreachable!(), } Color::from_lab(lab.l, lab.a, lab.b, lab.alpha) } "hue" | "chroma" => { let mut lch = color.to_lch(); match property { "hue" => { lch.h = value; } "chroma" => { lch.c = value; } _ => unreachable!(), } Color::from_lch(lch.l, lch.c, lch.h, lch.alpha) } &_ => { unreachable!("Unknown property"); } } });
) -> Result<()> { let output = $body; out.show_color($config, &output) } } }; } color_command!(SaturateCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.saturate(amount) }); color_command!(DesaturateCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.desaturate(amount) }); color_command!(LightenCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.lighten(amount) }); color_command!(DarkenCommand, _config, matches, color, { let amount = number_arg(matches, "amount")?; color.darken(amount) }); color_command!(RotateCommand, _config, matches, color, { let degrees = number_arg(matches, "degrees")?; color.rotate_hue(degrees) }); color_command!(ComplementCommand, _config, _matches, color, { color.complementary() }); color_command!(ToGrayCommand, _config, _matches, color, { color.to_gray() }); color_command!(TextColorCommand, _config, _matches, color, { color.text_color() }); color_command!(MixCommand, config, matches, color, { let mut print_spectrum = PrintSpectrum::Yes; let base = ColorArgIterator::from_color_arg( config, matches.value_of("base").expect("required argument"), &mut print_spectrum, )?; let fraction = Fraction::from(1.0 - number_arg(matches, "fraction")?); let mix = get_mixing_function(matches.value_of("colorspace").expect("required argument")); mix(&base, color, fraction) }); color_command!(ColorblindCommand, config, matches, color, { let cb_ty = matches.value_of("type").expect("required argument"); let cb_ty = cb_ty.to_lowercase(); let cb_ty = match cb_ty.as_ref() { "prot" => ColorblindnessType::Protanopia, "deuter" => ColorblindnessType::Deuteranopia, "trit" => ColorblindnessType::Tritanopia, &_ => { unreachable!("Unknown property"); } }; color.simulate_colorblindness(cb_ty) }); color_command!(SetCommand, config, matches, color, { let property = matches.value_of("property").expect("required argument"); let property = property.to_lowercase(); let 
property = property.as_ref(); let value = number_arg(matches, "value")?; match property { "red" | "green" | "blue" => { let mut rgba = color.to_rgba(); let value = clamp(0.0, 255.0, value) as u8; match property { "red" => { rgba.r = value; } "green" => { rgba.g = value; } "blue" => { rgba.b = value; } _ => unreachable!(), } Color::from_rgba(rgba.r, rgba.g, rgba.b, rgba.alpha) } "hsl-hue" | "hsl-saturation" | "hsl-lightness" => { let mut hsla = color.to_hsla();
random
[ { "content": "/// Trim a number such that it fits into the range [lower, upper].\n\npub fn clamp(lower: Scalar, upper: Scalar, x: Scalar) -> Scalar {\n\n Scalar::max(Scalar::min(upper, x), lower)\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Fraction {\n\n f: Scalar,\n\n}\n\n\n\nimpl Fraction {\n\n pub fn from(s: Scalar) -> Self {\n\n Fraction {\n\n f: clamp(0.0, 1.0, s),\n\n }\n\n }\n\n\n\n pub fn value(self) -> Scalar {\n\n self.f\n\n }\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 1, "score": 171202.1231389646 }, { "content": "fn print_board(out: &mut Output, config: &Config, mode: Mode) -> Result<()> {\n\n // These colors have been chosen/computed such that the perceived color difference (CIE delta-E\n\n // 2000) to the closest ANSI 8-bit color is maximal.\n\n let c1 = Color::from_rgb(73, 39, 50);\n\n let c2 = Color::from_rgb(16, 51, 30);\n\n let c3 = Color::from_rgb(29, 54, 90);\n\n\n\n let width = config.colorcheck_width;\n\n\n\n let mut canvas = Canvas::new(\n\n width + 2 * config.padding,\n\n 3 * width + 3 * config.padding,\n\n Brush::from_mode(Some(mode)),\n\n );\n\n\n\n canvas.draw_rect(config.padding, config.padding, width, width, &c1);\n\n\n\n canvas.draw_rect(\n\n config.padding,\n\n 2 * config.padding + width,\n", "file_path": "src/cli/commands/colorcheck.rs", "rank": 2, "score": 164284.16409518497 }, { "content": "/// Re-arrange the sequence of colors such that the minimal difference between a given color and\n\n/// any of its predecessors is maximized.\n\n///\n\n/// Note: this is only a heuristic and will not yield optimal results (especially at the end of\n\n/// the sequence).\n\n///\n\n/// See: <https://en.wikipedia.org/wiki/Farthest-first_traversal>\n\npub fn rearrange_sequence(colors: &mut Vec<Color>, metric: DistanceMetric) {\n\n let distance = |c1: &Color, c2: &Color| match metric {\n\n DistanceMetric::CIE76 => c1.distance_delta_e_cie76(c2),\n\n DistanceMetric::CIEDE2000 => c1.distance_delta_e_ciede2000(c2),\n\n };\n\n\n\n // 
vector where the i-th element contains the minimum distance to the colors from 0 to i-1.\n\n let mut min_distances = vec![i32::max_value(); colors.len()];\n\n\n\n for i in 1..colors.len() {\n\n let mut max_i = colors.len();\n\n let mut max_d = i32::min_value();\n\n\n\n for j in i..colors.len() {\n\n min_distances[j] =\n\n min_distances[j].min((distance(&colors[j], &colors[i - 1]) * 1000.0) as i32);\n\n\n\n if min_distances[j] > max_d {\n\n max_i = j;\n\n max_d = min_distances[j];\n\n }\n\n }\n\n\n\n colors.swap(i, max_i);\n\n min_distances.swap(i, max_i);\n\n }\n\n}\n\n\n", "file_path": "src/distinct.rs", "rank": 3, "score": 160470.71012037623 }, { "content": "/// Print a color spectrum to STDERR.\n\npub fn print_colorspectrum(config: &Config) -> Result<()> {\n\n let width = config.colorpicker_width;\n\n\n\n let mut canvas = Canvas::new(\n\n width + 2 * config.padding,\n\n width + 2 * config.padding,\n\n Brush::from_environment(Stream::Stderr),\n\n );\n\n canvas.draw_rect(\n\n config.padding,\n\n config.padding,\n\n width + 2,\n\n width + 2,\n\n &Color::white(),\n\n );\n\n\n\n for y in 0..width {\n\n for x in 0..width {\n\n let rx = (x as f64) / (width as f64);\n\n let ry = (y as f64) / (width as f64);\n", "file_path": "src/cli/colorpicker.rs", "rank": 4, "score": 145547.0687778088 }, { "content": "/// Returns a list of named colors, sorted by the perceived distance to the given color\n\npub fn similar_colors(color: &Color) -> Vec<&NamedColor> {\n\n let mut colors: Vec<&NamedColor> = NAMED_COLORS.iter().collect();\n\n colors.sort_by_key(|nc| (1000.0 * nc.color.distance_delta_e_ciede2000(color)) as i32);\n\n colors.dedup_by(|n1, n2| n1.color == n2.color);\n\n colors\n\n}\n", "file_path": "src/cli/utility.rs", "rank": 5, "score": 144192.99905087016 }, { "content": "pub fn number_arg(matches: &ArgMatches, name: &str) -> Result<f64> {\n\n let value_str = matches.value_of(name).expect(\"required argument\");\n\n value_str\n\n .parse::<f64>()\n\n .map_err(|_| 
PastelError::CouldNotParseNumber(value_str.into()))\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum PrintSpectrum {\n\n Yes,\n\n No,\n\n}\n\n\n\npub enum ColorArgIterator<'a> {\n\n FromPositionalArguments(&'a Config<'a>, Values<'a>, PrintSpectrum),\n\n FromStdin,\n\n}\n\n\n\nimpl<'a> ColorArgIterator<'a> {\n\n pub fn from_args(config: &'a Config, args: Option<Values<'a>>) -> Result<Self> {\n", "file_path": "src/cli/commands/io.rs", "rank": 6, "score": 137844.99762734436 }, { "content": "fn get_h_prime_fn(x: f64, y: f64) -> f64 {\n\n if x == 0.0 && y == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n let mut hue_angle = radians_to_degrees(x.atan2(y));\n\n\n\n if hue_angle < 0.0 {\n\n hue_angle += 360.0;\n\n }\n\n\n\n hue_angle\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 7, "score": 136470.45271845092 }, { "content": "pub fn parse_color(input: &str) -> Option<Color> {\n\n alt((\n\n all_consuming(parse_hex),\n\n all_consuming(parse_numeric_rgb),\n\n all_consuming(parse_percentage_rgb),\n\n all_consuming(parse_hsl),\n\n all_consuming(parse_gray),\n\n all_consuming(parse_lab),\n\n all_consuming(parse_lch),\n\n all_consuming(parse_named),\n\n ))(input.trim())\n\n .ok()\n\n .map(|(_, c)| c)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 8, "score": 133311.25902068208 }, { "content": "fn blue_red_yellow(f: f64) -> Color {\n\n let blue = Color::from_rgb(0, 0, 120);\n\n let red = Color::from_rgb(224, 0, 119);\n\n let yellow = Color::from_rgb(255, 255, 0);\n\n\n\n if f < 0.5 {\n\n blue.mix::<HSLA>(&red, Fraction::from(2.0 * f))\n\n } else {\n\n red.mix::<HSLA>(&yellow, Fraction::from(2.0 * (f - 0.5)))\n\n }\n\n}\n\n\n", "file_path": "src/cli/commands/distinct.rs", "rank": 9, "score": 130542.23019347189 }, { "content": "pub fn distinct_colors(\n\n count: usize,\n\n distance_metric: DistanceMetric,\n\n fixed_colors: Vec<Color>,\n\n callback: &mut dyn FnMut(&IterationStatistics),\n\n) -> (Vec<Color>, DistanceResult) {\n\n assert!(count > 1);\n\n 
assert!(fixed_colors.len() <= count);\n\n\n\n let num_fixed_colors = fixed_colors.len();\n\n let mut colors = fixed_colors;\n\n\n\n for _ in num_fixed_colors..count {\n\n colors.push(random::strategies::UniformRGB.generate());\n\n }\n\n\n\n let mut annealing = SimulatedAnnealing::new(\n\n &colors,\n\n SimulationParameters {\n\n initial_temperature: 3.0,\n", "file_path": "src/distinct.rs", "rank": 10, "score": 128756.39556364014 }, { "content": "pub fn key_function(sort_order: &str, color: &Color) -> i32 {\n\n match sort_order {\n\n \"brightness\" => (color.brightness() * 1000.0) as i32,\n\n \"luminance\" => (color.luminance() * 1000.0) as i32,\n\n \"hue\" => (color.to_lch().h * 1000.0) as i32,\n\n \"chroma\" => (color.to_lch().c * 1000.0) as i32,\n\n \"random\" => random(),\n\n _ => unreachable!(\"Unknown sort order\"),\n\n }\n\n}\n\n\n\nimpl GenericCommand for SortCommand {\n\n fn run(&self, out: &mut Output, matches: &ArgMatches, config: &Config) -> Result<()> {\n\n let sort_order = matches.value_of(\"sort-order\").expect(\"required argument\");\n\n\n\n let mut colors: Vec<Color> = vec![];\n\n for color in ColorArgIterator::from_args(config, matches.values_of(\"color\"))? 
{\n\n colors.push(color?);\n\n }\n\n\n", "file_path": "src/cli/commands/sort.rs", "rank": 11, "score": 127693.98868103817 }, { "content": "pub fn cie76(c1: &Lab, c2: &Lab) -> f64 {\n\n ((c1.l - c2.l).powi(2) + (c1.a - c2.a).powi(2) + (c1.b - c2.b).powi(2)).sqrt()\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 12, "score": 120518.11293140583 }, { "content": "pub fn ciede2000(color1: &Lab, color2: &Lab) -> f64 {\n\n let ksub_l = 1.0;\n\n let ksub_c = 1.0;\n\n let ksub_h = 1.0;\n\n\n\n let delta_l_prime = color2.l - color1.l;\n\n\n\n let l_bar = (color1.l + color2.l) / 2.0;\n\n\n\n let c1 = (color1.a.powi(2) + color1.b.powi(2)).sqrt();\n\n let c2 = (color2.a.powi(2) + color2.b.powi(2)).sqrt();\n\n\n\n let c_bar = (c1 + c2) / 2.0;\n\n\n\n let a_prime_1 = color1.a\n\n + (color1.a / 2.0) * (1.0 - (c_bar.powi(7) / (c_bar.powi(7) + 25f64.powi(7))).sqrt());\n\n let a_prime_2 = color2.a\n\n + (color2.a / 2.0) * (1.0 - (c_bar.powi(7) / (c_bar.powi(7) + 25f64.powi(7))).sqrt());\n\n\n\n let c_prime_1 = (a_prime_1.powi(2) + color1.b.powi(2)).sqrt();\n", "file_path": "src/delta_e.rs", "rank": 13, "score": 120518.11293140583 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"parse_hex\", |b| {\n\n b.iter(|| {\n\n parse_color(\"#ff0077\");\n\n })\n\n });\n\n c.bench_function(\"parse_hex_short\", |b| {\n\n b.iter(|| {\n\n parse_color(\"#f07\");\n\n })\n\n });\n\n c.bench_function(\"parse_rgb\", |b| {\n\n b.iter(|| {\n\n parse_color(\"rgb(255, 125, 0)\");\n\n })\n\n });\n\n c.bench_function(\"parse_hsl\", |b| b.iter(|| parse_color(\"hsl(280,20%,50%)\")));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/parse_color.rs", "rank": 14, "score": 117248.22021137728 }, { "content": "fn rgba(r: u8, g: u8, b: u8, a: f64) -> Color {\n\n Color::from_rgba(r, g, b, a)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 15, "score": 117233.53559988114 }, { "content": "pub trait ColorCommand {\n\n 
fn run(\n\n &self,\n\n out: &mut Output,\n\n matches: &ArgMatches,\n\n config: &Config,\n\n color: &Color,\n\n ) -> Result<()>;\n\n}\n", "file_path": "src/cli/commands/traits.rs", "rank": 16, "score": 105851.62450482087 }, { "content": "fn degrees_to_radians(degrees: f64) -> f64 {\n\n degrees * (f64::consts::PI / 180.0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{ciede2000, Lab};\n\n\n\n fn round(val: f64) -> f64 {\n\n let rounded = val * 10000_f64;\n\n rounded.round() / 10000_f64\n\n }\n\n\n\n fn assert_delta_e(expected: f64, lab1: &[f64; 3], lab2: &[f64; 3]) {\n\n let color1 = Lab {\n\n l: lab1[0],\n\n a: lab1[1],\n\n b: lab1[2],\n\n alpha: 1.0,\n\n };\n", "file_path": "src/delta_e.rs", "rank": 17, "score": 104960.4045008853 }, { "content": "fn radians_to_degrees(radians: f64) -> f64 {\n\n radians * (180.0 / f64::consts::PI)\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 18, "score": 104960.4045008853 }, { "content": "fn get_delta_h_prime(c1: f64, c2: f64, h_prime_1: f64, h_prime_2: f64) -> f64 {\n\n if 0.0 == c1 || 0.0 == c2 {\n\n return 0.0;\n\n }\n\n\n\n if (h_prime_1 - h_prime_2).abs() <= 180.0 {\n\n return h_prime_2 - h_prime_1;\n\n }\n\n\n\n if h_prime_2 <= h_prime_1 {\n\n h_prime_2 - h_prime_1 + 360.0\n\n } else {\n\n h_prime_2 - h_prime_1 - 360.0\n\n }\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 19, "score": 103702.26892221288 }, { "content": "fn get_upcase_h_bar_prime(h_prime_1: f64, h_prime_2: f64) -> f64 {\n\n if (h_prime_1 - h_prime_2).abs() > 180.0 {\n\n return (h_prime_1 + h_prime_2 + 360.0) / 2.0;\n\n }\n\n\n\n (h_prime_1 + h_prime_2) / 2.0\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 20, "score": 102679.44193213448 }, { "content": "fn get_r_sub_t(c_bar_prime: f64, upcase_h_bar_prime: f64) -> f64 {\n\n -2.0 * (c_bar_prime.powi(7) / (c_bar_prime.powi(7) + 25f64.powi(7))).sqrt()\n\n * (degrees_to_radians(60.0 * (-(((upcase_h_bar_prime - 275.0) / 25.0).powi(2))).exp()))\n\n .sin()\n\n}\n\n\n", "file_path": 
"src/delta_e.rs", "rank": 21, "score": 101114.70339785097 }, { "content": "fn get_upcase_t(upcase_h_bar_prime: f64) -> f64 {\n\n 1.0 - 0.17 * (degrees_to_radians(upcase_h_bar_prime - 30.0)).cos()\n\n + 0.24 * (degrees_to_radians(2.0 * upcase_h_bar_prime)).cos()\n\n + 0.32 * (degrees_to_radians(3.0 * upcase_h_bar_prime + 6.0)).cos()\n\n - 0.20 * (degrees_to_radians(4.0 * upcase_h_bar_prime - 63.0)).cos()\n\n}\n\n\n", "file_path": "src/delta_e.rs", "rank": 22, "score": 100848.32754546811 }, { "content": "/// Like `%`, but always positive.\n\npub fn mod_positive(x: Scalar, y: Scalar) -> Scalar {\n\n (x % y + y) % y\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 23, "score": 97292.4690851545 }, { "content": "/// Run an external color picker tool (e.g. gpick or xcolor) and get the output as a string.\n\npub fn run_external_colorpicker(picker: Option<&str>) -> Result<String> {\n\n for tool in COLOR_PICKER_TOOLS\n\n .iter()\n\n .filter(|t| picker.map_or(true, |p| t.command.eq_ignore_ascii_case(p)))\n\n {\n\n let result = Command::new(tool.command).args(tool.version_args).output();\n\n\n\n let tool_is_available = match result {\n\n Ok(ref output) => {\n\n output.stdout.starts_with(tool.version_output_starts_with)\n\n || output.stderr.starts_with(tool.version_output_starts_with)\n\n }\n\n _ => false,\n\n };\n\n\n\n if tool_is_available {\n\n let result = Command::new(tool.command).args(tool.args).output()?;\n\n if !result.status.success() {\n\n return Err(PastelError::ColorPickerExecutionError(\n\n tool.command.to_string(),\n", "file_path": "src/cli/colorpicker.rs", "rank": 24, "score": 95882.49734814632 }, { "content": "pub fn get_mixing_function(\n\n colorspace_name: &str,\n\n) -> Box<dyn Fn(&Color, &Color, Fraction) -> Color> {\n\n match colorspace_name.to_lowercase().as_ref() {\n\n \"rgb\" => Box::new(|c1: &Color, c2: &Color, f: Fraction| c1.mix::<RGBA<f64>>(c2, f)),\n\n \"hsl\" => Box::new(|c1: &Color, c2: &Color, f: Fraction| c1.mix::<HSLA>(c2, f)),\n\n \"lab\" => 
Box::new(|c1: &Color, c2: &Color, f: Fraction| c1.mix::<Lab>(c2, f)),\n\n \"lch\" => Box::new(|c1: &Color, c2: &Color, f: Fraction| c1.mix::<LCh>(c2, f)),\n\n _ => unreachable!(\"Unknown color space\"),\n\n }\n\n}\n", "file_path": "src/cli/colorspace.rs", "rank": 25, "score": 93143.80776775708 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ColorStop {\n\n color: Color,\n\n position: Fraction,\n\n}\n\n\n\n/// The representation of a color scale.\n\n/// The first `ColorStop` (position 0.0) defines the left end color.\n\n/// The last `ColorStop` (position 1.0) defines the right end color.\n\n#[derive(Debug, Clone)]\n\npub struct ColorScale {\n\n color_stops: Vec<ColorStop>,\n\n}\n\n\n\nimpl ColorScale {\n\n /// Create an empty `ColorScale`.\n\n pub fn empty() -> Self {\n\n Self {\n\n color_stops: Vec::new(),\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 26, "score": 92751.43319886108 }, { "content": "fn run() -> Result<ExitCode> {\n\n let app = cli::build_cli();\n\n let global_matches = app.get_matches();\n\n\n\n let interactive_mode = atty::is(Stream::Stdout);\n\n\n\n let color_mode = if global_matches.is_present(\"force-color\") {\n\n Some(ansi::Mode::TrueColor)\n\n } else {\n\n match global_matches\n\n .value_of(\"color-mode\")\n\n .expect(\"required argument\")\n\n {\n\n \"24bit\" => Some(ansi::Mode::TrueColor),\n\n \"8bit\" => Some(ansi::Mode::Ansi8Bit),\n\n \"off\" => None,\n\n \"auto\" => {\n\n if interactive_mode {\n\n let env_color_mode = std::env::var(\"PASTEL_COLOR_MODE\").ok();\n\n match env_color_mode.as_deref() {\n", "file_path": "src/cli/main.rs", "rank": 27, "score": 92069.71802726941 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn gdbus_parse_color(raw: String) -> Result<String, &'static str> {\n\n const PARSE_ERROR: &str = \"Unexpected gdbus output format\";\n\n let rgb = raw\n\n .split('(')\n\n .nth(2)\n\n .ok_or(PARSE_ERROR)?\n\n .split(')')\n\n .next()\n\n .ok_or(PARSE_ERROR)?;\n\n let rgb = rgb\n\n .split(',')\n\n .map(|v| 
v.trim().parse::<f64>())\n\n .collect::<Result<Vec<_>, _>>()\n\n .map_err(|_| PARSE_ERROR)?;\n\n if rgb.len() != 3 {\n\n return Err(PARSE_ERROR);\n\n }\n\n Ok(format!(\n\n \"rgb({}%,{}%,{}%)\",\n\n rgb[0] * 100.,\n\n rgb[1] * 100.,\n\n rgb[2] * 100.\n\n ))\n\n}\n", "file_path": "src/cli/colorpicker_tools.rs", "rank": 28, "score": 89527.85805420799 }, { "content": "#[cfg(windows)]\n\npub fn get_colormode() -> Option<Mode> {\n\n use std::env;\n\n let env_nocolor = env::var_os(\"NO_COLOR\");\n\n match env_nocolor {\n\n Some(_) => None,\n\n // Assume 24bit support on Windows\n\n None => Some(Mode::TrueColor),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Brush {\n\n mode: Option<Mode>,\n\n}\n\n\n\nimpl Brush {\n\n pub fn from_mode(mode: Option<Mode>) -> Self {\n\n Brush { mode }\n\n }\n\n\n", "file_path": "src/ansi.rs", "rank": 29, "score": 87579.7248637676 }, { "content": "pub fn build_cli() -> Command<'static> {\n\n let color_arg = Arg::new(\"color\")\n\n .help(\n\n \"Colors can be specified in many different formats, such as #RRGGBB, RRGGBB, \\\n\n #RGB, 'rgb(…, …, …)', 'hsl(…, …, …)', 'gray(…)' or simply by the name of the \\\n\n color. The identifier '-' can be used to read a single color from standard input. \\\n\n Also, the special identifier 'pick' can be used to run an external color picker \\\n\n to choose a color. 
If no color argument is specified, colors will be read from \\\n\n standard input.\\n\\\n\n Examples (all of these specify the same color):\\\n\n \\n - lightslategray\\\n\n \\n - '#778899'\\\n\n \\n - 778899\\\n\n \\n - 789\\\n\n \\n - 'rgb(119, 136, 153)'\\\n\n \\n - '119,136,153'\\\n\n \\n - 'hsl(210, 14.3%, 53.3%)'\\n\\\n\n Alpha transparency is also supported:\\\n\n \\n - '#77889980'\\\n\n \\n - 'rgba(119, 136, 153, 0.5)'\\\n", "file_path": "src/cli/cli.rs", "rank": 30, "score": 85368.92536475374 }, { "content": "pub trait ColorSpace {\n\n fn from_color(c: &Color) -> Self;\n\n fn into_color(self) -> Color;\n\n\n\n fn mix(&self, other: &Self, fraction: Fraction) -> Self;\n\n}\n", "file_path": "src/colorspace.rs", "rank": 31, "score": 85271.80117534977 }, { "content": "pub trait AnsiColor {\n\n fn from_ansi_8bit(code: u8) -> Self;\n\n fn to_ansi_8bit(&self) -> u8;\n\n\n\n fn to_ansi_sequence(&self, mode: Mode) -> String;\n\n}\n\n\n\nimpl AnsiColor for Color {\n\n /// Create a color from an 8-bit ANSI escape code\n\n ///\n\n /// See: <https://en.wikipedia.org/wiki/ANSI_escape_code>\n\n fn from_ansi_8bit(code: u8) -> Color {\n\n match code {\n\n 0 => Color::black(),\n\n 1 => Color::maroon(),\n\n 2 => Color::green(),\n\n 3 => Color::olive(),\n\n 4 => Color::navy(),\n\n 5 => Color::purple(),\n\n 6 => Color::teal(),\n", "file_path": "src/ansi.rs", "rank": 32, "score": 85271.80117534977 }, { "content": "fn rgb(r: u8, g: u8, b: u8) -> Color {\n\n Color::from_rgb(r, g, b)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 33, "score": 80569.82541659176 }, { "content": "#[test]\n\nfn color_reads_colors_from_args() {\n\n pastel()\n\n .arg(\"color\")\n\n .arg(\"red\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,100.0%,50.0%)\\n\");\n\n\n\n pastel()\n\n .arg(\"color\")\n\n .arg(\"red\")\n\n .arg(\"blue\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,100.0%,50.0%)\\nhsl(240,100.0%,50.0%)\\n\");\n\n\n\n pastel().arg(\"color\").arg(\"no 
color\").assert().failure();\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 34, "score": 80163.31632197197 }, { "content": "#[test]\n\nfn color_reads_colors_from_stdin() {\n\n pastel()\n\n .arg(\"color\")\n\n .write_stdin(\"red\\nblue\\n\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,100.0%,50.0%)\\nhsl(240,100.0%,50.0%)\\n\");\n\n\n\n pastel()\n\n .arg(\"color\")\n\n .write_stdin(\"no color\")\n\n .assert()\n\n .failure();\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 35, "score": 80163.31632197197 }, { "content": "fn parse_hex(input: &str) -> IResult<&str, Color> {\n\n let (input, _) = opt_hash_char(input)?;\n\n let (input, hex_chars) = hex_digit1(input)?;\n\n match hex_chars.len() {\n\n // RRGGBB\n\n 6 => {\n\n let r = hex_to_u8_unsafe(&hex_chars[0..2]);\n\n let g = hex_to_u8_unsafe(&hex_chars[2..4]);\n\n let b = hex_to_u8_unsafe(&hex_chars[4..6]);\n\n Ok((input, rgb(r, g, b)))\n\n }\n\n // RGB\n\n 3 => {\n\n let r = hex_to_u8_unsafe(&hex_chars[0..1]);\n\n let g = hex_to_u8_unsafe(&hex_chars[1..2]);\n\n let b = hex_to_u8_unsafe(&hex_chars[2..3]);\n\n let r = r * 16 + r;\n\n let g = g * 16 + g;\n\n let b = b * 16 + b;\n\n Ok((input, rgb(r, g, b)))\n", "file_path": "src/parser.rs", "rank": 36, "score": 79574.71329894724 }, { "content": "fn parse_lch(input: &str) -> IResult<&str, Color> {\n\n let (input, _) = opt(tag_no_case(\"cie\"))(input)?;\n\n let (input, _) = tag_no_case(\"lch(\")(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, l) = double(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, c) = double(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, h) = parse_angle(input)?;\n\n let (input, alpha) = parse_alpha(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = char(')')(input)?;\n\n\n\n let c = Color::from_lch(l, c, h, alpha);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 37, "score": 79574.71329894724 }, { "content": "fn 
parse_gray(input: &str) -> IResult<&str, Color> {\n\n let (input, _) = tag(\"gray(\")(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, g) = verify(alt((parse_percentage, double)), |&d| d >= 0.)(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = char(')')(input)?;\n\n\n\n let c = Color::from_rgb_float(g, g, g);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 38, "score": 79574.71329894724 }, { "content": "fn parse_named(input: &str) -> IResult<&str, Color> {\n\n let (input, color) = all_consuming(alpha1)(input)?;\n\n let nc = NAMED_COLORS\n\n .iter()\n\n .find(|nc| color.to_lowercase() == nc.name);\n\n\n\n match nc {\n\n None => Err(Err::Error(nom::error::Error::new(\n\n \"Couldn't find matching named color\",\n\n ErrorKind::Alpha,\n\n ))),\n\n Some(nc) => Ok((input, nc.color.clone())),\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 39, "score": 79574.71329894724 }, { "content": "fn parse_hsl(input: &str) -> IResult<&str, Color> {\n\n let (input, _) = alt((tag(\"hsl(\"), tag(\"hsla(\")))(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, h) = parse_angle(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, s) = parse_percentage(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, l) = parse_percentage(input)?;\n\n let (input, alpha) = parse_alpha(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = char(')')(input)?;\n\n\n\n let c = Color::from_hsla(h, s, l, alpha);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 40, "score": 79574.71329894724 }, { "content": "fn parse_lab(input: &str) -> IResult<&str, Color> {\n\n let (input, _) = opt(tag_no_case(\"cie\"))(input)?;\n\n let (input, _) = tag_no_case(\"lab(\")(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, l) = double(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, a) = double(input)?;\n\n let (input, _) = 
parse_separator(input)?;\n\n let (input, b) = double(input)?;\n\n let (input, alpha) = parse_alpha(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = char(')')(input)?;\n\n\n\n let c = Color::from_lab(l, a, b, alpha);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 41, "score": 79574.71329894724 }, { "content": "fn parse_rads(input: &str) -> IResult<&str, f64> {\n\n let (input, rads) = double(input)?;\n\n let (input, _) = tag(\"rad\")(input)?;\n\n Ok((input, rads * 180. / std::f64::consts::PI))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 42, "score": 79495.51853174574 }, { "content": "fn parse_degrees(input: &str) -> IResult<&str, f64> {\n\n let (input, d) = double(input)?;\n\n let (input, _) = alt((tag(\"°\"), tag(\"deg\"), tag(\"\")))(input)?;\n\n Ok((input, d))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 43, "score": 79495.51853174574 }, { "content": "fn parse_grads(input: &str) -> IResult<&str, f64> {\n\n let (input, grads) = double(input)?;\n\n let (input, _) = tag(\"grad\")(input)?;\n\n Ok((input, grads * 360. 
/ 400.))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 44, "score": 79495.51853174574 }, { "content": "fn parse_percentage(input: &str) -> IResult<&str, f64> {\n\n let (input, percent) = double(input)?;\n\n let (input, _) = char('%')(input)?;\n\n Ok((input, percent / 100.))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 45, "score": 79495.51853174574 }, { "content": "fn parse_angle(input: &str) -> IResult<&str, f64> {\n\n alt((parse_turns, parse_grads, parse_rads, parse_degrees))(input)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 46, "score": 79495.51853174574 }, { "content": "fn parse_turns(input: &str) -> IResult<&str, f64> {\n\n let (input, turns) = double(input)?;\n\n let (input, _) = tag(\"turn\")(input)?;\n\n Ok((input, turns * 360.))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 47, "score": 79495.51853174574 }, { "content": "fn parse_numeric_rgb(input: &str) -> IResult<&str, Color> {\n\n let (input, prefixed) = opt(alt((tag(\"rgb(\"), tag(\"rgba(\"))))(input)?;\n\n let is_prefixed = prefixed.is_some();\n\n let (input, _) = space0(input)?;\n\n let (input, r) = double(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, g) = double(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, b) = double(input)?;\n\n let (input, alpha) = parse_alpha(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = cond(is_prefixed, char(')'))(input)?;\n\n\n\n let r = r / 255.;\n\n let g = g / 255.;\n\n let b = b / 255.;\n\n let c = Color::from_rgba_float(r, g, b, alpha);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 48, "score": 77689.14677140843 }, { "content": "fn parse_percentage_rgb(input: &str) -> IResult<&str, Color> {\n\n let (input, prefixed) = opt(alt((tag(\"rgb(\"), tag(\"rgba(\"))))(input)?;\n\n let is_prefixed = prefixed.is_some();\n\n let (input, _) = space0(input)?;\n\n let (input, r) = parse_percentage(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let 
(input, g) = parse_percentage(input)?;\n\n let (input, _) = parse_separator(input)?;\n\n let (input, b) = parse_percentage(input)?;\n\n let (input, alpha) = parse_alpha(input)?;\n\n let (input, _) = space0(input)?;\n\n let (input, _) = cond(is_prefixed, char(')'))(input)?;\n\n\n\n let c = Color::from_rgba_float(r, g, b, alpha);\n\n\n\n Ok((input, c))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 49, "score": 77689.14677140843 }, { "content": "fn write_stderr(c: Color, title: &str, message: &str) {\n\n writeln!(\n\n io::stderr(),\n\n \"{}: {}\",\n\n Brush::from_environment(Stream::Stdout).paint(format!(\"[{}]\", title), c),\n\n message\n\n )\n\n .ok();\n\n}\n\n\n", "file_path": "src/cli/main.rs", "rank": 50, "score": 75968.57067224912 }, { "content": "fn parse_alpha<'a>(input: &'a str) -> IResult<&'a str, f64> {\n\n let (input, alpha) = opt(|input: &'a str| {\n\n let (input, _) = parse_separator(input)?;\n\n alt((parse_percentage, double))(input)\n\n })(input)?;\n\n Ok((input, alpha.unwrap_or(1.0)))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 51, "score": 74938.65598089871 }, { "content": "/// Linearly interpolate between two values.\n\npub fn interpolate(a: Scalar, b: Scalar, fraction: Fraction) -> Scalar {\n\n a + fraction.value() * (b - a)\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 52, "score": 74682.56555850516 }, { "content": "/// Linearly interpolate between two angles. Always take the shortest path\n\n/// along the circle.\n\npub fn interpolate_angle(a: Scalar, b: Scalar, fraction: Fraction) -> Scalar {\n\n let paths = [(a, b), (a, b + 360.0), (a + 360.0, b)];\n\n\n\n let dist = |&(x, y): &(Scalar, Scalar)| (x - y).abs();\n\n let shortest = paths\n\n .iter()\n\n .min_by(|p1, p2| dist(p1).partial_cmp(&dist(p2)).unwrap_or(Ordering::Less))\n\n .unwrap();\n\n\n\n mod_positive(interpolate(shortest.0, shortest.1, fraction), 360.0)\n\n}\n\n\n\n// `format!`-style format strings only allow specifying a fixed floating\n\n// point precision, e.g. 
`{:.3}` to print 3 decimal places. This always\n\n// displays trailing zeroes, while web colors generally omit them. For\n\n// example, we'd prefer to print `0.5` as `0.5` instead of `0.500`.\n\n//\n\n// Note that this will round using omitted decimal places:\n\n//\n\n// MaxPrecision::wrap(3, 0.5004) //=> 0.500\n", "file_path": "src/helper.rs", "rank": 53, "score": 72791.21948714311 }, { "content": "fn print_colors(\n\n out: &mut dyn Write,\n\n brush: Brush,\n\n colors: &[Color],\n\n closest_pair: Option<(usize, usize)>,\n\n) -> Result<()> {\n\n for (ci, c) in colors.iter().enumerate() {\n\n let tc = c.text_color();\n\n let mut style = tc.ansi_style();\n\n style.on(c);\n\n\n\n if let Some(pair) = closest_pair {\n\n if pair.0 == ci || pair.1 == ci {\n\n style.bold(true);\n\n style.underline(true);\n\n }\n\n }\n\n\n\n write!(out, \"{} \", brush.paint(c.to_rgb_hex_string(false), style))?;\n\n }\n\n writeln!(out)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/distinct.rs", "rank": 54, "score": 72765.5961505107 }, { "content": "fn print_iteration(out: &mut dyn Write, brush: Brush, stats: &IterationStatistics) -> Result<()> {\n\n let result = stats.distance_result;\n\n write!(\n\n out,\n\n \"[{:10.}] D_mean = {:<6.2}; D_min = {:<6.2}; T = {:.6} \",\n\n stats.iteration,\n\n result.mean_closest_distance,\n\n result.min_closest_distance,\n\n stats.temperature\n\n )?;\n\n print_colors(out, brush, &stats.colors, Some(result.closest_pair))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/distinct.rs", "rank": 55, "score": 65033.31353576126 }, { "content": "fn named_color(name: &'static str, r: u8, g: u8, b: u8) -> NamedColor {\n\n NamedColor {\n\n name,\n\n color: Color::from_rgb(r, g, b),\n\n }\n\n}\n\n\n\npub static NAMED_COLORS: Lazy<[NamedColor; 148]> = Lazy::new(|| {\n\n [\n\n named_color(\"aliceblue\", 240, 248, 255),\n\n named_color(\"antiquewhite\", 250, 235, 215),\n\n named_color(\"aqua\", 0, 255, 255),\n\n named_color(\"aquamarine\", 127, 255, 
212),\n\n named_color(\"azure\", 240, 255, 255),\n\n named_color(\"beige\", 245, 245, 220),\n\n named_color(\"bisque\", 255, 228, 196),\n\n named_color(\"black\", 0, 0, 0),\n\n named_color(\"blanchedalmond\", 255, 235, 205),\n\n named_color(\"blue\", 0, 0, 255),\n\n named_color(\"blueviolet\", 138, 43, 226),\n", "file_path": "src/named.rs", "rank": 56, "score": 63542.84268512387 }, { "content": "type Scalar = f64;\n\n\n\n#[derive(Clone)]\n\npub struct DistanceResult {\n\n /// The closest distance between any two colors\n\n pub min_closest_distance: Scalar,\n\n\n\n /// The average over all nearest-neighbor distances\n\n pub mean_closest_distance: Scalar,\n\n\n\n /// Indices of the colors that were closest to each other\n\n pub closest_pair: (usize, usize),\n\n\n\n /// The closest distance and the index of the nearest neighbor\n\n pub closest_distances: Vec<(Scalar, usize)>,\n\n\n\n pub distance_metric: DistanceMetric,\n\n\n\n /// The number of colors that are fixed and cannot be changed. 
The actual colors are the first\n\n /// `num_fixed_colors` elements in the `colors` array.\n", "file_path": "src/distinct.rs", "rank": 57, "score": 56527.60538613514 }, { "content": "pub trait RandomizationStrategy {\n\n fn generate(&mut self) -> Color {\n\n self.generate_with(&mut thread_rng())\n\n }\n\n\n\n fn generate_with(&mut self, r: &mut dyn RngCore) -> Color;\n\n}\n\n\n\npub mod strategies {\n\n use super::RandomizationStrategy;\n\n use crate::Color;\n\n\n\n use rand::prelude::*;\n\n\n\n pub struct Vivid;\n\n\n\n impl RandomizationStrategy for Vivid {\n\n fn generate_with(&mut self, rng: &mut dyn RngCore) -> Color {\n\n let hue = rng.gen::<f64>() * 360.0;\n\n let saturation = 0.2 + 0.6 * rng.gen::<f64>();\n", "file_path": "src/random.rs", "rank": 58, "score": 54679.5923686156 }, { "content": "pub trait ToAnsiStyle {\n\n fn ansi_style(&self) -> Style;\n\n}\n\n\n\nimpl ToAnsiStyle for Color {\n\n fn ansi_style(&self) -> Style {\n\n self.clone().into()\n\n }\n\n}\n\n\n", "file_path": "src/ansi.rs", "rank": 59, "score": 54679.5923686156 }, { "content": "pub trait GenericCommand {\n\n fn run(&self, out: &mut Output, matches: &ArgMatches, config: &Config) -> Result<()>;\n\n}\n\n\n", "file_path": "src/cli/commands/traits.rs", "rank": 60, "score": 52131.05378191799 }, { "content": "fn main() {\n\n let var = std::env::var_os(\"SHELL_COMPLETIONS_DIR\").or_else(|| std::env::var_os(\"OUT_DIR\"));\n\n let outdir = match var {\n\n None => return,\n\n Some(outdir) => outdir,\n\n };\n\n fs::create_dir_all(&outdir).unwrap();\n\n\n\n let mut cmd = build_cli();\n\n\n\n for shell in [Shell::Bash, Shell::Zsh, Shell::Fish, Shell::PowerShell] {\n\n generate_to(shell, &mut cmd, crate_name!(), &outdir).unwrap();\n\n }\n\n\n\n println!(\"cargo:rustc-cfg=pastel_normal_build\");\n\n}\n", "file_path": "build.rs", "rank": 61, "score": 49459.682883690984 }, { "content": "#[test]\n\nfn test_interpolate() {\n\n assert_eq!(0.0, interpolate_angle(0.0, 90.0, Fraction::from(0.0)));\n\n 
assert_eq!(45.0, interpolate_angle(0.0, 90.0, Fraction::from(0.5)));\n\n assert_eq!(90.0, interpolate_angle(0.0, 90.0, Fraction::from(1.0)));\n\n assert_eq!(90.0, interpolate_angle(0.0, 90.0, Fraction::from(1.1)));\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 62, "score": 46195.19505703078 }, { "content": "fn main() {\n\n let result = run();\n\n match result {\n\n Err(PastelError::StdoutClosed) => {}\n\n Err(err) => {\n\n write_stderr(Color::red(), \"pastel error\", &err.message());\n\n std::process::exit(1);\n\n }\n\n Ok(exit_code) => {\n\n std::process::exit(exit_code);\n\n }\n\n }\n\n}\n", "file_path": "src/cli/main.rs", "rank": 63, "score": 46195.19505703078 }, { "content": "#[test]\n\nfn parse_lch_syntax() {\n\n assert_eq!(\n\n Some(Color::from_lch(12.43, -35.5, 43.4, 1.0)),\n\n parse_color(\"Lch(12.43,-35.5,43.4)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lch(15.0, -23.0, 43.0, 0.5)),\n\n parse_color(\"lch(15,-23,43,0.5)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lch(15.0, 23.0, -43.0, 1.0)),\n\n parse_color(\"CIELch(15,23,-43)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lch(15.0, 35.5, -43.4, 1.0)),\n\n parse_color(\"CIELch(15,35.5,-43.4)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lch(15.0, -35.5, -43.4, 0.4)),\n\n parse_color(\"cieLch(15,-35.5,-43.4,0.4)\")\n", "file_path": "src/parser.rs", "rank": 64, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn parse_alpha_syntax() {\n\n // hex\n\n assert_eq!(Some(rgba(255, 0, 0, 1.0)), parse_color(\"ff0000ff\"));\n\n assert_eq!(Some(rgba(255, 0, 0, 1.0)), parse_color(\"#ff0000ff\"));\n\n\n\n // rgb/rgba\n\n assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgb(10,0,0,1)\"));\n\n assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgb(10,0,0, 1)\"));\n\n assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgba(10,0,0,1)\"));\n\n assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgba(10,0,0, 1)\"));\n\n assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgba(10,0,0,1.0)\"));\n\n 
assert_eq!(Some(rgba(10, 0, 0, 1.0)), parse_color(\"rgba(10,0,0, 1.0)\"));\n\n\n\n // hsl/hsla\n\n assert_eq!(\n\n Some(Color::from_hsla(10.0, 0.5, 0.5, 1.0)),\n\n parse_color(\"hsl(10,50%,50%,1)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_hsla(10.0, 0.5, 0.5, 1.0)),\n", "file_path": "src/parser.rs", "rank": 65, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn test_max_precision() {\n\n assert_eq!(format!(\"{}\", MaxPrecision::wrap(3, 0.5)), \"0.5\");\n\n assert_eq!(format!(\"{}\", MaxPrecision::wrap(3, 0.51)), \"0.51\");\n\n assert_eq!(format!(\"{}\", MaxPrecision::wrap(3, 0.512)), \"0.512\");\n\n assert_eq!(format!(\"{}\", MaxPrecision::wrap(3, 0.5124)), \"0.512\");\n\n assert_eq!(format!(\"{}\", MaxPrecision::wrap(3, 0.5125)), \"0.513\");\n\n}\n", "file_path": "src/helper.rs", "rank": 66, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn parse_hsl_syntax() {\n\n assert_eq!(\n\n Some(Color::from_hsl(280.0, 0.2, 0.5)),\n\n parse_color(\"hsl(280,20%,50%)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_hsl(280.0, 0.2, 0.5)),\n\n parse_color(\"hsl(280deg,20%,50%)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_hsl(280.0, 0.2, 0.5)),\n\n parse_color(\"hsl(280°,20%,50%)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_hsl(280.33, 0.123, 0.456)),\n\n parse_color(\"hsl(280.33001,12.3%,45.6%)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_hsl(280.0, 0.2, 0.5)),\n\n parse_color(\"hsl( 280 , 20% , 50%)\")\n", "file_path": "src/parser.rs", "rank": 67, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn sort_by_basic() {\n\n pastel()\n\n .arg(\"sort-by\")\n\n .arg(\"luminance\")\n\n .arg(\"gray\")\n\n .arg(\"white\")\n\n .arg(\"black\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,0.0%,0.0%)\\nhsl(0,0.0%,50.2%)\\nhsl(0,0.0%,100.0%)\\n\");\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 68, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn format_basic() {\n\n pastel()\n\n .arg(\"format\")\n\n .arg(\"hex\")\n\n 
.arg(\"red\")\n\n .assert()\n\n .success()\n\n .stdout(\"#ff0000\\n\");\n\n\n\n pastel()\n\n .arg(\"format\")\n\n .arg(\"rgb\")\n\n .arg(\"red\")\n\n .arg(\"blue\")\n\n .assert()\n\n .success()\n\n .stdout(\"rgb(255, 0, 0)\\nrgb(0, 0, 255)\\n\");\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 69, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn parse_gray_syntax() {\n\n assert_eq!(Some(Color::graytone(0.2)), parse_color(\"gray(0.2)\"));\n\n assert_eq!(Some(Color::black()), parse_color(\"gray(0.0)\"));\n\n assert_eq!(Some(Color::black()), parse_color(\"gray(0)\"));\n\n assert_eq!(Some(Color::white()), parse_color(\"gray(1.0)\"));\n\n assert_eq!(Some(Color::white()), parse_color(\"gray(1)\"));\n\n assert_eq!(Some(Color::white()), parse_color(\"gray(7.3)\"));\n\n\n\n assert_eq!(Some(Color::graytone(0.32)), parse_color(\"gray(.32)\"));\n\n\n\n assert_eq!(\n\n Some(Color::graytone(0.41)),\n\n parse_color(\" gray( 0.41 ) \")\n\n );\n\n\n\n assert_eq!(Some(Color::graytone(0.2)), parse_color(\"gray(20%)\"));\n\n assert_eq!(Some(Color::black()), parse_color(\"gray(0%)\"));\n\n assert_eq!(Some(Color::black()), parse_color(\"gray(0.0%)\"));\n\n assert_eq!(Some(Color::white()), parse_color(\"gray(100%)\"));\n\n assert_eq!(Some(Color::graytone(0.5)), parse_color(\"gray(50%)\"));\n\n\n\n assert_eq!(None, parse_color(\"gray(-1)\"));\n\n assert_eq!(None, parse_color(\"gray(-1%)\"));\n\n assert_eq!(None, parse_color(\"gray(-4.%)\"));\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 70, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn test_interpolate_angle() {\n\n assert_eq!(15.0, interpolate_angle(0.0, 30.0, Fraction::from(0.5)));\n\n assert_eq!(20.0, interpolate_angle(0.0, 100.0, Fraction::from(0.2)));\n\n assert_eq!(0.0, interpolate_angle(10.0, 350.0, Fraction::from(0.5)));\n\n assert_eq!(0.0, interpolate_angle(350.0, 10.0, Fraction::from(0.5)));\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 71, "score": 44783.992218743304 }, { 
"content": "#[test]\n\nfn verify_cmd() {\n\n build_cli().debug_assert();\n\n}\n", "file_path": "src/cli/cli.rs", "rank": 72, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn parse_lab_syntax() {\n\n assert_eq!(\n\n Some(Color::from_lab(12.43, -35.5, 43.4, 1.0)),\n\n parse_color(\"Lab(12.43,-35.5,43.4)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lab(15.0, -23.0, 43.0, 0.5)),\n\n parse_color(\"lab(15,-23,43,0.5)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lab(15.0, 23.0, -43.0, 1.0)),\n\n parse_color(\"CIELab(15,23,-43)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lab(15.0, 35.5, -43.4, 1.0)),\n\n parse_color(\"CIELab(15,35.5,-43.4)\")\n\n );\n\n assert_eq!(\n\n Some(Color::from_lab(15.0, -35.5, -43.4, 0.4)),\n\n parse_color(\"cieLab(15,-35.5,-43.4,0.4)\")\n", "file_path": "src/parser.rs", "rank": 73, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn set_basic() {\n\n pastel()\n\n .arg(\"set\")\n\n .arg(\"hsl-hue\")\n\n .arg(\"120\")\n\n .arg(\"red\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(120,100.0%,50.0%)\\n\");\n\n\n\n pastel()\n\n .arg(\"set\")\n\n .arg(\"hsl-saturation\")\n\n .arg(\"0.1\")\n\n .arg(\"red\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,10.0%,50.0%)\\n\");\n\n\n\n pastel()\n\n .arg(\"set\")\n\n .arg(\"hsl-lightness\")\n\n .arg(\"0.5\")\n\n .arg(\"white\")\n\n .assert()\n\n .success()\n\n .stdout(\"hsl(0,0.0%,50.0%)\\n\");\n\n}\n", "file_path": "tests/integration_tests.rs", "rank": 74, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn parse_named_syntax() {\n\n assert_eq!(Some(Color::black()), parse_color(\"black\"));\n\n assert_eq!(Some(Color::blue()), parse_color(\"blue\"));\n\n assert_eq!(Some(Color::blue()), parse_color(\"Blue\"));\n\n assert_eq!(Some(Color::blue()), parse_color(\"BLUE\"));\n\n assert_eq!(Some(rgb(255, 20, 147)), parse_color(\"deeppink\"));\n\n assert_eq!(None, parse_color(\"whatever\"));\n\n assert_eq!(None, parse_color(\"red blue\"));\n\n}\n\n\n", "file_path": "src/parser.rs", 
"rank": 75, "score": 44783.992218743304 }, { "content": "#[test]\n\nfn pipe_into_format_command() {\n\n let first = pastel()\n\n .arg(\"color\")\n\n .arg(\"red\")\n\n .arg(\"teal\")\n\n .arg(\"hotpink\")\n\n .assert()\n\n .success();\n\n\n\n pastel()\n\n .arg(\"format\")\n\n .arg(\"name\")\n\n .write_stdin(String::from_utf8(first.get_output().stdout.clone()).unwrap())\n\n .assert()\n\n .success()\n\n .stdout(\"red\\nteal\\nhotpink\\n\");\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 76, "score": 43494.6915452264 }, { "content": "#[test]\n\nfn parse_rgb_hex_syntax() {\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"f09\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"#f09\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"#F09\"));\n\n\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"#ff0099\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"#FF0099\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"ff0099\"));\n\n\n\n assert_eq!(Some(rgb(87, 166, 206)), parse_color(\"57A6CE\"));\n\n assert_eq!(Some(rgb(255, 0, 119)), parse_color(\" #ff0077 \"));\n\n\n\n assert_eq!(None, parse_color(\"#1\"));\n\n assert_eq!(None, parse_color(\"#12\"));\n\n assert_eq!(None, parse_color(\"#12345\"));\n\n assert_eq!(None, parse_color(\"#1234567\"));\n\n assert_eq!(None, parse_color(\"#hh0033\"));\n\n assert_eq!(None, parse_color(\"#h03\"));\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 77, "score": 43494.6915452264 }, { "content": "fn print_pastel_warning() {\n\n write_stderr(\n\n Color::yellow(),\n\n \"pastel warning\",\n\n \"Your terminal emulator does not appear to support 24-bit colors \\\n\n (this means that the COLORTERM environment variable is not set to \\\n\n 'truecolor' or '24bit'). \\\n\n pastel will fall back to 8-bit colors, but you will only be able \\\n\n to see rough approximations of the real colors.\\n\\n\\\n\n To fix this, follow these steps:\\n \\\n\n 1. 
Run 'pastel colorcheck' to test if your terminal\\n \\\n\n emulator does support 24-bit colors. If this is the\\n \\\n\n case, set 'PASTEL_COLOR_MODE=24bit' to force 24-bit\\n \\\n\n mode and to remove this warning. Alternatively, make\\n \\\n\n sure that COLORTERM is properly set by your terminal\\n \\\n\n emulator.\\n \\\n\n 2. If your terminal emulator does not support 24-bit\\n \\\n\n colors, set 'PASTEL_COLOR_MODE=8bit' to remove this\\n \\\n\n warning or try a different terminal emulator.\\n\\n\\\n\n \\\n\n For more information, see https://gist.github.com/XVilka/8346728\\n\",\n\n );\n\n}\n\n\n", "file_path": "src/cli/main.rs", "rank": 78, "score": 43494.6915452264 }, { "content": "#[test]\n\nfn parse_rgb_standalone_syntax() {\n\n assert_eq!(\n\n Some(rgb(255, 8, 119)),\n\n parse_color(\" rgb( 255 , 8 , 119 ) \")\n\n );\n\n\n\n assert_eq!(rgb(255, 0, 153), parse_color(\"255,0,153\").unwrap());\n\n assert_eq!(rgb(255, 0, 153), parse_color(\"255, 0, 153\").unwrap());\n\n assert_eq!(\n\n rgb(255, 0, 153),\n\n parse_color(\" 255 , 0 , 153 \").unwrap()\n\n );\n\n assert_eq!(rgb(255, 0, 153), parse_color(\"255 0 153\").unwrap());\n\n assert_eq!(rgb(255, 0, 153), parse_color(\"255 0 153.0\").unwrap());\n\n\n\n assert_eq!(Some(rgb(1, 2, 3)), parse_color(\"1,2,3\"));\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 79, "score": 43494.6915452264 }, { "content": "#[test]\n\nfn parse_rgb_functional_syntax() {\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb(255,0,153)\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb(255, 0, 153)\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb( 255 , 0 , 153 )\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb(255, 0, 153.0)\"));\n\n assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb(255 0 153)\"));\n\n\n\n assert_eq!(\n\n Some(rgb(255, 8, 119)),\n\n parse_color(\" rgb( 255 , 8 , 119 ) \")\n\n );\n\n\n\n assert_eq!(Some(rgb(255, 0, 127)), parse_color(\"rgb(100%,0%,49.8%)\"));\n\n 
assert_eq!(Some(rgb(255, 0, 153)), parse_color(\"rgb(100%,0%,60%)\"));\n\n assert_eq!(Some(rgb(255, 0, 119)), parse_color(\"rgb(100%,0%,46.7%)\"));\n\n assert_eq!(Some(rgb(3, 54, 119)), parse_color(\"rgb(1%,21.2%,46.7%)\"));\n\n assert_eq!(Some(rgb(255, 0, 119)), parse_color(\"rgb(255 0 119)\"));\n\n assert_eq!(\n\n Some(rgb(255, 0, 119)),\n\n parse_color(\"rgb( 255 0 119)\")\n", "file_path": "src/parser.rs", "rank": 80, "score": 43494.6915452264 }, { "content": "fn pastel() -> Command {\n\n let mut cmd = Command::cargo_bin(\"pastel\").unwrap();\n\n cmd.env_remove(\"PASTEL_COLOR_MODE\");\n\n cmd\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 81, "score": 43484.594388290396 }, { "content": "fn print_distance_matrix(\n\n out: &mut dyn Write,\n\n brush: Brush,\n\n colors: &[Color],\n\n metric: DistanceMetric,\n\n) -> Result<()> {\n\n let count = colors.len();\n\n\n\n let distance = |c1: &Color, c2: &Color| match metric {\n\n DistanceMetric::CIE76 => c1.distance_delta_e_cie76(c2),\n\n DistanceMetric::CIEDE2000 => c1.distance_delta_e_ciede2000(c2),\n\n };\n\n\n\n let mut min = std::f64::MAX;\n\n let mut max = 0.0;\n\n for i in 0..count {\n\n for j in 0..count {\n\n if i != j {\n\n let dist = distance(&colors[i], &colors[j]);\n\n if dist < min {\n", "file_path": "src/cli/commands/distinct.rs", "rank": 82, "score": 42312.15181629201 }, { "content": "fn cube_to_8bit(code: u8) -> u8 {\n\n assert!(code < 6);\n\n match code {\n\n 0 => 0,\n\n _ => 55 + 40 * code,\n\n }\n\n}\n\n\n", "file_path": "src/ansi.rs", "rank": 83, "score": 38991.808183652334 }, { "content": "fn hex_to_u8_unsafe(num: &str) -> u8 {\n\n u8::from_str_radix(num, 16).unwrap()\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 84, "score": 37903.29603443151 }, { "content": "fn parse_separator(input: &str) -> IResult<&str, &str> {\n\n alt((comma_separated, space1))(input)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 85, "score": 35398.88139378665 }, { "content": "fn comma_separated(input: 
&str) -> IResult<&str, &str> {\n\n let (input, _) = space0(input)?;\n\n let (input, _) = char(',')(input)?;\n\n space0(input)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 86, "score": 35398.88139378665 }, { "content": "use pastel::ansi::Brush;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Config<'p> {\n\n pub padding: usize,\n\n pub colorpicker_width: usize,\n\n pub colorcheck_width: usize,\n\n pub colorpicker: Option<&'p str>,\n\n pub interactive_mode: bool,\n\n pub brush: Brush,\n\n}\n", "file_path": "src/cli/config.rs", "rank": 87, "score": 34199.235178276984 }, { "content": "use std::io::Write;\n\n\n\nuse crate::config::Config;\n\nuse crate::error::Result;\n\nuse crate::hdcanvas::Canvas;\n\nuse crate::utility::similar_colors;\n\n\n\nuse pastel::Color;\n\nuse pastel::Format;\n\n\n\n// #[derive(Debug)]\n\npub struct Output<'a> {\n\n pub handle: &'a mut dyn Write,\n\n colors_shown: usize,\n\n}\n\n\n\nimpl Output<'_> {\n\n pub fn new(handle: &mut dyn Write) -> Output {\n\n Output {\n\n handle,\n", "file_path": "src/cli/output.rs", "rank": 88, "score": 34148.05195594544 }, { "content": "\n\n canvas.print(self.handle)\n\n }\n\n\n\n pub fn show_color(&mut self, config: &Config, color: &Color) -> Result<()> {\n\n if config.interactive_mode {\n\n if self.colors_shown < 1 {\n\n writeln!(self.handle)?\n\n };\n\n self.show_color_tty(config, color)?;\n\n writeln!(self.handle)?;\n\n } else {\n\n writeln!(self.handle, \"{}\", color.to_hsl_string(Format::NoSpaces))?;\n\n }\n\n self.colors_shown += 1;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cli/output.rs", "rank": 89, "score": 34137.83053958249 }, { "content": " colors_shown: 0,\n\n }\n\n }\n\n\n\n pub fn show_color_tty(&mut self, config: &Config, color: &Color) -> Result<()> {\n\n let checkerboard_size: usize = 16;\n\n let color_panel_size: usize = 12;\n\n\n\n let checkerboard_position_y: usize = 0;\n\n let checkerboard_position_x: usize = config.padding;\n\n let color_panel_position_y: usize =\n\n 
checkerboard_position_y + (checkerboard_size - color_panel_size) / 2;\n\n let color_panel_position_x: usize =\n\n config.padding + (checkerboard_size - color_panel_size) / 2;\n\n let text_position_x: usize = checkerboard_size + 2 * config.padding;\n\n let text_position_y: usize = 0;\n\n\n\n let mut canvas = Canvas::new(checkerboard_size, 60, config.brush);\n\n canvas.draw_checkerboard(\n\n checkerboard_position_y,\n", "file_path": "src/cli/output.rs", "rank": 90, "score": 34136.86024440189 }, { "content": " checkerboard_position_x,\n\n checkerboard_size,\n\n checkerboard_size,\n\n &Color::graytone(0.94),\n\n &Color::graytone(0.71),\n\n );\n\n canvas.draw_rect(\n\n color_panel_position_y,\n\n color_panel_position_x,\n\n color_panel_size,\n\n color_panel_size,\n\n color,\n\n );\n\n\n\n let mut text_y_offset = 0;\n\n let similar = similar_colors(color);\n\n\n\n for (i, nc) in similar.iter().enumerate().take(3) {\n\n if nc.color == *color {\n\n canvas.draw_text(\n", "file_path": "src/cli/output.rs", "rank": 91, "score": 34131.02380549978 }, { "content": " text_position_y + 0 + text_y_offset,\n\n text_position_x,\n\n &format!(\"Hex: {}\", color.to_rgb_hex_string(true)),\n\n );\n\n canvas.draw_text(\n\n text_position_y + 2 + text_y_offset,\n\n text_position_x,\n\n &format!(\"RGB: {}\", color.to_rgb_string(Format::Spaces)),\n\n );\n\n canvas.draw_text(\n\n text_position_y + 4 + text_y_offset,\n\n text_position_x,\n\n &format!(\"HSL: {}\", color.to_hsl_string(Format::Spaces)),\n\n );\n\n\n\n canvas.draw_text(\n\n text_position_y + 8 + text_y_offset,\n\n text_position_x,\n\n \"Most similar:\",\n\n );\n", "file_path": "src/cli/output.rs", "rank": 92, "score": 34127.73724544361 }, { "content": " text_position_y,\n\n text_position_x,\n\n &format!(\"Name: {}\", nc.name),\n\n );\n\n text_y_offset = 2;\n\n continue;\n\n }\n\n\n\n canvas.draw_text(text_position_y + 10 + 2 * i, text_position_x + 7, nc.name);\n\n canvas.draw_rect(\n\n text_position_y + 10 + 2 * i,\n\n 
text_position_x + 1,\n\n 2,\n\n 5,\n\n &nc.color,\n\n );\n\n }\n\n\n\n #[allow(clippy::identity_op)]\n\n canvas.draw_text(\n", "file_path": "src/cli/output.rs", "rank": 93, "score": 34126.95125874111 }, { "content": "fn opt_hash_char(s: &str) -> IResult<&str, Option<char>> {\n\n opt(char('#'))(s)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 94, "score": 33854.43506680201 }, { "content": "use criterion::{criterion_group, criterion_main, Criterion};\n\nuse pastel::parser::parse_color;\n\n\n", "file_path": "benches/parse_color.rs", "rank": 95, "score": 33635.02762928185 }, { "content": "use crate::commands::prelude::*;\n\n\n\npub struct ShowCommand;\n\n\n\nimpl ColorCommand for ShowCommand {\n\n fn run(&self, out: &mut Output, _: &ArgMatches, config: &Config, color: &Color) -> Result<()> {\n\n out.show_color(config, color)\n\n }\n\n}\n", "file_path": "src/cli/commands/show.rs", "rank": 99, "score": 27.267342931457037 } ]
Rust
mshv-ioctls/src/ioctls/system.rs
russell-islam/mshv
99da566389546aedb56fc3d279e01c5dbde79bce
use crate::ioctls::vm::{new_vmfd, VmFd}; use crate::ioctls::Result; use crate::mshv_ioctls::*; use libc::{open, O_CLOEXEC, O_NONBLOCK}; use mshv_bindings::*; use std::fs::File; use std::os::raw::c_char; use std::os::unix::io::{FromRawFd, RawFd}; use vmm_sys_util::errno; use vmm_sys_util::ioctl::ioctl_with_ref; pub struct Mshv { hv: File, } impl Mshv { #[allow(clippy::new_ret_no_self)] pub fn new() -> Result<Self> { let fd = Self::open_with_cloexec(true)?; let ret = unsafe { Self::new_with_fd_number(fd) }; Ok(ret) } pub unsafe fn new_with_fd_number(fd: RawFd) -> Self { Mshv { hv: File::from_raw_fd(fd), } } pub fn open_with_cloexec(close_on_exec: bool) -> Result<RawFd> { let open_flags = O_NONBLOCK | if close_on_exec { O_CLOEXEC } else { 0 }; let ret = unsafe { open("/dev/mshv\0".as_ptr() as *const c_char, open_flags) }; if ret < 0 { Err(errno::Error::last()) } else { Ok(ret) } } pub fn create_vm(&self) -> Result<VmFd> { let creation_flags: u64 = HV_PARTITION_CREATION_FLAG_LAPIC_ENABLED as u64; let mut pr = mshv_create_partition { partition_creation_properties: hv_partition_creation_properties { disabled_processor_features: hv_partition_processor_features { as_uint64: [0; 2] }, disabled_processor_xsave_features: hv_partition_processor_xsave_features { as_uint64: 0_u64, }, }, synthetic_processor_features: hv_partition_synthetic_processor_features { as_uint64: [0; 1], }, flags: creation_flags, }; /* TODO pass in arg for this */ unsafe { pr.synthetic_processor_features .__bindgen_anon_1 .set_hypervisor_present(1); pr.synthetic_processor_features.__bindgen_anon_1.set_hv1(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_partition_reference_counter(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_synic_regs(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_synthetic_timer_regs(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_partition_reference_tsc(1); /* Need this for linux on CH, as there's no PIT or HPET 
*/ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_frequency_regs(1); /* Linux I'm using appears to require vp assist page... */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_intr_ctrl_regs(1); /* According to Hv#1 spec, these must be set also, but they aren't in KVM? */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_vp_index(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_hypercall_regs(1); /* Windows requires this */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_guest_idle_reg(1); } let ret = unsafe { ioctl_with_ref(&self.hv, MSHV_CREATE_PARTITION(), &pr) }; if ret >= 0 { let vm_file = unsafe { File::from_raw_fd(ret) }; Ok(new_vmfd(vm_file)) } else { Err(errno::Error::last()) } } pub fn check_stable(&self) -> Result<bool> { let cap: u32 = MSHV_CAP_CORE_API_STABLE; let ret = unsafe { ioctl_with_ref(&self.hv, MSHV_CHECK_EXTENSION(), &cap) }; match ret { 0 => Ok(false), r if r > 0 => Ok(true), _ => Err(errno::Error::last()), } } pub fn get_msr_index_list(&self) -> Result<MsrList> { /* return all the MSRs we currently support */ Ok(MsrList::from_entries(&[ IA32_MSR_TSC, IA32_MSR_EFER, IA32_MSR_KERNEL_GS_BASE, IA32_MSR_APIC_BASE, IA32_MSR_PAT, IA32_MSR_SYSENTER_CS, IA32_MSR_SYSENTER_ESP, IA32_MSR_SYSENTER_EIP, IA32_MSR_STAR, IA32_MSR_LSTAR, IA32_MSR_CSTAR, IA32_MSR_SFMASK, IA32_MSR_MTRR_DEF_TYPE, IA32_MSR_MTRR_PHYSBASE0, IA32_MSR_MTRR_PHYSMASK0, IA32_MSR_MTRR_PHYSBASE1, IA32_MSR_MTRR_PHYSMASK1, IA32_MSR_MTRR_PHYSBASE2, IA32_MSR_MTRR_PHYSMASK2, IA32_MSR_MTRR_PHYSBASE3, IA32_MSR_MTRR_PHYSMASK3, IA32_MSR_MTRR_PHYSBASE4, IA32_MSR_MTRR_PHYSMASK4, IA32_MSR_MTRR_PHYSBASE5, IA32_MSR_MTRR_PHYSMASK5, IA32_MSR_MTRR_PHYSBASE6, IA32_MSR_MTRR_PHYSMASK6, IA32_MSR_MTRR_PHYSBASE7, IA32_MSR_MTRR_PHYSMASK7, IA32_MSR_MTRR_FIX64K_00000, IA32_MSR_MTRR_FIX16K_80000, IA32_MSR_MTRR_FIX16K_a0000, IA32_MSR_MTRR_FIX4K_c0000, IA32_MSR_MTRR_FIX4K_c8000, IA32_MSR_MTRR_FIX4K_d0000, IA32_MSR_MTRR_FIX4K_d8000, 
IA32_MSR_MTRR_FIX4K_e0000, IA32_MSR_MTRR_FIX4K_e8000, IA32_MSR_MTRR_FIX4K_f0000, IA32_MSR_MTRR_FIX4K_f8000, IA32_MSR_TSC_AUX, IA32_MSR_BNDCFGS, IA32_MSR_DEBUG_CTL, IA32_MSR_SPEC_CTRL, ]) .unwrap()) } } #[allow(dead_code)] #[cfg(test)] mod tests { use super::*; #[test] #[ignore] fn test_create_vm() { let hv = Mshv::new().unwrap(); let vm = hv.create_vm(); assert!(vm.is_ok()); } #[test] #[ignore] fn test_get_msr_index_list() { let hv = Mshv::new().unwrap(); let msr_list = hv.get_msr_index_list().unwrap(); assert!(msr_list.as_fam_struct_ref().nmsrs == 44); let mut found = false; for index in msr_list.as_slice() { if *index == IA32_MSR_SYSENTER_CS { found = true; break; } } assert!(found); /* Test all MSRs in the list individually and determine which can be get/set */ let vm = hv.create_vm().unwrap(); let vcpu = vm.create_vcpu(0).unwrap(); let mut num_errors = 0; for idx in hv.get_msr_index_list().unwrap().as_slice().iter() { let mut get_set_msrs = Msrs::from_entries(&[msr_entry { index: *idx, ..Default::default() }]) .unwrap(); vcpu.get_msrs(&mut get_set_msrs).unwrap_or_else(|_| { println!("Error getting MSR: 0x{:x}", *idx); num_errors += 1; 0 }); vcpu.set_msrs(&get_set_msrs).unwrap_or_else(|_| { println!("Error setting MSR: 0x{:x}", *idx); num_errors += 1; 0 }); } assert!(num_errors == 0); } }
use crate::ioctls::vm::{new_vmfd, VmFd}; use crate::ioctls::Result; use crate::mshv_ioctls::*; use libc::{open, O_CLOEXEC, O_NONBLOCK}; use mshv_bindings::*; use std::fs::File; use std::os::raw::c_char; use std::os::unix::io::{FromRawFd, RawFd}; use vmm_sys_util::errno; use vmm_sys_util::ioctl::ioctl_with_ref; pub struct Mshv { hv: File, } impl Mshv { #[allow(clippy::new_ret_no_self)] pub fn new() -> Result<Self> { let fd = Self::open_with_cloexec(true)?; let ret = unsafe { Self::new_with_fd_number(fd) }; Ok(ret) } pub unsafe fn new_with_fd_number(fd: RawFd) -> Self { Mshv { hv: File::from_raw_fd(fd), } } pub fn open_with_cloexec(close_on_exec: bool) -> Result<RawFd> { let open_flags = O_NONBLOCK | if close_on_exec { O_CLOEXEC } else { 0 }; let ret = unsafe { open("/dev/mshv\0".as_ptr() as *const c_char, open_flags) }; if ret < 0 { Err(errno::Error::last()) } else { Ok(ret) } }
pub fn check_stable(&self) -> Result<bool> { let cap: u32 = MSHV_CAP_CORE_API_STABLE; let ret = unsafe { ioctl_with_ref(&self.hv, MSHV_CHECK_EXTENSION(), &cap) }; match ret { 0 => Ok(false), r if r > 0 => Ok(true), _ => Err(errno::Error::last()), } } pub fn get_msr_index_list(&self) -> Result<MsrList> { /* return all the MSRs we currently support */ Ok(MsrList::from_entries(&[ IA32_MSR_TSC, IA32_MSR_EFER, IA32_MSR_KERNEL_GS_BASE, IA32_MSR_APIC_BASE, IA32_MSR_PAT, IA32_MSR_SYSENTER_CS, IA32_MSR_SYSENTER_ESP, IA32_MSR_SYSENTER_EIP, IA32_MSR_STAR, IA32_MSR_LSTAR, IA32_MSR_CSTAR, IA32_MSR_SFMASK, IA32_MSR_MTRR_DEF_TYPE, IA32_MSR_MTRR_PHYSBASE0, IA32_MSR_MTRR_PHYSMASK0, IA32_MSR_MTRR_PHYSBASE1, IA32_MSR_MTRR_PHYSMASK1, IA32_MSR_MTRR_PHYSBASE2, IA32_MSR_MTRR_PHYSMASK2, IA32_MSR_MTRR_PHYSBASE3, IA32_MSR_MTRR_PHYSMASK3, IA32_MSR_MTRR_PHYSBASE4, IA32_MSR_MTRR_PHYSMASK4, IA32_MSR_MTRR_PHYSBASE5, IA32_MSR_MTRR_PHYSMASK5, IA32_MSR_MTRR_PHYSBASE6, IA32_MSR_MTRR_PHYSMASK6, IA32_MSR_MTRR_PHYSBASE7, IA32_MSR_MTRR_PHYSMASK7, IA32_MSR_MTRR_FIX64K_00000, IA32_MSR_MTRR_FIX16K_80000, IA32_MSR_MTRR_FIX16K_a0000, IA32_MSR_MTRR_FIX4K_c0000, IA32_MSR_MTRR_FIX4K_c8000, IA32_MSR_MTRR_FIX4K_d0000, IA32_MSR_MTRR_FIX4K_d8000, IA32_MSR_MTRR_FIX4K_e0000, IA32_MSR_MTRR_FIX4K_e8000, IA32_MSR_MTRR_FIX4K_f0000, IA32_MSR_MTRR_FIX4K_f8000, IA32_MSR_TSC_AUX, IA32_MSR_BNDCFGS, IA32_MSR_DEBUG_CTL, IA32_MSR_SPEC_CTRL, ]) .unwrap()) } } #[allow(dead_code)] #[cfg(test)] mod tests { use super::*; #[test] #[ignore] fn test_create_vm() { let hv = Mshv::new().unwrap(); let vm = hv.create_vm(); assert!(vm.is_ok()); } #[test] #[ignore] fn test_get_msr_index_list() { let hv = Mshv::new().unwrap(); let msr_list = hv.get_msr_index_list().unwrap(); assert!(msr_list.as_fam_struct_ref().nmsrs == 44); let mut found = false; for index in msr_list.as_slice() { if *index == IA32_MSR_SYSENTER_CS { found = true; break; } } assert!(found); /* Test all MSRs in the list individually and determine which can be get/set */ let vm = 
hv.create_vm().unwrap(); let vcpu = vm.create_vcpu(0).unwrap(); let mut num_errors = 0; for idx in hv.get_msr_index_list().unwrap().as_slice().iter() { let mut get_set_msrs = Msrs::from_entries(&[msr_entry { index: *idx, ..Default::default() }]) .unwrap(); vcpu.get_msrs(&mut get_set_msrs).unwrap_or_else(|_| { println!("Error getting MSR: 0x{:x}", *idx); num_errors += 1; 0 }); vcpu.set_msrs(&get_set_msrs).unwrap_or_else(|_| { println!("Error setting MSR: 0x{:x}", *idx); num_errors += 1; 0 }); } assert!(num_errors == 0); } }
pub fn create_vm(&self) -> Result<VmFd> { let creation_flags: u64 = HV_PARTITION_CREATION_FLAG_LAPIC_ENABLED as u64; let mut pr = mshv_create_partition { partition_creation_properties: hv_partition_creation_properties { disabled_processor_features: hv_partition_processor_features { as_uint64: [0; 2] }, disabled_processor_xsave_features: hv_partition_processor_xsave_features { as_uint64: 0_u64, }, }, synthetic_processor_features: hv_partition_synthetic_processor_features { as_uint64: [0; 1], }, flags: creation_flags, }; /* TODO pass in arg for this */ unsafe { pr.synthetic_processor_features .__bindgen_anon_1 .set_hypervisor_present(1); pr.synthetic_processor_features.__bindgen_anon_1.set_hv1(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_partition_reference_counter(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_synic_regs(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_synthetic_timer_regs(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_partition_reference_tsc(1); /* Need this for linux on CH, as there's no PIT or HPET */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_frequency_regs(1); /* Linux I'm using appears to require vp assist page... */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_intr_ctrl_regs(1); /* According to Hv#1 spec, these must be set also, but they aren't in KVM? */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_vp_index(1); pr.synthetic_processor_features .__bindgen_anon_1 .set_access_hypercall_regs(1); /* Windows requires this */ pr.synthetic_processor_features .__bindgen_anon_1 .set_access_guest_idle_reg(1); } let ret = unsafe { ioctl_with_ref(&self.hv, MSHV_CREATE_PARTITION(), &pr) }; if ret >= 0 { let vm_file = unsafe { File::from_raw_fd(ret) }; Ok(new_vmfd(vm_file)) } else { Err(errno::Error::last()) } }
function_block-full_function
[ { "content": "/// Helper function to create a new `VmFd`.\n\n///\n\n/// This should not be exported as a public function because the preferred way is to use\n\n/// `create_vm` from `Mshv`. The function cannot be part of the `VmFd` implementation because\n\n/// then it would be exported with the public `VmFd` interface.\n\npub fn new_vmfd(vm: File) -> VmFd {\n\n VmFd { vm }\n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use libc::c_void;\n\n\n\n // Note this useful idiom: importing names from outer (for mod tests) scope.\n\n use super::*;\n\n use crate::ioctls::system::Mshv;\n\n\n\n #[test]\n\n #[ignore]\n\n fn test_user_memory() {\n\n let hv = Mshv::new().unwrap();\n\n let vm = hv.create_vm().unwrap();\n\n let addr = unsafe {\n\n libc::mmap(\n\n std::ptr::null_mut(),\n\n 0x1000,\n", "file_path": "mshv-ioctls/src/ioctls/vm.rs", "rank": 0, "score": 161208.19848076237 }, { "content": "/// Helper function for creating a new device.\n\npub fn new_device(dev_fd: File) -> DeviceFd {\n\n DeviceFd { fd: dev_fd }\n\n}\n\n\n\nimpl AsRawFd for DeviceFd {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.fd.as_raw_fd()\n\n }\n\n}\n\n\n\nimpl FromRawFd for DeviceFd {\n\n /// This function is also unsafe as the primitives currently returned have the contract that\n\n /// they are the sole owner of the file descriptor they are wrapping. Usage of this function\n\n /// could accidentally allow violating this contract which can cause memory unsafety in code\n\n /// that relies on it being true.\n\n unsafe fn from_raw_fd(fd: RawFd) -> Self {\n\n DeviceFd {\n\n fd: File::from_raw_fd(fd),\n\n }\n\n }\n", "file_path": "mshv-ioctls/src/ioctls/device.rs", "rank": 1, "score": 139609.07999298672 }, { "content": "/// Helper function to create a new `VcpuFd`.\n\n///\n\n/// This should not be exported as a public function because the preferred way is to use\n\n/// `create_vcpu` from `VmFd`. 
The function cannot be part of the `VcpuFd` implementation because\n\n/// then it would be exported with the public `VcpuFd` interface.\n\npub fn new_vcpu(vcpu: File) -> VcpuFd {\n\n VcpuFd { vcpu }\n\n}\n\n\n\nimpl AsRawFd for VcpuFd {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.vcpu.as_raw_fd()\n\n }\n\n}\n\n\n\nimpl VcpuFd {\n\n /// Get the register values by providing an array of register names\n\n #[cfg(not(any(target_arch = \"arm\", target_arch = \"aarch64\")))]\n\n pub fn get_reg(&self, reg_names: &mut [hv_register_assoc]) -> Result<()> {\n\n //TODO: Error if input register len is zero\n\n let mut mshv_vp_register_args = mshv_vp_registers {\n\n count: reg_names.len() as i32,\n\n regs: reg_names.as_mut_ptr(),\n\n };\n\n // Safe because we know that our file is a vCPU fd, we know the kernel will only read the\n", "file_path": "mshv-ioctls/src/ioctls/vcpu.rs", "rank": 2, "score": 132932.03369973158 }, { "content": "pub fn msr_to_hv_reg_name(msr: u32) -> Result<hv_register_name, &'static str> {\n\n match msr {\n\n IA32_MSR_TSC => Ok(hv_register_name::HV_X64_REGISTER_TSC),\n\n\n\n IA32_MSR_EFER => Ok(hv_register_name::HV_X64_REGISTER_EFER),\n\n IA32_MSR_KERNEL_GS_BASE => Ok(hv_register_name::HV_X64_REGISTER_KERNEL_GS_BASE),\n\n IA32_MSR_APIC_BASE => Ok(hv_register_name::HV_X64_REGISTER_APIC_BASE),\n\n IA32_MSR_PAT => Ok(hv_register_name::HV_X64_REGISTER_PAT),\n\n IA32_MSR_SYSENTER_CS => Ok(hv_register_name::HV_X64_REGISTER_SYSENTER_CS),\n\n IA32_MSR_SYSENTER_ESP => Ok(hv_register_name::HV_X64_REGISTER_SYSENTER_ESP),\n\n IA32_MSR_SYSENTER_EIP => Ok(hv_register_name::HV_X64_REGISTER_SYSENTER_EIP),\n\n IA32_MSR_STAR => Ok(hv_register_name::HV_X64_REGISTER_STAR),\n\n IA32_MSR_LSTAR => Ok(hv_register_name::HV_X64_REGISTER_LSTAR),\n\n IA32_MSR_CSTAR => Ok(hv_register_name::HV_X64_REGISTER_CSTAR),\n\n IA32_MSR_SFMASK => Ok(hv_register_name::HV_X64_REGISTER_SFMASK),\n\n\n\n IA32_MSR_MTRR_CAP => Ok(hv_register_name::HV_X64_REGISTER_MSR_MTRR_CAP),\n\n 
IA32_MSR_MTRR_DEF_TYPE => Ok(hv_register_name::HV_X64_REGISTER_MSR_MTRR_DEF_TYPE),\n\n IA32_MSR_MTRR_PHYSBASE0 => Ok(hv_register_name::HV_X64_REGISTER_MSR_MTRR_PHYS_BASE0),\n\n IA32_MSR_MTRR_PHYSMASK0 => Ok(hv_register_name::HV_X64_REGISTER_MSR_MTRR_PHYS_MASK0),\n", "file_path": "mshv-bindings/src/regs.rs", "rank": 3, "score": 76733.55513153323 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message>(),\n\n 256usize,\n\n concat!(\"Size of: \", stringify!(hv_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_message))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_message>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_message),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 4, "score": 66245.23511505898 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_u128() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_u128>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_u128))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_u128>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_u128))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_u128>())).high_part as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_u128),\n\n \"::\",\n\n stringify!(high_part)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 5, "score": 66245.23511505898 }, { "content": "#[test]\n\nfn bindgen_test_layout___kernel_fd_set() {\n\n assert_eq!(\n\n ::std::mem::size_of::<__kernel_fd_set>(),\n\n 128usize,\n\n concat!(\"Size of: \", stringify!(__kernel_fd_set))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<__kernel_fd_set>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(__kernel_fd_set))\n\n );\n\n assert_eq!(\n\n unsafe { 
&(*(::std::ptr::null::<__kernel_fd_set>())).fds_bits as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(__kernel_fd_set),\n\n \"::\",\n\n stringify!(fds_bits)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 6, "score": 64455.468934854915 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_connection_id() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_connection_id>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(hv_connection_id))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_connection_id>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(hv_connection_id))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_connection_id>())).asu32 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_connection_id),\n\n \"::\",\n\n stringify!(asu32)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 7, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_port_id() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_port_id>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(hv_port_id))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_port_id>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(hv_port_id))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_port_id>())).asu32 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_port_id),\n\n \"::\",\n\n stringify!(asu32)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 8, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message_header() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message_header>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_message_header))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message_header>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_message_header))\n\n );\n\n assert_eq!(\n\n unsafe { 
&(*(::std::ptr::null::<hv_message_header>())).message_type as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_message_header),\n\n \"::\",\n\n stringify!(message_type)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 9, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_register_value() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_register_value>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_register_value))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_register_value>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_register_value))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_register_value>())).reg128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_register_value),\n\n \"::\",\n\n stringify!(reg128)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 10, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_interrupt_control() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_interrupt_control>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_interrupt_control))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_interrupt_control>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_interrupt_control))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_interrupt_control>())).as_uint64 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_interrupt_control),\n\n \"::\",\n\n stringify!(as_uint64)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 11, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_intercept_parameters() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_intercept_parameters>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_intercept_parameters))\n\n );\n\n assert_eq!(\n\n 
::std::mem::align_of::<hv_intercept_parameters>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_intercept_parameters))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_intercept_parameters>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_intercept_parameters),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 12, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message_flags() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message_flags>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_message_flags))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message_flags>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_message_flags))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_message_flags>())).asu8 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_message_flags),\n\n \"::\",\n\n stringify!(asu8)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 13, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_register_assoc() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_register_assoc>(),\n\n 32usize,\n\n concat!(\"Size of: \", stringify!(hv_register_assoc))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_register_assoc>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_register_assoc))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_register_assoc>())).name as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_register_assoc),\n\n \"::\",\n\n stringify!(name)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 14, "score": 63951.469696580316 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_intercept_suspend_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_intercept_suspend_register>(),\n\n 8usize,\n\n 
concat!(\"Size of: \", stringify!(hv_intercept_suspend_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_intercept_suspend_register>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_intercept_suspend_register))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_intercept_suspend_register>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_intercept_suspend_register),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 15, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_exception_info() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_exception_info>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_exception_info))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_exception_info>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_exception_info))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_x64_exception_info>())).as_uint8 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_exception_info),\n\n \"::\",\n\n stringify!(as_uint8)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 16, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_halt_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_halt_message>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_halt_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_halt_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_halt_message))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_x64_halt_message>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_halt_message),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 17, "score": 61843.95239341707 }, { 
"content": "#[test]\n\nfn bindgen_test_layout_hv_x64_fp_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_fp_register>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_fp_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_fp_register>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_fp_register))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_x64_fp_register>())).as_uint128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_fp_register),\n\n \"::\",\n\n stringify!(as_uint128)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 18, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_creation_properties() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_creation_properties>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(hv_partition_creation_properties))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_creation_properties>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_creation_properties)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_partition_creation_properties>())).disabled_processor_features\n\n as *const _ as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 19, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_translate_gva_result() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_translate_gva_result>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_translate_gva_result))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_translate_gva_result>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_translate_gva_result))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_translate_gva_result>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n 
stringify!(hv_translate_gva_result),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 20, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_segment_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_segment_register>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_segment_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_segment_register>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_segment_register))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_x64_segment_register>())).base as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_segment_register),\n\n \"::\",\n\n stringify!(base)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 21, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_dispatch_suspend_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_dispatch_suspend_register>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_dispatch_suspend_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_dispatch_suspend_register>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_dispatch_suspend_register))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_dispatch_suspend_register>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_dispatch_suspend_register),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 22, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_processor_features() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_processor_features>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_partition_processor_features))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_processor_features>(),\n\n 8usize,\n\n concat!(\"Alignment of \", 
stringify!(hv_partition_processor_features))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_partition_processor_features>())).as_uint64 as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_partition_processor_features),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 23, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page>(),\n\n 416usize,\n\n concat!(\"Size of: \", stringify!(hv_vp_register_page))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_vp_register_page))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_vp_register_page>())).version as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_vp_register_page),\n\n \"::\",\n\n stringify!(version)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 24, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_table_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_table_register>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_table_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_table_register>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_table_register))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_x64_table_register>())).pad as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_table_register),\n\n \"::\",\n\n stringify!(pad)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 25, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_explicit_suspend_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_explicit_suspend_register>(),\n\n 8usize,\n\n concat!(\"Size of: \", 
stringify!(hv_explicit_suspend_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_explicit_suspend_register>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_explicit_suspend_register))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_explicit_suspend_register>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_explicit_suspend_register),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 26, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message__bindgen_ty_1>(),\n\n 240usize,\n\n concat!(\"Size of: \", stringify!(hv_message__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_message__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_message__bindgen_ty_1>())).payload as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_message__bindgen_ty_1),\n\n \"::\",\n\n stringify!(payload)\n\n )\n\n );\n\n}\n\nimpl Default for hv_message__bindgen_ty_1 {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 27, "score": 61843.95239341707 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_unsupported_feature_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_unsupported_feature_message>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_unsupported_feature_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_unsupported_feature_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_unsupported_feature_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_unsupported_feature_message>())).vp_index as *const 
_\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 28, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_vp_execution_state() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_vp_execution_state>(),\n\n 2usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_vp_execution_state))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_vp_execution_state>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_vp_execution_state))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_vp_execution_state>())).as_uint16 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_vp_execution_state),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 29, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_gpa_page_access_state() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_gpa_page_access_state>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_gpa_page_access_state))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_gpa_page_access_state>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_gpa_page_access_state))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_gpa_page_access_state>())).as_uint8 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_gpa_page_access_state),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 30, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_unrecoverable_exception_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_unrecoverable_exception_message>(),\n\n 40usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_unrecoverable_exception_message)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_unrecoverable_exception_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n 
stringify!(hv_x64_unrecoverable_exception_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_unrecoverable_exception_message>())).header as *const _\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 31, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_state_data_xsave() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_state_data_xsave>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_vp_state_data_xsave))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_state_data_xsave>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_vp_state_data_xsave))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<hv_vp_state_data_xsave>())).flags as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_vp_state_data_xsave),\n\n \"::\",\n\n stringify!(flags)\n\n )\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 32, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_synthetic_processor_features() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_synthetic_processor_features>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_synthetic_processor_features)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_synthetic_processor_features>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_synthetic_processor_features)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_partition_synthetic_processor_features>())).as_uint64\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 33, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_interruption_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_interruption_register>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_pending_interruption_register)\n\n )\n\n );\n\n 
assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_interruption_register>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_pending_interruption_register)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_interruption_register>())).as_uint64 as *const _\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 34, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_hypercall_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_hypercall_intercept_message>(),\n\n 196usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_hypercall_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_hypercall_intercept_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_hypercall_intercept_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_hypercall_intercept_message>())).header as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 35, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_interrupt_state_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_interrupt_state_register>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_interrupt_state_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_interrupt_state_register>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_interrupt_state_register))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_interrupt_state_register>())).as_uint64 as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_interrupt_state_register),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 36, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_processor_xsave_features() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<hv_partition_processor_xsave_features>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_processor_xsave_features)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_processor_xsave_features>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_processor_xsave_features)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_partition_processor_xsave_features>())).as_uint64 as *const _\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 37, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_msr_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_msr_intercept_message>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_msr_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_msr_intercept_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_msr_intercept_message))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_msr_intercept_message>())).header as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_msr_intercept_message),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 38, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_xsave_xfem_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_xsave_xfem_register>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_xsave_xfem_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_xsave_xfem_register>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_xsave_xfem_register))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_xsave_xfem_register>())).as_uint64 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_xsave_xfem_register),\n\n \"::\",\n", "file_path": 
"mshv-bindings/src/bindings.rs", "rank": 39, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_cpuid_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_cpuid_intercept_message>(),\n\n 104usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_cpuid_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_cpuid_intercept_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_cpuid_intercept_message))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_cpuid_intercept_message>())).header as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_cpuid_intercept_message),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 40, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_sipi_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_sipi_intercept_message>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_sipi_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_sipi_intercept_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_sipi_intercept_message))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_sipi_intercept_message>())).header as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_sipi_intercept_message),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 41, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_apic_eoi_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_apic_eoi_message>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_apic_eoi_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_apic_eoi_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_apic_eoi_message))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n 
&(*(::std::ptr::null::<hv_x64_apic_eoi_message>())).vp_index as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_apic_eoi_message),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 42, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_register_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_register_intercept_message>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_register_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_register_intercept_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_register_intercept_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_register_intercept_message>())).header as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 43, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_exception_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_exception_intercept_message>(),\n\n 240usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_exception_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_exception_intercept_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_exception_intercept_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_exception_intercept_message>())).header as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 44, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_interruption_deliverable_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_interruption_deliverable_message>(),\n\n 48usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_interruption_deliverable_message)\n\n )\n\n );\n\n assert_eq!(\n\n 
::std::mem::align_of::<hv_x64_interruption_deliverable_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_interruption_deliverable_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_interruption_deliverable_message>())).header as *const _\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 45, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_memory_access_info() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_memory_access_info>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_memory_access_info))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_memory_access_info>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_memory_access_info))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_memory_access_info>())).as_uint8 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_memory_access_info),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 46, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_register_access_info() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_register_access_info>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_register_access_info))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_register_access_info>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_register_access_info))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_register_access_info>())).source_value as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_register_access_info),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 47, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_port_id__bindgen_ty_1() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<hv_port_id__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(hv_port_id__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_port_id__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_port_id__bindgen_ty_1))\n\n );\n\n}\n\nimpl hv_port_id__bindgen_ty_1 {\n\n #[inline]\n\n pub fn id(&self) -> __u32 {\n\n unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 24u8) as u32) }\n\n }\n\n #[inline]\n\n pub fn set_id(&mut self, val: __u32) {\n\n unsafe {\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 48, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_interrupt_control__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_interrupt_control__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_interrupt_control__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_interrupt_control__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_interrupt_control__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_interrupt_control__bindgen_ty_1>())).interrupt_type\n\n as *const _ as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 49, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_local_interrupt_controller_state() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_local_interrupt_controller_state>(),\n\n 176usize,\n\n concat!(\"Size of: \", stringify!(hv_local_interrupt_controller_state))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_local_interrupt_controller_state>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_local_interrupt_controller_state)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_local_interrupt_controller_state>())).apic_id as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", 
"rank": 50, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_connection_id__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_connection_id__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(hv_connection_id__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_connection_id__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(hv_connection_id__bindgen_ty_1))\n\n );\n\n}\n\nimpl hv_connection_id__bindgen_ty_1 {\n\n #[inline]\n\n pub fn id(&self) -> __u32 {\n\n unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 24u8) as u32) }\n\n }\n\n #[inline]\n\n pub fn set_id(&mut self, val: __u32) {\n\n unsafe {\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 51, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message_header__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message_header__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_message_header__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message_header__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_message_header__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_message_header__bindgen_ty_1>())).sender as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_message_header__bindgen_ty_1),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 52, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_message_flags__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_message_flags__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_message_flags__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_message_flags__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_message_flags__bindgen_ty_1))\n\n 
);\n\n}\n\nimpl hv_message_flags__bindgen_ty_1 {\n\n #[inline]\n\n pub fn msg_pending(&self) -> __u8 {\n\n unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u8) }\n\n }\n\n #[inline]\n\n pub fn set_msg_pending(&mut self, val: __u8) {\n\n unsafe {\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 53, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_memory_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_memory_intercept_message>(),\n\n 80usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_memory_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_memory_intercept_message>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_memory_intercept_message))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_memory_intercept_message>())).header as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_memory_intercept_message),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 54, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_exception_event() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_exception_event>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_pending_exception_event))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_exception_event>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_pending_exception_event))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_exception_event>())).as_uint64 as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_pending_exception_event),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 55, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_intercept_message_header() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<hv_x64_intercept_message_header>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_intercept_message_header))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_intercept_message_header>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_intercept_message_header))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_intercept_message_header>())).vp_index as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_x64_intercept_message_header),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 56, "score": 59900.883896800246 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_fp_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_fp_register__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_fp_register__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_fp_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_fp_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_fp_register__bindgen_ty_1>())).mantissa as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 57, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_2>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(hv_vp_register_page__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_2>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_2>())).xmm_registers as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", 
"rank": 58, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_xmm_control_status_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_xmm_control_status_register>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_xmm_control_status_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_xmm_control_status_register>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_xmm_control_status_register)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_xmm_control_status_register>())).as_uint128 as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 59, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_processor_features__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_processor_features__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_processor_features__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_processor_features__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_processor_features__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_partition_processor_features__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 60, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_io_port_access_info() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_io_port_access_info>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_io_port_access_info))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_io_port_access_info>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_x64_io_port_access_info))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_io_port_access_info>())).as_uint8 as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset 
of field: \",\n\n stringify!(hv_x64_io_port_access_info),\n\n \"::\",\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 61, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_io_port_intercept_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_io_port_intercept_message>(),\n\n 128usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_io_port_intercept_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_io_port_intercept_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_io_port_intercept_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_io_port_intercept_message>())).header as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 62, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_explicit_suspend_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_explicit_suspend_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_explicit_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_explicit_suspend_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_explicit_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_explicit_suspend_register__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 63, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_intercept_suspend_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_intercept_suspend_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_intercept_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_intercept_suspend_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n 
stringify!(hv_intercept_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_intercept_suspend_register__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 64, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_msr_npiep_config_contents() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_msr_npiep_config_contents>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_msr_npiep_config_contents))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_msr_npiep_config_contents>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_msr_npiep_config_contents)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_msr_npiep_config_contents>())).as_uint64 as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 65, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_virtualization_fault_event() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_virtualization_fault_event>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_pending_virtualization_fault_event)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_virtualization_fault_event>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_pending_virtualization_fault_event)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_virtualization_fault_event>())).as_uint64\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 66, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_3() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_3>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(hv_vp_register_page__bindgen_ty_3))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_3>(),\n\n 1usize,\n\n 
concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_3)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_3>())).segment_registers\n\n as *const _ as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 67, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_translate_gva_result__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_translate_gva_result__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_translate_gva_result__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_translate_gva_result__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_translate_gva_result__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_translate_gva_result__bindgen_ty_1>())).result_code\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 68, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_gpa_page_access_state_flags() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_gpa_page_access_state_flags>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_gpa_page_access_state_flags))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_gpa_page_access_state_flags>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(hv_gpa_page_access_state_flags))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_gpa_page_access_state_flags>())).as_uint64 as *const _\n\n as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(hv_gpa_page_access_state_flags),\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 69, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_1>(),\n\n 128usize,\n\n concat!(\"Size of: \", 
stringify!(hv_vp_register_page__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_1>())).gp_registers as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 70, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_invalid_vp_register_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_invalid_vp_register_message>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_invalid_vp_register_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_invalid_vp_register_message>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_invalid_vp_register_message)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_invalid_vp_register_message>())).vp_index as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 71, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_dispatch_suspend_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_dispatch_suspend_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_dispatch_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_dispatch_suspend_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_dispatch_suspend_register__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_dispatch_suspend_register__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 72, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_segment_register__bindgen_ty_1() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<hv_x64_segment_register__bindgen_ty_1>(),\n\n 2usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_segment_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_segment_register__bindgen_ty_1>(),\n\n 2usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_segment_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_segment_register__bindgen_ty_1>())).attributes as *const _\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 73, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_exception_info__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_exception_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_exception_info__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_exception_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_exception_info__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_exception_info__bindgen_ty_1 {\n\n #[inline]\n\n pub fn error_code_valid(&self) -> __u8 {\n\n unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u8) }\n\n }\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 74, "score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_fp_control_status_register() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_fp_control_status_register>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(hv_x64_fp_control_status_register))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_fp_control_status_register>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_fp_control_status_register)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_fp_control_status_register>())).as_uint128 as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 75, 
"score": 58103.73913829705 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_property_page_access_tracking_config() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_property_page_access_tracking_config>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_property_page_access_tracking_config)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_property_page_access_tracking_config>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_property_page_access_tracking_config)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_partition_property_page_access_tracking_config>())).as_uint64\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 76, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_gpa_page_access_state__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_gpa_page_access_state__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_gpa_page_access_state__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_gpa_page_access_state__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_gpa_page_access_state__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_gpa_page_access_state__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 77, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_interruption_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_interruption_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_pending_interruption_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_interruption_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_pending_interruption_register__bindgen_ty_1)\n\n )\n\n );\n\n 
assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_interruption_register__bindgen_ty_1>()))\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 78, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_xsave_xfem_register__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_xsave_xfem_register__bindgen_ty_2>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_xsave_xfem_register__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_xsave_xfem_register__bindgen_ty_2>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_xsave_xfem_register__bindgen_ty_2)\n\n )\n\n );\n\n}\n\nimpl hv_x64_xsave_xfem_register__bindgen_ty_2 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 79, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_register_intercept_message__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_register_intercept_message__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_register_intercept_message__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_register_intercept_message__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_register_intercept_message__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_register_intercept_message__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 80, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_hypercall_intercept_message__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_hypercall_intercept_message__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_hypercall_intercept_message__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_hypercall_intercept_message__bindgen_ty_1>(),\n\n 1usize,\n\n 
concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_hypercall_intercept_message__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_hypercall_intercept_message__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 81, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_synthetic_processor_features__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_synthetic_processor_features__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_synthetic_processor_features__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_synthetic_processor_features__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_synthetic_processor_features__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_partition_synthetic_processor_features__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 82, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_exception_event__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_exception_event__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_pending_exception_event__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_exception_event__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_pending_exception_event__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_exception_event__bindgen_ty_1>())).error_code\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 83, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_interrupt_state_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_interrupt_state_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: 
\",\n\n stringify!(hv_x64_interrupt_state_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_interrupt_state_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_interrupt_state_register__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_interrupt_state_register__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 84, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_xsave_xfem_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_xsave_xfem_register__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_xsave_xfem_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_xsave_xfem_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_xsave_xfem_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_xsave_xfem_register__bindgen_ty_1>())).low_uint32\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 85, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_input_get_gpa_pages_access_state() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_input_get_gpa_pages_access_state>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(hv_input_get_gpa_pages_access_state))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_input_get_gpa_pages_access_state>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_input_get_gpa_pages_access_state)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_input_get_gpa_pages_access_state>())).partition_id as *const _\n\n as usize\n\n },\n\n 0usize,\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 86, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_vp_execution_state__bindgen_ty_1() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<hv_x64_vp_execution_state__bindgen_ty_1>(),\n\n 2usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_vp_execution_state__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_vp_execution_state__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_vp_execution_state__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_vp_execution_state__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 87, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_partition_processor_xsave_features__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_partition_processor_xsave_features__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_partition_processor_xsave_features__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_partition_processor_xsave_features__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_partition_processor_xsave_features__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_partition_processor_xsave_features__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 88, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_memory_access_info__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_memory_access_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_memory_access_info__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_memory_access_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_memory_access_info__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_memory_access_info__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 89, "score": 56436.674801972666 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_hv_x64_fp_control_status_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_fp_control_status_register__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_fp_control_status_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_fp_control_status_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_fp_control_status_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_fp_control_status_register__bindgen_ty_1>())).fp_control\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 90, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_3__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_3__bindgen_ty_1>(),\n\n 96usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_vp_register_page__bindgen_ty_3__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_3__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_3__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_3__bindgen_ty_1>())).es\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 91, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_pending_virtualization_fault_event__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_pending_virtualization_fault_event__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_pending_virtualization_fault_event__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_pending_virtualization_fault_event__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n 
stringify!(hv_x64_pending_virtualization_fault_event__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_pending_virtualization_fault_event__bindgen_ty_1>())).code\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 92, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_msr_npiep_config_contents__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_msr_npiep_config_contents__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_msr_npiep_config_contents__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_msr_npiep_config_contents__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_msr_npiep_config_contents__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_msr_npiep_config_contents__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 93, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_2__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_2__bindgen_ty_1>(),\n\n 96usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_vp_register_page__bindgen_ty_2__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_2__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_2__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_2__bindgen_ty_1>())).xmm0\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 94, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_vp_register_page__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_vp_register_page__bindgen_ty_1__bindgen_ty_1>(),\n\n 128usize,\n\n concat!(\n\n \"Size of: \",\n\n 
stringify!(hv_vp_register_page__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_vp_register_page__bindgen_ty_1__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_vp_register_page__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_vp_register_page__bindgen_ty_1__bindgen_ty_1>())).rax\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 95, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_xmm_control_status_register__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_xmm_control_status_register__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_xmm_control_status_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_xmm_control_status_register__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_xmm_control_status_register__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<hv_x64_xmm_control_status_register__bindgen_ty_1>()))\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 96, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_gpa_page_access_state_flags__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_gpa_page_access_state_flags__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_gpa_page_access_state_flags__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_gpa_page_access_state_flags__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_gpa_page_access_state_flags__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_gpa_page_access_state_flags__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 97, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1>(),\n\n 2usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_segment_register__bindgen_ty_1__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 98, "score": 54886.06099887096 }, { "content": "#[test]\n\nfn bindgen_test_layout_hv_x64_io_port_access_info__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<hv_x64_io_port_access_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(hv_x64_io_port_access_info__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<hv_x64_io_port_access_info__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(hv_x64_io_port_access_info__bindgen_ty_1)\n\n )\n\n );\n\n}\n\nimpl hv_x64_io_port_access_info__bindgen_ty_1 {\n\n #[inline]\n", "file_path": "mshv-bindings/src/bindings.rs", "rank": 99, "score": 54886.06099887096 } ]
Rust
src/test/debuginfo/recursive-struct.rs
komaeda/rust
b2c6b8c29f13f8d1f242da89e587960b95337819
#![allow(unused_variables)] #![feature(box_syntax)] #![feature(omit_gdb_pretty_printer_section)] #![omit_gdb_pretty_printer_section] use self::Opt::{Empty, Val}; enum Opt<T> { Empty, Val { val: T } } struct UniqueNode<T> { next: Opt<Box<UniqueNode<T>>>, value: T } struct LongCycle1<T> { next: Box<LongCycle2<T>>, value: T, } struct LongCycle2<T> { next: Box<LongCycle3<T>>, value: T, } struct LongCycle3<T> { next: Box<LongCycle4<T>>, value: T, } struct LongCycle4<T> { next: Option<Box<LongCycle1<T>>>, value: T, } struct LongCycleWithAnonymousTypes { next: Opt<Box<Box<Box<Box<Box<LongCycleWithAnonymousTypes>>>>>>, value: usize, } fn main() { let stack_unique: UniqueNode<u16> = UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 1, } }, value: 0, }; let unique_unique: Box<UniqueNode<u32>> = box UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 3, } }, value: 2, }; let vec_unique: [UniqueNode<f32>; 1] = [UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 7.5, } }, value: 6.5, }]; let borrowed_unique: &UniqueNode<f64> = &UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 9.5, } }, value: 8.5, }; let long_cycle1: LongCycle1<u16> = LongCycle1 { next: box LongCycle2 { next: box LongCycle3 { next: box LongCycle4 { next: None, value: 23, }, value: 22, }, value: 21 }, value: 20 }; let long_cycle2: LongCycle2<u32> = LongCycle2 { next: box LongCycle3 { next: box LongCycle4 { next: None, value: 26, }, value: 25, }, value: 24 }; let long_cycle3: LongCycle3<u64> = LongCycle3 { next: box LongCycle4 { next: None, value: 28, }, value: 27, }; let long_cycle4: LongCycle4<f32> = LongCycle4 { next: None, value: 29.5, }; let long_cycle_w_anonymous_types = box box box box box LongCycleWithAnonymousTypes { next: Val { val: box box box box box LongCycleWithAnonymousTypes { next: Empty, value: 31, } }, value: 30 }; zzz(); } fn zzz() {()}
#![allow(unused_variables)] #![feature(box_syntax)] #![feature(omit_gdb_pretty_printer_section)] #![omit_gdb_pretty_printer_section] use self::Opt::{Empty, Val}; enum Opt<T> { Empty, Val { val: T } } struct UniqueNode<T> { next: Opt<Box<UniqueNode<T>>>, value: T } struct LongCycle1<T> { next: Box<LongCycle2<T>>, value: T, } struct LongCycle2<T> { next: Box<LongCycle3<T>>, value: T, } struct LongCycle3<T> { next: Box<LongCycle4<T>>, value: T, } struct LongCycle4<T> { next: Option<Box<LongCycle1<T>>>, value: T, } struct LongCycleWithAnonymousTypes { next: Opt<Box<Box<Box<Box<Box<LongCycleWithAnonymousTypes>>>>>>, value: usize, } fn main() { let stack_unique: UniqueNode<u16> = UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 1, } }, value: 0, };
let vec_unique: [UniqueNode<f32>; 1] = [UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 7.5, } }, value: 6.5, }]; let borrowed_unique: &UniqueNode<f64> = &UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 9.5, } }, value: 8.5, }; let long_cycle1: LongCycle1<u16> = LongCycle1 { next: box LongCycle2 { next: box LongCycle3 { next: box LongCycle4 { next: None, value: 23, }, value: 22, }, value: 21 }, value: 20 }; let long_cycle2: LongCycle2<u32> = LongCycle2 { next: box LongCycle3 { next: box LongCycle4 { next: None, value: 26, }, value: 25, }, value: 24 }; let long_cycle3: LongCycle3<u64> = LongCycle3 { next: box LongCycle4 { next: None, value: 28, }, value: 27, }; let long_cycle4: LongCycle4<f32> = LongCycle4 { next: None, value: 29.5, }; let long_cycle_w_anonymous_types = box box box box box LongCycleWithAnonymousTypes { next: Val { val: box box box box box LongCycleWithAnonymousTypes { next: Empty, value: 31, } }, value: 30 }; zzz(); } fn zzz() {()}
let unique_unique: Box<UniqueNode<u32>> = box UniqueNode { next: Val { val: box UniqueNode { next: Empty, value: 3, } }, value: 2, };
assignment_statement
[]
Rust
shared/rust/src/api/endpoints/jig.rs
corinnewo/ji-cloud
58fc898703ca0fc5b962e644dfcfbcce8547c525
use crate::{ api::Method, domain::{ jig::{ JigBrowseQuery, JigBrowseResponse, JigCountResponse, JigCreateRequest, JigId, JigResponse, JigSearchQuery, JigSearchResponse, JigUpdateDraftDataRequest, JigUpdateRequest, }, CreateResponse, }, error::{EmptyError, MetadataNotFound}, }; use super::ApiEndpoint; pub mod module; pub mod additional_resource; pub mod player; pub struct Create; impl ApiEndpoint for Create { type Req = JigCreateRequest; type Res = CreateResponse<JigId>; type Err = MetadataNotFound; const PATH: &'static str = "/v1/jig"; const METHOD: Method = Method::Post; } pub struct Update; impl ApiEndpoint for Update { type Req = JigUpdateRequest; type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}"; const METHOD: Method = Method::Patch; } pub struct GetLive; impl ApiEndpoint for GetLive { type Req = (); type Res = JigResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/live"; const METHOD: Method = Method::Get; } pub struct GetDraft; impl ApiEndpoint for GetDraft { type Req = (); type Res = JigResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/draft"; const METHOD: Method = Method::Get; } pub struct UpdateDraftData; impl ApiEndpoint for UpdateDraftData { type Req = JigUpdateDraftDataRequest; type Res = (); type Err = MetadataNotFound; const PATH: &'static str = "/v1/jig/{id}/draft"; const METHOD: Method = Method::Patch; } pub struct Publish; impl ApiEndpoint for Publish { type Req = (); type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/draft/publish"; const METHOD: Method = Method::Put; } pub struct Browse; impl ApiEndpoint for Browse { type Req = JigBrowseQuery; type Res = JigBrowseResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/browse"; const METHOD: Method = Method::Get; } pub struct Search; impl ApiEndpoint for Search { type Req = JigSearchQuery; type Res = JigSearchResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig"; 
const METHOD: Method = Method::Get; } pub struct Clone; impl ApiEndpoint for Clone { type Req = (); type Res = CreateResponse<JigId>; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/clone"; const METHOD: Method = Method::Post; } pub struct Delete; impl ApiEndpoint for Delete { type Req = (); type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}"; const METHOD: Method = Method::Delete; } pub struct Count; impl ApiEndpoint for Count { type Req = (); type Res = JigCountResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/count"; const METHOD: Method = Method::Get; }
use crate::{ api::Method, domain::{ jig::{ JigBrowseQuery, JigBrowseResponse, JigCountResponse, JigCreateRequest, JigId, JigResponse, JigSearchQuery, JigSearchResponse, JigUpdateDraftDataRequest, JigUpdateRequest, }, CreateResponse, }, error::{EmptyError, MetadataNotFound}, }; use super::ApiEndpoint; pub
ApiEndpoint for Browse { type Req = JigBrowseQuery; type Res = JigBrowseResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/browse"; const METHOD: Method = Method::Get; } pub struct Search; impl ApiEndpoint for Search { type Req = JigSearchQuery; type Res = JigSearchResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig"; const METHOD: Method = Method::Get; } pub struct Clone; impl ApiEndpoint for Clone { type Req = (); type Res = CreateResponse<JigId>; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/clone"; const METHOD: Method = Method::Post; } pub struct Delete; impl ApiEndpoint for Delete { type Req = (); type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}"; const METHOD: Method = Method::Delete; } pub struct Count; impl ApiEndpoint for Count { type Req = (); type Res = JigCountResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/count"; const METHOD: Method = Method::Get; }
mod module; pub mod additional_resource; pub mod player; pub struct Create; impl ApiEndpoint for Create { type Req = JigCreateRequest; type Res = CreateResponse<JigId>; type Err = MetadataNotFound; const PATH: &'static str = "/v1/jig"; const METHOD: Method = Method::Post; } pub struct Update; impl ApiEndpoint for Update { type Req = JigUpdateRequest; type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}"; const METHOD: Method = Method::Patch; } pub struct GetLive; impl ApiEndpoint for GetLive { type Req = (); type Res = JigResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/live"; const METHOD: Method = Method::Get; } pub struct GetDraft; impl ApiEndpoint for GetDraft { type Req = (); type Res = JigResponse; type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/draft"; const METHOD: Method = Method::Get; } pub struct UpdateDraftData; impl ApiEndpoint for UpdateDraftData { type Req = JigUpdateDraftDataRequest; type Res = (); type Err = MetadataNotFound; const PATH: &'static str = "/v1/jig/{id}/draft"; const METHOD: Method = Method::Patch; } pub struct Publish; impl ApiEndpoint for Publish { type Req = (); type Res = (); type Err = EmptyError; const PATH: &'static str = "/v1/jig/{id}/draft/publish"; const METHOD: Method = Method::Put; } pub struct Browse; impl
random
[ { "content": "pub fn delete_jig(state: Rc<State>, jig_id: JigId) {\n\n state.loader.load(clone!(state => async move {\n\n let path = Delete::PATH.replace(\"{id}\",&jig_id.0.to_string());\n\n match api_with_auth_empty::<EmptyError, ()>(&path, Delete::METHOD, None).await {\n\n Ok(_) => {\n\n state.jigs.lock_mut().retain(|jig| {\n\n jig.id != jig_id\n\n });\n\n },\n\n Err(_) => {}\n\n }\n\n }));\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 0, "score": 248539.08709984063 }, { "content": "pub fn copy_jig(state: Rc<State>, jig_id: &JigId) {\n\n let path = Clone::PATH.replace(\"{id}\", &jig_id.0.to_string());\n\n\n\n state.loader.load(clone!(state => async move {\n\n match api_with_auth::<CreateResponse<JigId>, EmptyError, ()>(&path, Clone::METHOD, None).await {\n\n Ok(resp) => {\n\n\n\n let path = GetDraft::PATH.replace(\"{id}\", &resp.id.0.to_string());\n\n match api_with_auth::<JigResponse, EmptyError, ()>(&path, GetDraft::METHOD, None).await {\n\n Ok(resp) => {\n\n state.jigs.lock_mut().push_cloned(resp);\n\n },\n\n Err(_) => {},\n\n };\n\n\n\n },\n\n Err(_) => {},\n\n };\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 1, "score": 248539.08709984063 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n }))).unwrap_ji();\n\n \n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/edit/src/debug.rs", "rank": 2, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n 
InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/play/src/debug.rs", "rank": 3, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: false }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/debug.rs", "rank": 4, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n }))).unwrap_ji();\n\n \n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/edit/src/debug.rs", "rank": 5, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text,\n\n InitSticker::Video, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/video/play/src/debug.rs", "rank": 6, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n 
SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n with_pairs: true\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/flashcards/play/src/debug.rs", "rank": 7, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n // InitSticker::Text, InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/video/edit/src/debug.rs", "rank": 8, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n //SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: true }))).unwrap_ji();\n\n SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/flashcards/edit/src/debug.rs", "rank": 9, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/play/src/debug.rs", "rank": 10, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n 
SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n with_pairs: true\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/debug.rs", "rank": 11, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n with_pairs: true\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/matching/play/src/debug.rs", "rank": 12, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n //SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: true }))).unwrap_ji();\n\n SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/matching/edit/src/debug.rs", "rank": 13, "score": 243115.3589902591 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n with_pairs: true\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/play/src/debug.rs", "rank": 14, "score": 241545.9922149301 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: true }))).unwrap_ji();\n\n 
//SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/edit/src/debug.rs", "rank": 15, "score": 241545.9922149301 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n InitSticker::Text,// InitSticker::Sprite\n\n ],\n\n traces: vec![\n\n InitTrace::Ellipse(0.3, 0.4, 0.2, 0.1),\n\n InitTrace::Ellipse(0.1, 0.1, 0.1, 0.1),\n\n ]\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/play/src/debug.rs", "rank": 16, "score": 241545.9922149301 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n (InitSticker::Text, ItemKind::Static, (0.3, 0.3)),\n\n /*\n\n (\n\n InitSticker::Text, \n\n ItemKind::Interactive(\n\n Interactive {\n\n audio: None,\n\n target_transform: {\n\n let mut t = Transform::identity();\n\n Some(t)\n\n }\n\n }\n\n ),\n\n (-0.3, -0.3)\n\n ),\n\n */\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/play/src/debug.rs", "rank": 17, "score": 241545.9922149301 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n\n\n stickers: vec![\n\n (InitSticker::Text, ItemKind::Static, (0.3, 0.3)),\n\n (\n\n InitSticker::Text, \n\n ItemKind::Interactive(\n\n Interactive {\n\n audio: None,\n\n target_transform: None,\n\n }\n\n ),\n\n (-0.3, -0.3)\n\n ),\n\n (\n\n InitSticker::Sprite, \n\n ItemKind::Interactive(\n\n Interactive {\n", "file_path": 
"frontend/apps/crates/entry/module/drag-drop/edit/src/debug.rs", "rank": 18, "score": 241545.9922149301 }, { "content": "pub fn init(jig_id: JigId, module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS.set(DebugSettings::debug(Some(InitData{\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n traces: vec![\n\n InitTrace::Ellipse(0.3, 0.4, 0.2, 0.1)\n\n ]\n\n }))).unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/edit/src/debug.rs", "rank": 19, "score": 241545.9922149301 }, { "content": "pub fn render(jig_id: JigId, jig_edit_state: Rc<JigEditState>) -> Dom {\n\n let state = Rc::new(State::new(jig_id, jig_edit_state));\n\n\n\n let share_anchor = html!(\"post-publish-action\", {\n\n .property(\"kind\", \"share\")\n\n .property_signal(\"active\", state.share_state.active_popup.signal_cloned().map(|active| active.is_some()))\n\n });\n\n\n\n html!(\"post-publish\", {\n\n .property(\"slot\", \"main\")\n\n .children(&mut [\n\n share_jig::dom::render(Rc::clone(&state.share_state), share_anchor, Some(\"actions\")),\n\n html!(\"post-publish-action\", {\n\n .property(\"slot\", \"actions\")\n\n .property(\"kind\", \"new-jig\")\n\n .event(clone!(state => move |_: events::Click| {\n\n actions::create_jig(Rc::clone(&state));\n\n }))\n\n }),\n\n html!(\"post-publish-action\", {\n\n .property(\"kind\", \"play-jig\")\n\n .property(\"slot\", \"actions\")\n\n .event(clone!(state => move |_: events::Click| {\n\n state.jig_edit_state.play_jig.set(Some(JigPlayerSettings::default()));\n\n }))\n\n }),\n\n ])\n\n })\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/post_publish/dom.rs", "rank": 20, "score": 239177.57089070458 }, { "content": "pub trait JigAudioExt {\n\n fn display_name(&self) -> &'static str;\n\n}\n\n\n\n\n\nimpl JigAudioExt for 
AudioBackground {\n\n fn display_name(&self) -> &'static str {\n\n match self {\n\n AudioBackground::FunForKids => \"Fun for Kids\",\n\n AudioBackground::DancingHappy => \"Dancing Happy\",\n\n AudioBackground::Jigzi1 => \"Jigzi 1\",\n\n AudioBackground::Jigzi2 => \"Jigzi 2\",\n\n AudioBackground::Jigzi3 => \"Jigzi 3\",\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl JigAudioExt for AudioFeedbackPositive {\n\n fn display_name(&self) -> &'static str {\n", "file_path": "frontend/apps/crates/utils/src/jig.rs", "rank": 21, "score": 234334.54934916346 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/edit/src/state.rs", "rank": 22, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/play/src/state.rs", "rank": 23, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", 
"file_path": "frontend/apps/crates/entry/module/flashcards/play/src/state.rs", "rank": 24, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save; \n\n opts.skip_load_jig = debug_settings.skip_load_jig; \n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n\npub async fn init_from_raw(init_args: BaseInitFromRawArgs<RawData, Mode, Step>) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n\n\n\n\n let force_step = {\n", "file_path": "frontend/apps/crates/entry/module/matching/edit/src/state.rs", "rank": 25, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/state.rs", "rank": 26, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n opts.skip_play = crate::debug::settings().skip_play;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/matching/play/src/state.rs", "rank": 27, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: 
JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/video/play/src/state.rs", "rank": 28, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/video/edit/src/state.rs", "rank": 29, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/play/src/state.rs", "rank": 30, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save; \n\n opts.skip_load_jig = debug_settings.skip_load_jig; \n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, 
\n\n )\n\n}\n\n\n\npub async fn init_from_raw(init_args: BaseInitFromRawArgs<RawData, Mode, Step>) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n\n\n\n\n let force_step = {\n", "file_path": "frontend/apps/crates/entry/module/flashcards/edit/src/state.rs", "rank": 31, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save; \n\n opts.skip_load_jig = debug_settings.skip_load_jig; \n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n\npub async fn init_from_raw(init_args: BaseInitFromRawArgs<RawData, Mode, Step>) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n\n\n\n\n let force_step = {\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/state.rs", "rank": 32, "score": 228977.7836843365 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/edit/src/state.rs", "rank": 33, "score": 228977.7836843365 }, { "content": "pub fn use_module_as(state: Rc<State>, target_kind: ModuleKind, source_module_id: ModuleId) {\n\n state.loader.load(clone!(state => async move {\n\n let target_module_id: Result<ModuleId, EmptyError> = async {\n\n let path = 
endpoints::jig::module::GetDraft::PATH\n\n .replace(\"{id}\", &state.jig.id.0.to_string())\n\n .replace(\"{module_id}\", &source_module_id.0.to_string());\n\n\n\n let source_module = api_with_auth::<ModuleResponse, EmptyError, ()>(\n\n &path,\n\n endpoints::jig::module::GetDraft::METHOD,\n\n None\n\n ).await?.module;\n\n\n\n let target_body = source_module.body.convert_to_body(target_kind).unwrap_ji();\n\n\n\n let path = endpoints::jig::module::Create::PATH\n\n .replace(\"{id}\", &state.jig.id.0.to_string());\n\n\n\n let req = ModuleCreateRequest { body: target_body };\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 34, "score": 228602.692312716 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/play/src/state.rs", "rank": 35, "score": 227522.57366456735 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/play/src/state.rs", "rank": 36, "score": 227522.57366456735 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug 
= crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/edit/src/state.rs", "rank": 37, "score": 227522.57366456735 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n opts.skip_play = crate::debug::settings().skip_play;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/play/src/state.rs", "rank": 38, "score": 227522.57366456735 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save; \n\n opts.skip_load_jig = debug_settings.skip_load_jig; \n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n\npub async fn init_from_raw(init_args: BaseInitFromRawArgs<RawData, Mode, Step>) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n\n\n\n\n let force_step = {\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/edit/src/state.rs", "rank": 39, "score": 227522.57366456735 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new( jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone(); \n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = 
crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(\n\n opts,\n\n init_from_raw, \n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/edit/src/state.rs", "rank": 40, "score": 227522.57366456735 }, { "content": "pub fn get_jig() -> JigResponse {\n\n let module_id = ModuleId(Uuid::from_u128(0));\n\n JigResponse {\n\n id: JigId(Uuid::from_u128(0)),\n\n creator_id: None,\n\n author_id: None,\n\n author_name: None,\n\n published_at: None,\n\n jig_data: JigData {\n\n draft_or_live: DraftOrLive::Draft,\n\n display_name: \"hello world\".to_string(),\n\n //TODO - delete me: https://github.com/ji-devs/ji-cloud/issues/835\n\n modules: vec![\n\n LiteModule {\n\n id: module_id,\n\n kind: ModuleKind::Cover,\n\n },\n\n LiteModule {\n\n id: module_id,\n\n kind: ModuleKind::Memory,\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/debug.rs", "rank": 41, "score": 227378.5268293215 }, { "content": "/// These will all error by default.\n\n/// Modules that can be converted between eachother must override\n\n/// The relevant methods\n\npub trait BodyConvert {\n\n /// Get a list of valid conversion targets\n\n fn convertable_list() -> Vec<ModuleKind> {\n\n Vec::new()\n\n }\n\n /// Memory game\n\n fn convert_to_memory(&self) -> Result<memory::ModuleData, &'static str> {\n\n Err(\"cannot convert to memory game!\")\n\n }\n\n /// Matching\n\n fn convert_to_matching(&self) -> Result<matching::ModuleData, &'static str> {\n\n Err(\"cannot convert to matching!\")\n\n }\n\n /// Flashcards\n\n fn convert_to_flashcards(&self) -> Result<flashcards::ModuleData, &'static str> {\n\n Err(\"cannot convert to matching!\")\n\n }\n\n /// Card quiz\n\n fn convert_to_card_quiz(&self) -> Result<card_quiz::ModuleData, &'static str> {\n\n Err(\"cannot convert to card quiz!\")\n", "file_path": "shared/rust/src/domain/jig/module/body.rs", "rank": 42, "score": 225210.54739557576 }, { "content": 
"pub fn create_jig(state: Rc<State>) {\n\n state.loader.load(clone!(state => async move {\n\n let req = Some(JigCreateRequest::default());\n\n\n\n match api_with_auth::<CreateResponse<JigId>, MetadataNotFound, _>(&Create::PATH, Create::METHOD, req).await {\n\n Ok(resp) => {\n\n let url:String = Route::Jig(JigRoute::Edit(resp.id, JigEditRoute::Landing)).into();\n\n dominator::routing::go_to_url(&url);\n\n },\n\n Err(_) => {},\n\n }\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 43, "score": 215721.38015605722 }, { "content": "pub fn load_jig(state: Rc<State>) {\n\n state.loader.load(clone!(state => async move {\n\n\n\n let resp = match state.player_options.draft {\n\n false => {\n\n let path = jig::GetLive::PATH.replace(\"{id}\", &state.jig_id.0.to_string());\n\n api_no_auth::<JigResponse, EmptyError, ()>(&path, jig::GetLive::METHOD, None).await\n\n },\n\n true => {\n\n let path = jig::GetDraft::PATH.replace(\"{id}\", &state.jig_id.0.to_string());\n\n api_no_auth::<JigResponse, EmptyError, ()>(&path, jig::GetDraft::METHOD, None).await\n\n },\n\n };\n\n\n\n match resp {\n\n Ok(resp) => {\n\n // state.active_module.set(Some(resp.jig.modules[0].clone()));\n\n state.jig.set(Some(resp));\n\n },\n\n Err(_) => {},\n\n }\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 44, "score": 215721.38015605722 }, { "content": "pub fn item_duplicate(\n\n state: Rc<State>,\n\n sidebar_state: Rc<SidebarState>,\n\n module_id: ModuleId,\n\n) -> Dom {\n\n html!(\"menu-line\", {\n\n .property(\"slot\", \"lines\")\n\n .property(\"icon\", \"duplicate\")\n\n .event(clone!(state, module_id => move |_:events::Click| {\n\n state.close_menu();\n\n duplicate_module(sidebar_state.clone(), &module_id);\n\n }))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/menu/dom.rs", "rank": 45, "score": 214264.39423997732 }, { "content": "pub fn 
load_jigs_regular(state: Rc<State>) {\n\n state.loader.load(clone!(state => async move {\n\n load_jigs(Rc::clone(&state)).await\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 46, "score": 214023.32184755313 }, { "content": "pub fn save_jig(state: Rc<State>) {\n\n if form_invalid(Rc::clone(&state)) {\n\n state.submission_tried.set(true);\n\n return;\n\n };\n\n\n\n state.loader.load(clone!(state => async move {\n\n match save_and_publish(Rc::clone(&state)).await {\n\n Ok(_) => {\n\n state.submission_tried.set(false);\n\n\n\n state.jig_edit_state.route.set_neq(JigEditRoute::PostPublish);\n\n\n\n let url: String = Route::Jig(JigRoute::Edit(state.jig.id, JigEditRoute::PostPublish)).into();\n\n log::info!(\"{}\", url);\n\n\n\n /* this will cause a full refresh - but preserves history\n\n * see the .future in EditPage too\n\n dominator::routing::go_to_url(&url);\n\n */\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/actions.rs", "rank": 47, "score": 214023.32184755313 }, { "content": "pub fn create_jig(state: Rc<State>) {\n\n state.loader.load(async {\n\n let req = Some(JigCreateRequest::default());\n\n\n\n match api_with_auth::<CreateResponse<JigId>, EmptyError, _>(&jig::Create::PATH, jig::Create::METHOD, req).await {\n\n Ok(resp) => {\n\n let url:String = Route::Jig(JigRoute::Edit(resp.id, JigEditRoute::Landing)).into();\n\n dominator::routing::go_to_url(&url);\n\n },\n\n Err(_) => {},\n\n }\n\n });\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/post_publish/actions.rs", "rank": 48, "score": 212358.93451844103 }, { "content": "pub fn render(jig_edit_state: Rc<JigEditState>) -> Dom {\n\n let state: Mutable<Option<Rc<State>>> = Mutable::new(None);\n\n\n\n html!(\"empty-fragment\", {\n\n .future(clone!(state => async move {\n\n let _state = State::load_new(jig_edit_state).await;\n\n state.set(Some(Rc::new(_state)));\n\n }))\n\n .property(\"slot\", \"main\")\n\n 
.child_signal(state.signal_cloned().map(|state| {\n\n state.map(|state| render_page(state.clone()))\n\n }))\n\n .child(html!(\"window-loader-block\", {\n\n .property_signal(\"visible\", state.signal_ref(|state| state.is_none()))\n\n }))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/dom.rs", "rank": 49, "score": 211407.72126942826 }, { "content": "pub fn update_jig_settings(state: Rc<State>) {\n\n let req = state.get_jig_update_req();\n\n\n\n let path = endpoints::jig::Update::PATH.replace(\"{id}\", &state.jig_id.0.to_string());\n\n\n\n state.loader.load(async move {\n\n match api_with_auth_empty::<EmptyError, _>(&path, endpoints::jig::Update::METHOD, Some(req)).await {\n\n Ok(_) => {}\n\n Err(_) => {}\n\n };\n\n });\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/actions.rs", "rank": 50, "score": 210727.1915693922 }, { "content": "pub fn search_jigs(state: Rc<State>, q: String) {\n\n state.loader.load(clone!(state => async move {\n\n let is_published = match *state.visible_jigs.lock_ref() {\n\n VisibleJigs::All => None,\n\n VisibleJigs::Published => Some(true),\n\n VisibleJigs::Draft => Some(false),\n\n };\n\n\n\n let req = Some(JigSearchQuery {\n\n q,\n\n is_published,\n\n ..Default::default()\n\n });\n\n\n\n match api_with_auth::<JigSearchResponse, EmptyError, _>(&Search::PATH, Search::METHOD, req).await {\n\n Ok(resp) => {\n\n state.jigs.lock_mut().replace_cloned(resp.jigs);\n\n },\n\n Err(_) => {},\n\n }\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 51, "score": 209518.02534352217 }, { "content": "pub fn get_player_settings(state: Rc<State>) -> JigPlayerSettings {\n\n let direction = state.settings.direction.get_cloned();\n\n let display_score = state.settings.display_score.get();\n\n let track_assessments = state.settings.track_assessments.get();\n\n let drag_assist = state.settings.drag_assist.get();\n\n\n\n 
JigPlayerSettings {\n\n direction: direction,\n\n display_score: display_score,\n\n track_assessments: track_assessments,\n\n drag_assist: drag_assist,\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 52, "score": 205164.12131512404 }, { "content": "pub fn on_iframe_message(state: Rc<State>, message: ModuleToJigPlayerMessage) {\n\n match message {\n\n ModuleToJigPlayerMessage::AddPoints(amount) => {\n\n let mut points = state.points.lock_mut();\n\n *points += amount;\n\n }\n\n ModuleToJigPlayerMessage::Start(time) => {\n\n if let Some(time) = time {\n\n start_timer(Rc::clone(&state), time);\n\n }\n\n }\n\n ModuleToJigPlayerMessage::Next => {\n\n navigate_forward(Rc::clone(&state));\n\n },\n\n ModuleToJigPlayerMessage::Stop => {\n\n state.timer.set(None);\n\n },\n\n };\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 53, "score": 203052.4454743166 }, { "content": "pub fn navigate_back(state: Rc<State>) {\n\n let mut active_module = state.active_module.lock_mut();\n\n if *active_module != 0 {\n\n *active_module -= 1;\n\n state.timer.set(None);\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 54, "score": 202799.5205342083 }, { "content": "pub fn toggle_paused(state: Rc<State>) {\n\n let paused = !state.paused.get();\n\n\n\n // set state to paused\n\n state.paused.set(paused);\n\n\n\n // pause timer if exists\n\n match &*state.timer.lock_ref() {\n\n None => {}\n\n Some(timer) => {\n\n *timer.paused.borrow_mut() = paused;\n\n }\n\n }\n\n\n\n // let iframe know that paused\n\n let iframe_message = match paused {\n\n false => JigToModulePlayerMessage::Play,\n\n true => JigToModulePlayerMessage::Pause,\n\n };\n\n sent_iframe_message(Rc::clone(&state), iframe_message);\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 55, "score": 202799.5205342083 }, { "content": "pub fn 
reload_iframe(state: Rc<State>) {\n\n match &*state.iframe.borrow() {\n\n None => {}\n\n Some(iframe) => {\n\n iframe.set_src(&iframe.src());\n\n state.timer.set(None);\n\n }\n\n };\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 56, "score": 202799.5205342083 }, { "content": "pub fn load_data(state: Rc<State>) {\n\n state.loader.load(clone!(state => async move {\n\n join!(\n\n load_jigs(Rc::clone(&state)),\n\n load_ages(Rc::clone(&state)),\n\n );\n\n }));\n\n}\n\n\n\nasync fn load_jigs(state: Rc<State>) {\n\n let is_published = match *state.visible_jigs.lock_ref() {\n\n VisibleJigs::All => None,\n\n VisibleJigs::Published => Some(true),\n\n VisibleJigs::Draft => Some(false),\n\n };\n\n\n\n let req = Some(JigBrowseQuery {\n\n is_published,\n\n author_id: Some(UserOrMe::Me),\n\n page: None,\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/gallery/actions.rs", "rank": 57, "score": 202799.5205342083 }, { "content": "pub fn navigate_forward(state: Rc<State>) {\n\n let mut active_module = state.active_module.lock_mut();\n\n if let Some(jig) = &*state.jig.lock_ref() {\n\n if *active_module < jig.jig_data.modules.len() - 1 {\n\n\n\n *active_module += 1;\n\n state.timer.set(None);\n\n\n\n } else {\n\n\n\n state.done.set(true);\n\n\n\n }\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 58, "score": 202799.5205342083 }, { "content": "pub fn sent_iframe_message(state: Rc<State>, data: JigToModulePlayerMessage) {\n\n let iframe_origin: String = Route::Home(HomeRoute::Home).into();\n\n let iframe_origin = unsafe {\n\n SETTINGS\n\n .get_unchecked()\n\n .remote_target\n\n .spa_iframe(&iframe_origin)\n\n };\n\n\n\n match &*state.iframe.borrow() {\n\n None => todo!(),\n\n Some(iframe) => {\n\n let m = IframeAction::new(data);\n\n let _ = iframe\n\n .content_window()\n\n .unwrap_ji()\n\n .post_message(&m.into(), &iframe_origin);\n\n }\n\n };\n\n}\n\n\n", "file_path": 
"frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 59, "score": 201512.8887385088 }, { "content": "pub fn on_iframe_message(state: Rc<State>, message: ModuleToJigEditorMessage) {\n\n match message {\n\n ModuleToJigEditorMessage::AppendModule(module) => {\n\n populate_added_module(Rc::clone(&state), module);\n\n },\n\n ModuleToJigEditorMessage::Next => {\n\n state.collapsed.set(false);\n\n },\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 60, "score": 201512.8887385088 }, { "content": "/// Extension trait for interop\n\n/// impl on inner body data\n\npub trait BodyExt<Mode: ModeExt, Step: StepExt>:\n\n BodyConvert + TryFrom<Body> + Serialize + DeserializeOwned + Clone + Debug\n\n{\n\n /// get choose mode list. By default it's the full list\n\n /// but that can be overridden to re-order or hide some modes\n\n fn choose_mode_list() -> Vec<Mode> {\n\n Mode::get_list()\n\n }\n\n\n\n /// get self as a Body\n\n fn as_body(&self) -> Body;\n\n\n\n /// is complete\n\n fn is_complete(&self) -> bool;\n\n\n\n /// get the kind from the type itself\n\n fn kind() -> super::ModuleKind;\n\n\n\n /// given a Mode, get a new Self\n\n /// will usually populate an inner .content\n", "file_path": "shared/rust/src/domain/jig/module/body.rs", "rank": 61, "score": 201311.56198073586 }, { "content": "pub fn send_report(state: Rc<State>) {\n\n state.player_state.loader.load(clone!(state => async move {\n\n let report_type = state.report_type.lock_ref();\n\n log::info!(\"Sending report: {:?}\", report_type);\n\n // TODO: actually send report\n\n state.report_status.set(ReportStatus::Sent);\n\n\n\n spawn_local(clone!(state => async move {\n\n TimeoutFuture::new(5_000).await;\n\n let mut report_status = state.report_status.lock_mut();\n\n // only update if status hasn't changed\n\n if *report_status == ReportStatus::Sent {\n\n *report_status = ReportStatus::Default;\n\n }\n\n }));\n\n }));\n\n}\n\n\n", "file_path": 
"frontend/apps/crates/entry/jig/play/src/player/sidebar/actions.rs", "rank": 62, "score": 200955.5302554462 }, { "content": "pub fn edit(state: Rc<State>) {\n\n if let Some(module) = &*state.module {\n\n let module_id = module.id;\n\n state.sidebar.jig_edit_state.route.set_neq(JigEditRoute::Module(module_id));\n\n state.sidebar.collapsed.set(true);\n\n\n\n let jig_id = state.sidebar.jig.id;\n\n let url: String =\n\n Route::Jig(JigRoute::Edit(jig_id, JigEditRoute::Module(module_id))).into();\n\n log::info!(\"{}\", url);\n\n\n\n /* this will cause a full refresh - but preserves history\n\n * see the .future in EditPage too\n\n dominator::routing::go_to_url(&url);\n\n */\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 63, "score": 200955.5302554462 }, { "content": "pub fn load_ages(state: Rc<State>) {\n\n state.loader.load(clone!(state => async move {\n\n match api_no_auth::<MetadataResponse, EmptyError, ()>(meta::Get::PATH, meta::Get::METHOD, None).await {\n\n Err(_) => {},\n\n Ok(res) => {\n\n state.all_ages.set(res.age_ranges);\n\n },\n\n }\n\n }));\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/sidebar/actions.rs", "rank": 64, "score": 200955.53025544618 }, { "content": "pub fn delete(state: Rc<State>) {\n\n let index = state.index;\n\n\n\n state.sidebar.loader.load(clone!(state => async move {\n\n if let Some(module) = &*state.module {\n\n let path = endpoints::jig::module::Delete::PATH\n\n .replace(\"{id}\",&state.sidebar.jig.id.0.to_string())\n\n .replace(\"{module_id}\",&module.id.0.to_string());\n\n match api_with_auth_empty::<EmptyError, ()>(&path, endpoints::jig::module::Delete::METHOD, None).await {\n\n Ok(_) => {\n\n state.sidebar.modules.lock_mut().remove(index);\n\n },\n\n Err(_) => {}\n\n }\n\n }\n\n }));\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 65, "score": 200955.53025544618 }, { "content": "pub fn 
navigate_to_publish(state: Rc<State>) {\n\n state.jig_edit_state.route.set_neq(JigEditRoute::Publish);\n\n state.collapsed.set(true);\n\n\n\n let jig_id = state.jig.id;\n\n let url: String = Route::Jig(JigRoute::Edit(jig_id, JigEditRoute::Publish)).into();\n\n log::info!(\"{}\", url);\n\n\n\n /* this will cause a full refresh - but preserves history\n\n * see the .future in EditPage too\n\n dominator::routing::go_to_url(&url);\n\n */\n\n}\n\n\n\npub async fn update_jig(jig_id: &JigId, req: JigUpdateDraftDataRequest) -> Result<(), EmptyError> {\n\n let path = endpoints::jig::Update::PATH.replace(\"{id}\", &jig_id.0.to_string());\n\n api_with_auth_empty::<EmptyError, _>(&path, endpoints::jig::Update::METHOD, Some(req)).await\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 66, "score": 200955.5302554462 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n actions::load_jig(state.clone());\n\n\n\n html!(\"jig-play-landing\", {\n\n .property_signal(\"paused\", state.paused.signal())\n\n .global_event(clone!(state => move |evt:Message| {\n\n match evt.try_serde_data::<IframeAction<ModuleToJigPlayerMessage>>() {\n\n Err(_) => {},\n\n Ok(m) => {\n\n actions::on_iframe_message(Rc::clone(&state), m.data)\n\n },\n\n };\n\n }))\n\n .apply(|dom| {\n\n if state.player_options.is_student {\n\n dom\n\n } else {\n\n let sidebar_state = Rc::new(sidebar::state::State::new(state.clone()));\n\n dom.child(sidebar::dom::render(sidebar_state))\n\n }\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/dom.rs", "rank": 67, "score": 200405.89987098478 }, { "content": "/// This extension trait makes it possible to keep the Step\n\n/// functionality generic and at a higher level than the module itself\n\npub trait StepExt: Copy + Default + PartialEq + Eq + Hash {\n\n /// Get the next step from current step\n\n fn next(&self) -> Option<Self>;\n\n /// Get the step as a number\n\n fn as_number(&self) -> usize;\n\n /// 
Label to display (will be localized)\n\n fn label(&self) -> &'static str;\n\n /// List of all available steps\n\n fn get_list() -> Vec<Self>;\n\n /// Get the step which is synonymous with \"preview\"\n\n /// TODO: this could probably be derived as a combo\n\n /// of get_list() and next() (i.e. the first step to return None)\n\n fn get_preview() -> Self;\n\n /// Auto-implemented, check whether current step is \"preview\"\n\n fn is_preview(&self) -> bool {\n\n *self == Self::get_preview()\n\n }\n\n}\n\n\n\n/// Theme Ids. Used in various modules\n", "file_path": "shared/rust/src/domain/jig/module/body.rs", "rank": 68, "score": 199199.8861399284 }, { "content": "/// Extenstion trait for modes\n\npub trait ModeExt: Copy + Default + PartialEq + Eq + Hash {\n\n /// get a list of all the modes\n\n /// (becomes the default in Choose page, which can be overriden in BodyExt)\n\n fn get_list() -> Vec<Self>;\n\n\n\n /// get the mode itself as a string id\n\n fn as_str_id(&self) -> &'static str;\n\n /// for headers, labels, etc.\n\n fn label(&self) -> &'static str;\n\n}\n\n\n\n/// impl ModeExt for empty modes\n\n/// this is a special case and should only be used\n\n/// where the module genuinely ignores the mode\n\n/// one example is the Cover module\n\nimpl ModeExt for () {\n\n fn get_list() -> Vec<Self> {\n\n vec![]\n\n }\n\n\n", "file_path": "shared/rust/src/domain/jig/module/body.rs", "rank": 69, "score": 199199.88613992842 }, { "content": "pub fn paste_module(state: Rc<State>) {\n\n match get_module_to_paste() {\n\n None => log::warn!(\"No module to paste\"),\n\n Some((jig_id, module_id)) => {\n\n state.loader.load(clone!(state => async move {\n\n\n\n let module = super::module_cloner::clone_module(&jig_id, &module_id, &state.jig.id).await.unwrap_ji();\n\n state.modules.lock_mut().push_cloned(Rc::new(Some(module)));\n\n\n\n }));\n\n }\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/copy_paste_module.rs", "rank": 70, "score": 
197388.81648262328 }, { "content": "pub fn add_empty_module_after(state: Rc<State>) {\n\n state\n\n .sidebar\n\n .modules\n\n .lock_mut()\n\n .insert_cloned(state.index + 1, Rc::new(None));\n\n state.sidebar.jig_edit_state.route.set_neq(JigEditRoute::Landing);\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 71, "score": 197388.81648262328 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"input-select\", {\n\n .property(\"slot\", \"goal\")\n\n .property(\"label\", STR_TEACHING_GOAL_LABEL)\n\n .property(\"placeholder\", STR_TEACHING_GOAL_PLACEHOLDER)\n\n .property(\"multiple\", true)\n\n .property_signal(\"value\", goal_value_signal(state.clone()))\n\n .property_signal(\"error\", {\n\n (map_ref! {\n\n let submission_tried = state.submission_tried.signal(),\n\n let value = state.jig.goals.signal_cloned()\n\n => (*submission_tried, value.clone())\n\n })\n\n .map(|(submission_tried, value)| {\n\n submission_tried && value.is_empty()\n\n })\n\n })\n\n .children_signal_vec(state.goals.signal_cloned().map(clone!(state => move |goals| {\n\n goals.iter().map(|goal| {\n\n render_goal(&goal, state.clone())\n\n }).collect()\n\n })).to_signal_vec())\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/components/goal.rs", "rank": 72, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n\n\n let state = Rc::new(share_jig::state::State::new(state.player_state.jig_id));\n\n\n\n let anchor = html!(\"jig-play-sidebar-action\", {\n\n .property(\"kind\", \"share\")\n\n .property_signal(\"active\", state.active_popup.signal_cloned().map(|active| active.is_some()))\n\n });\n\n\n\n share_jig::dom::render(state, anchor, Some(\"actions\"))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/sidebar/dom/share.rs", "rank": 73, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n 
load_ages(Rc::clone(&state));\n\n\n\n html!(\"jig-play-sidebar\", {\n\n .property(\"slot\", \"sidebar\")\n\n .property_signal(\"jigName\", state.player_state.jig.signal_cloned().map(|jig| {\n\n match jig {\n\n None => String::new(),\n\n Some(jig) => jig.jig_data.display_name,\n\n }\n\n }))\n\n .property_signal(\"open\", state.sidebar_open.signal())\n\n .child(html!(\"button-empty\", {\n\n .property(\"slot\", \"close\")\n\n .text(\"<\")\n\n .event(clone!(state => move |_: events::Click| {\n\n state.sidebar_open.set(false);\n\n }))\n\n }))\n\n .child(html!(\"button\", {\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/sidebar/dom/mod.rs", "rank": 74, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"input-select\", {\n\n .property(\"slot\", \"language\")\n\n .property(\"label\", STR_LANGUAGE_LABEL)\n\n .property(\"placeholder\", STR_LANGUAGE_PLACEHOLDER)\n\n .property_signal(\"value\", language_value_signal(state.clone()))\n\n .property_signal(\"error\", {\n\n (map_ref! {\n\n let submission_tried = state.submission_tried.signal(),\n\n let value = state.jig.language.signal_cloned()\n\n => (*submission_tried, value.clone())\n\n })\n\n .map(|(submission_tried, value)| {\n\n submission_tried && value.is_empty()\n\n })\n\n })\n\n .children(state.languages.iter().map(clone!(state => move |language| {\n\n render_language(language, state.clone())\n\n })))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/components/language.rs", "rank": 75, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"input-select\", {\n\n .property(\"slot\", \"age\")\n\n .property(\"label\", STR_AGE_LABEL)\n\n .property(\"placeholder\", STR_AGE_PLACEHOLDER)\n\n .property(\"multiple\", true)\n\n .property_signal(\"value\", age_value_signal(state.clone()))\n\n .property_signal(\"error\", {\n\n (map_ref! 
{\n\n let submission_tried = state.submission_tried.signal(),\n\n let value = state.jig.age_ranges.signal_cloned()\n\n => (*submission_tried, value.clone())\n\n })\n\n .map(|(submission_tried, value)| {\n\n submission_tried && value.is_empty()\n\n })\n\n })\n\n .children_signal_vec(state.ages.signal_cloned().map(clone!(state => move |ages| {\n\n ages.iter().map(|age| {\n\n render_age(&age, state.clone())\n\n }).collect()\n\n })).to_signal_vec())\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/components/age.rs", "rank": 76, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"anchored-overlay\", {\n\n .property(\"positionY\", \"bottom-out\")\n\n .property(\"positionX\", \"left-in\")\n\n .property(\"styled\", true)\n\n .property(\"slot\", \"actions\")\n\n .property_signal(\"open\", info_open_signal(Rc::clone(&state)))\n\n .event(clone!(state => move |_: events::Close| {\n\n state.info_popup_active.set(false);\n\n }))\n\n .child(html!(\"jig-play-sidebar-action\", {\n\n .property(\"slot\", \"anchor\")\n\n .property(\"kind\", \"info\")\n\n .property_signal(\"active\", info_open_signal(Rc::clone(&state)))\n\n .event(clone!(state => move |_: events::Click| {\n\n let mut info_popup_active = state.info_popup_active.lock_mut();\n\n *info_popup_active = !*info_popup_active;\n\n }))\n\n }))\n\n .child_signal({\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/sidebar/dom/info.rs", "rank": 77, "score": 196758.78953475785 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"div\", {\n\n .property(\"slot\", \"category-labels\")\n\n .children_signal_vec(state.jig.categories.signal_cloned().map(clone!(state => move|categories| {\n\n categories.iter()\n\n .map(|category_id| render_pill(state.clone(), category_id.clone()))\n\n .collect()\n\n })).to_signal_vec())\n\n })\n\n}\n\n\n", "file_path": 
"frontend/apps/crates/entry/jig/edit/src/edit/publish/components/categories_pills.rs", "rank": 78, "score": 194995.19581939979 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"jig-settings\", {\n\n .property(\"slot\", \"overlay\")\n\n .children(&mut [\n\n html!(\"button-icon\", {\n\n .property(\"icon\", \"x\")\n\n .property(\"slot\", \"close\")\n\n .event(clone!(state => move |_:events::Click| {\n\n state.active_popup.set(None);\n\n }))\n\n }),\n\n html!(\"jig-settings-button\", {\n\n .property(\"slot\", \"creator\")\n\n .property(\"kind\", \"theme\")\n\n .event(clone!(state => move|_: events::Click| {\n\n set_active_popup(Rc::clone(&state), ActiveSettingsPopup::Theme);\n\n }))\n\n }),\n\n html!(\"jig-settings-button\", {\n\n .property(\"slot\", \"creator\")\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/dom/main.rs", "rank": 79, "score": 194995.19581939979 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"anchored-overlay\", {\n\n .class(class! 
{\n\n .pseudo!(\"::part(overlay)\", {\n\n .style(\"z-index\", \"2\")\n\n })\n\n })\n\n .property(\"slot\", \"settings\")\n\n .property(\"positionX\", \"right-out\")\n\n .property(\"positionY\", \"top-in\")\n\n .property(\"styled\", true)\n\n .property_signal(\"open\", state.active_popup.signal_cloned().map(|x| x.is_some()))\n\n .event(clone!(state => move |_: events::Close| {\n\n state.active_popup.set(None);\n\n }))\n\n .child(html!(\"jig-edit-sidebar-action-button\", {\n\n .property(\"slot\", \"anchor\")\n\n .property(\"kind\", \"settings\")\n\n .event(clone!(state => move |_: events::Click| {\n\n let mut active_popup = state.active_popup.lock_mut();\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/dom/mod.rs", "rank": 80, "score": 194995.19581939979 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n let audio_handles: Vec<Mutable<Option<AudioHandle>>> = AudioBackground::variants().iter().map(|_| Mutable::new(None)).collect();\n\n let audio_handles = Rc::new(audio_handles);\n\n\n\n html!(\"jig-audio-body\", {\n\n .property(\"slot\", \"overlay\")\n\n .property(\"kind\", \"background\")\n\n .children(&mut [\n\n html!(\"button-rect\", {\n\n .property(\"kind\", \"text\")\n\n .property(\"slot\", \"back\")\n\n .property(\"color\", \"blue\")\n\n .child(html!(\"fa-icon\", {.property(\"icon\", \"fa-light fa-chevron-left\")}))\n\n .text(STR_BACK_TO_SETTINGS)\n\n .event(clone!(state => move|_: events::Click| {\n\n set_active_popup(Rc::clone(&state), ActiveSettingsPopup::Main);\n\n }))\n\n }),\n\n html!(\"button-icon\", {\n\n .property(\"icon\", \"x\")\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/dom/background.rs", "rank": 81, "score": 194995.19581939979 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"jig-settings-themes\", {\n\n .property(\"slot\", \"overlay\")\n\n .children(&mut [\n\n html!(\"button-rect\", {\n\n .property(\"kind\", \"text\")\n\n .property(\"slot\", 
\"back\")\n\n .property(\"color\", \"blue\")\n\n .child(html!(\"fa-icon\", {.property(\"icon\", \"fa-light fa-chevron-left\")}))\n\n .text(STR_BACK_TO_SETTINGS)\n\n .event(clone!(state => move|_: events::Click| {\n\n set_active_popup(Rc::clone(&state), ActiveSettingsPopup::Main);\n\n }))\n\n }),\n\n html!(\"button-icon\", {\n\n .property(\"icon\", \"x\")\n\n .property(\"slot\", \"close\")\n\n .event(clone!(state => move |_:events::Click| {\n\n state.active_popup.set(None);\n\n }))\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/dom/themes.rs", "rank": 82, "score": 194995.19581939979 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n html!(\"input-select\", {\n\n .property(\"slot\", \"catagories-select\")\n\n .property(\"label\", STR_CATEGORIES_LABEL)\n\n .property(\"placeholder\", STR_CATEGORIES_PLACEHOLDER)\n\n .property(\"multiple\", true)\n\n .property_signal(\"value\", category_value_signal(state.clone()))\n\n .property_signal(\"error\", {\n\n (map_ref! 
{\n\n let submission_tried = state.submission_tried.signal(),\n\n let value = state.jig.categories.signal_cloned()\n\n => (*submission_tried, value.clone())\n\n })\n\n .map(|(submission_tried, value)| {\n\n submission_tried && value.is_empty()\n\n })\n\n })\n\n .children_signal_vec(state.categories.signal_cloned().map(clone!(state => move |categories| {\n\n render_categories(state.clone(), &categories)\n\n })).to_signal_vec())\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/components/categories_select.rs", "rank": 83, "score": 194995.19581939979 }, { "content": "pub fn start_timer(state: Rc<State>, time: u32) {\n\n let timer = Timer::new(time);\n\n\n\n spawn_local(timer.time.signal().for_each(clone!(state => move|time| {\n\n if time == 0 {\n\n sent_iframe_message(Rc::clone(&state), JigToModulePlayerMessage::TimerDone);\n\n }\n\n async {}\n\n })));\n\n\n\n state.timer.set(Some(timer));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 84, "score": 194647.1136939504 }, { "content": "pub fn published_at_string(time: DateTime<Utc>, short: bool) -> String {\n\n let millis_since_published = Utc::now().timestamp_millis() - time.timestamp_millis();\n\n let millis_since_published = millis_since_published as u64;\n\n\n\n let unit = PublishedAtUnit::new(millis_since_published);\n\n\n\n match short {\n\n true => format!(\"{} {} ago\", unit.get_number(), unit.to_string_short()),\n\n false => {\n\n let num = unit.get_number();\n\n match num {\n\n 1 => format!(\"1 {} ago\", unit.to_string_long()),\n\n num => format!(\"{} {}s ago\", num, unit.to_string_long()),\n\n }\n\n },\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub struct JigPlayerOptions {\n", "file_path": "frontend/apps/crates/utils/src/jig.rs", "rank": 85, "score": 194573.45797245312 }, { "content": "pub fn player_settings_change_signal(state: Rc<State>) -> impl Signal<Item = JigPlayerSettings> {\n\n let sig = 
map_ref! {\n\n let direction = state.settings.direction.signal_cloned(),\n\n let display_score = state.settings.display_score.signal(),\n\n let track_assessments = state.settings.track_assessments.signal(),\n\n let drag_assist = state.settings.drag_assist.signal()\n\n => ( direction.clone(), display_score.clone(), track_assessments.clone(), drag_assist.clone())\n\n };\n\n\n\n sig.map(\n\n |(direction, display_score, track_assessments, drag_assist)| JigPlayerSettings {\n\n direction: direction.clone(),\n\n display_score: display_score.clone(),\n\n track_assessments: track_assessments.clone(),\n\n drag_assist: drag_assist.clone(),\n\n },\n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 86, "score": 193508.76901011646 }, { "content": "pub fn render(_state: Rc<State>) -> Vec<Dom> {\n\n vec![\n\n html!(\"jig-edit-publish-add-resource\", {\n\n .property(\"slot\", \"additional-resources\")\n\n .property(\"label\", \"Add Lesson Plan\")\n\n .children(&mut [\n\n html!(\"jig-edit-publish-add-resource-method\", {\n\n .property(\"slot\", \"add-method\")\n\n .property(\"kind\", \"upload\")\n\n }),\n\n html!(\"jig-edit-publish-add-resource-method\", {\n\n .property(\"slot\", \"add-method\")\n\n .property(\"kind\", \"link\")\n\n }),\n\n ])\n\n }),\n\n html!(\"jig-edit-publish-add-resource\", {\n\n .property(\"slot\", \"additional-resources\")\n\n .property(\"label\", \"Add Curriculum\")\n\n }),\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/publish/components/additional_resources.rs", "rank": 87, "score": 191158.16700594337 }, { "content": "pub fn update_display_name(state: Rc<State>, value: String) {\n\n state.loader.load(clone!(state => async move {\n\n state.name.set(value.clone());\n\n\n\n let req = JigUpdateDraftDataRequest {\n\n display_name: Some(value),\n\n ..Default::default()\n\n };\n\n\n\n match update_jig(&state.jig.id, req).await {\n\n Ok(_) => {},\n\n Err(_) => {},\n\n }\n\n }));\n\n}\n\n\n", 
"file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 88, "score": 191158.16700594337 }, { "content": "pub fn assign_kind(state: Rc<State>, kind: ModuleKind) {\n\n state.sidebar.loader.load(clone!(state => async move {\n\n let req = Some(ModuleCreateRequest {\n\n body: ModuleBody::new(kind),\n\n });\n\n let path = endpoints::jig::module::Create::PATH.replace(\"{id}\",&state.sidebar.jig.id.0.to_string());\n\n\n\n match api_with_auth::<CreateResponse<ModuleId>, EmptyError, _>(&path, endpoints::jig::module::Create::METHOD, req).await {\n\n Ok(resp) => {\n\n let id = resp.id;\n\n let index = state.index;\n\n let module = Rc::new(Some(LiteModule {\n\n id,\n\n kind,\n\n }));\n\n state.sidebar.modules.lock_mut().set_cloned(index, module);\n\n let req = ModuleUpdateRequest {\n\n id: StableOrUniqueId::Unique(id.clone()),\n\n index: Some(index.try_into().unwrap_ji()),\n\n body: None,\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 89, "score": 189469.82433102033 }, { "content": "pub fn duplicate_module(state: Rc<State>, module_id: &ModuleId) {\n\n state.loader.load(clone!(state, module_id => async move {\n\n let module = super::module_cloner::clone_module(&state.jig.id, &module_id, &state.jig.id).await.unwrap_ji();\n\n state.modules.lock_mut().push_cloned(Rc::new(Some(module)));\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/actions.rs", "rank": 90, "score": 189469.82433102033 }, { "content": "pub fn toggle_background_audio(state: Rc<State>, background_audio: AudioBackground) {\n\n let mut bg_audio_handle = state.bg_audio_handle.borrow_mut();\n\n\n\n match &*bg_audio_handle {\n\n Some(bg_audio_handle) => {\n\n if state.bg_audio_playing.get() {\n\n bg_audio_handle.pause();\n\n state.bg_audio_playing.set(false);\n\n } else {\n\n bg_audio_handle.play();\n\n state.bg_audio_playing.set(true);\n\n };\n\n },\n\n None => {\n\n let handle = 
AUDIO_MIXER.with(|mixer| mixer.add_source(background_audio.as_source(), AudioClipOptions {\n\n auto_play: true,\n\n is_loop: true,\n\n on_ended: None::<fn()>,\n\n }));\n\n\n\n *bg_audio_handle = Some(handle);\n\n state.bg_audio_playing.set(true);\n\n },\n\n };\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/player/actions.rs", "rank": 91, "score": 189469.82433102033 }, { "content": "pub fn render(state: Rc<State>, query: String, jigs: Rc<MutableVec<JigResponse>>) -> Dom {\n\n html!(\"home-search-results\", {\n\n .property_signal(\"resultsCount\", jigs.signal_vec_cloned().len().map(|len| len as u32))\n\n .property(\"query\", &query)\n\n .child(\n\n html!(\"home-search-results-section\", {\n\n .property(\"slot\", \"sections\")\n\n .property_signal(\"resultsCount\", jigs.signal_vec_cloned().len().map(|len| len as u32))\n\n .children_signal_vec(jigs.signal_vec_cloned().map(clone!(state => move |jig| {\n\n render_result(state.clone(), &jig)\n\n })))\n\n })\n\n )\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/home/src/home/dom/search_results.rs", "rank": 92, "score": 189388.43379093357 }, { "content": "pub fn mouse_down(state: Rc<State>, x: i32, y: i32) {\n\n state\n\n .sidebar\n\n .drag\n\n .set(Some(Rc::new(DragState::new(state.clone(), x, y))));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 93, "score": 189327.73865524252 }, { "content": "pub fn move_index(state: Rc<State>, move_target: MoveTarget) {\n\n state.sidebar.loader.load(clone!(state => async move {\n\n if let Some(module) = &*state.module {\n\n if let Some(target) = {\n\n match move_target {\n\n MoveTarget::Up if state.index > 1 => {\n\n Some(state.index-1)\n\n },\n\n MoveTarget::Down if state.index < state.total_len-1 => {\n\n Some(state.index+1)\n\n },\n\n MoveTarget::Any(target) => Some(target),\n\n _ => None\n\n }\n\n } {\n\n state.sidebar.modules.lock_mut().move_from_to(state.index, target);\n\n let req = 
ModuleUpdateRequest {\n\n id: StableOrUniqueId::Unique(module.id.clone()),\n\n index: Some(target.try_into().unwrap_ji()),\n\n body: None,\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/module/actions.rs", "rank": 94, "score": 187817.3137368678 }, { "content": "pub fn mouse_up(sidebar: Rc<SidebarState>, _x: i32, _y: i32) {\n\n if let Some(_drag) = sidebar.drag.replace(None) {\n\n sidebar.drag_target_index.set_neq(None);\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/dragging/actions.rs", "rank": 95, "score": 187602.38568259357 }, { "content": "pub fn copy_module(state: Rc<State>, module_id: &ModuleId) {\n\n let value = format!(\"{},{}\", &state.jig.id.0, &module_id.0);\n\n\n\n let local_storage = get_local_storage().unwrap_ji();\n\n\n\n local_storage.set(COPY_MODULE_KEY, &value).unwrap_ji();\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/copy_paste_module.rs", "rank": 96, "score": 186199.50649253704 }, { "content": "pub fn render(state: Rc<State>, tab: FeedbackTab) -> Dom {\n\n html!(\"jig-audio-body\", {\n\n .property(\"slot\", \"overlay\")\n\n .property(\"kind\", \"feedback\")\n\n .children(&mut [\n\n html!(\"label\", {\n\n .property(\"slot\", \"correct-mistake\")\n\n .child(html!(\"input\", {\n\n .property(\"name\", \"correct-mistake\")\n\n .property(\"type\", \"radio\")\n\n .property(\"checked\", tab == FeedbackTab::Positive)\n\n .event(clone!(state => move |_:events::Input| {\n\n state.active_popup.set(Some(ActiveSettingsPopup::Feedback(FeedbackTab::Positive)));\n\n }))\n\n }))\n\n .text(STR_CORRECT)\n\n }),\n\n html!(\"label\", {\n\n .property(\"slot\", \"correct-mistake\")\n\n .child(html!(\"input\", {\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/settings/dom/feedback.rs", "rank": 97, "score": 185940.53795790568 }, { "content": "//Mouse movements are triggered from sidebar regardless of\n\n//whether drag state exists yet or not\n\npub fn 
mouse_move(sidebar: Rc<SidebarState>, x: i32, y: i32) {\n\n //update via ref not lock_mut\n\n //otherwise it will replace the drag and cause a re-render\n\n //with every update\n\n //internally, drag uses Mutable and Atomic so this works in Rc\n\n if let Some(drag) = &*sidebar.drag.lock_ref() {\n\n drag.inner.update(x, y);\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/dragging/actions.rs", "rank": 98, "score": 185914.04300767052 }, { "content": "pub fn render(state: Rc<State>, items: Vec<Dom>) -> Dom {\n\n html!(\"menu-kebab\", {\n\n .property(\"slot\", \"menu\")\n\n .child(html!(\"jig-edit-sidebar-module-menu\", {\n\n .children(items)\n\n }))\n\n .after_inserted(clone!(state => move |elem| {\n\n *state.menu_ref.borrow_mut() = Some(elem);\n\n }))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/menu/dom.rs", "rank": 99, "score": 185914.04300767052 } ]
Rust
src/columnar_transposition.rs
sula0/embedo
fa3ddcc54ea481ecffe5c765706aa6e1b42a6be1
use crate::util; const META_ROWS: usize = 2; const EMPTY_CHAR: u32 = 32; pub fn encrypt(text: &str, key: &str) -> String { let mut result = String::new(); let cols = key.len(); let required_rows = text.len() as f32 / cols as f32; let rows = META_ROWS + (required_rows as f32).ceil() as usize; let mut matrix = vec![vec![32; cols]; rows]; let key_chars: Vec<char> = key.to_uppercase().chars().collect(); let text_chars: Vec<char> = text.chars().collect(); /* Keys vec contains tuple consisted of the alphabetized value of the key char and its index. Later we sort by the alphabetized value to get the column reading order. */ let mut keys: Vec<(u32, u32)> = vec![]; /* Fill up the matrix. First row contains the key. Second row contains the order of alphabets in the key. Rest of the rows are filled with the text which will be encrypted. */ for i in 0..rows { for j in 0..cols { if i == 0 { matrix[i][j] = key_chars[j] as u32; } else if i == 1 { let alphabetized_char = util::alphabetize_char(key_chars[j] as u32, true); matrix[i][j] = alphabetized_char; keys.push((j as u32, alphabetized_char)); } else { let char_index = (i - META_ROWS) + j + ((i - META_ROWS) * (cols - 1)); if char_index < text_chars.len() { matrix[i][j] = text_chars[char_index] as u32; } else { matrix[i][j] = EMPTY_CHAR; } } } } keys.sort_by(|a, b| a.1.cmp(&b.1)); /* Read the columns of the text. Order of reading is determined but the key alphabets. 
*/ for j in keys.iter() { for i in META_ROWS..rows { let c = matrix[i as usize][j.0 as usize]; result += std::str::from_utf8(&[c as u8]).unwrap(); } } result } pub fn decrypt(text: &str, key: &str) -> String { let mut result = String::new(); let cols = key.len(); let required_rows = text.len() as f32 / cols as f32; let rows = META_ROWS + (required_rows as f32).ceil() as usize; let mut matrix = vec![vec![32; cols]; rows]; let key_chars: Vec<char> = key.to_uppercase().chars().collect(); let text_chars: Vec<char> = text.chars().collect(); /* Keys vec contains tuple consisted of the alphabetized value of the key char and its index. Later we sort by the alphabetized value to get the column reading order. */ let mut keys: Vec<(u32, u32)> = vec![]; /* Fill up the matrix. First row contains the key. Second row contains the order of alphabets in the key. Rest of the rows are filled with the text which will be decrypted. */ for i in 0..rows { for j in 0..cols { if i == 0 { matrix[i][j] = key_chars[j] as u32; } else if i == 1 { let alphabetized_char = util::alphabetize_char(key_chars[j] as u32, true); matrix[i][j] = alphabetized_char; keys.push((j as u32, alphabetized_char)); } } } keys.sort_by(|a, b| a.1.cmp(&b.1)); let mut char_index: usize = 0; /* Write the columns of the text. Order of writing is determined but the key alphabets. */ for j in keys.iter() { for i in META_ROWS..rows { if char_index < text_chars.len() { matrix[i as usize][j.0 as usize] = text_chars[char_index] as u32; } else { matrix[i as usize][j.0 as usize] = EMPTY_CHAR; } char_index += 1; } } for i in META_ROWS..rows { for j in 0..cols { let c = matrix[i as usize][j as usize]; result += std::str::from_utf8(&[c as u8]).unwrap(); } } result }
use crate::util; const META_ROWS: usize = 2; const EMPTY_CHAR: u32 = 32; pub fn encrypt(text: &str, key: &str) -> String { let mut result = String::new(); let cols = key.len(); let required_rows = text.len() as f32 / cols as f32; let rows = META_ROWS + (required_rows as f32).ceil() as usize; let mut matrix = vec![vec![32; cols]; rows]; let key_chars: Vec<char> = key.to_uppercase().chars().collect(); let text_chars: Vec<char> = text.chars().collect(); /* Keys vec contains tuple consisted of the alphabetized value of the key char and its index. Later we sort by the alphabetized value to get the column reading order. */ let mut keys: Vec<(u32, u32)> = vec![]; /* Fill up the matrix. First row contains the key. Second row contains the order of alphabets in the key. Rest of the rows are filled with the text which will be encrypted. */ for i in 0..rows { for j in 0..cols { if i == 0 { matrix[i][j] = key_chars[j] as u32; } else if i == 1 { let alphabetized_char = util::alphabetize_char(key_chars[j] as u32, true); matrix[i][j] = alphabetized_char; keys.push((j as u32, alphabetized_char)); } else { let char_index = (i - META_ROWS) + j + ((i - META_ROWS) * (cols - 1)); if char_index < text_chars.len() { matrix[i][j] = text_chars[char_index] as u32; } else { matrix[i][j] = EMPTY_CHAR; } } } } keys.sort_by(|a, b| a.1.cmp(&b.1)); /* Read the columns of the text. Order of reading is determined but the key alphabets. */ for j in keys.iter() { for i in META_ROWS..rows { let c = matrix[i as usize][j.0 as usize]; result += std::str::from_utf8(&[c as u8]).unwrap(); } } result } pub fn decr
f i == 1 { let alphabetized_char = util::alphabetize_char(key_chars[j] as u32, true); matrix[i][j] = alphabetized_char; keys.push((j as u32, alphabetized_char)); } } } keys.sort_by(|a, b| a.1.cmp(&b.1)); let mut char_index: usize = 0; /* Write the columns of the text. Order of writing is determined but the key alphabets. */ for j in keys.iter() { for i in META_ROWS..rows { if char_index < text_chars.len() { matrix[i as usize][j.0 as usize] = text_chars[char_index] as u32; } else { matrix[i as usize][j.0 as usize] = EMPTY_CHAR; } char_index += 1; } } for i in META_ROWS..rows { for j in 0..cols { let c = matrix[i as usize][j as usize]; result += std::str::from_utf8(&[c as u8]).unwrap(); } } result }
ypt(text: &str, key: &str) -> String { let mut result = String::new(); let cols = key.len(); let required_rows = text.len() as f32 / cols as f32; let rows = META_ROWS + (required_rows as f32).ceil() as usize; let mut matrix = vec![vec![32; cols]; rows]; let key_chars: Vec<char> = key.to_uppercase().chars().collect(); let text_chars: Vec<char> = text.chars().collect(); /* Keys vec contains tuple consisted of the alphabetized value of the key char and its index. Later we sort by the alphabetized value to get the column reading order. */ let mut keys: Vec<(u32, u32)> = vec![]; /* Fill up the matrix. First row contains the key. Second row contains the order of alphabets in the key. Rest of the rows are filled with the text which will be decrypted. */ for i in 0..rows { for j in 0..cols { if i == 0 { matrix[i][j] = key_chars[j] as u32; } else i
function_block-random_span
[ { "content": "pub fn encrypt(text: &str, n: u32) -> String {\n\n let mut result = String::new();\n\n\n\n for c in text.chars() {\n\n let is_upper = c.is_uppercase();\n\n\n\n let encrypted = (util::alphabetize_char(c as u32, is_upper) + n) % 26;\n\n result += std::str::from_utf8(&[util::asciify_char(encrypted, is_upper) as u8]).unwrap();\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/ceasar_cipher.rs", "rank": 1, "score": 171432.56602014243 }, { "content": "#[wasm_bindgen]\n\npub fn columnar_transposition_encrypt(text: &str, key: &str) -> String {\n\n columnar_transposition::encrypt(text, key)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 165927.01425742017 }, { "content": "#[wasm_bindgen]\n\npub fn ceasar_cipher_encrypt(text: &str, n: u32) -> String {\n\n ceasar_cipher::encrypt(text, n)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 158262.5413867413 }, { "content": "#[wasm_bindgen]\n\npub fn columnar_transposition_decrypt(text: &str, key: &str) -> String {\n\n columnar_transposition::decrypt(text, key)\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 151138.4733967497 }, { "content": "pub fn decrypt(text: &str, n: u32) -> String {\n\n let mut result = String::new();\n\n\n\n for c in text.chars() {\n\n let is_upper = c.is_uppercase();\n\n \n\n let deciphered = (util::alphabetize_char(c as u32, is_upper) - n) % 26;\n\n result += std::str::from_utf8(&[util::asciify_char(deciphered, is_upper) as u8]).unwrap();\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/ceasar_cipher.rs", "rank": 6, "score": 146358.66234658778 }, { "content": "#[wasm_bindgen]\n\npub fn ceasar_cipher_decrypt(text: &str, n: u32) -> String {\n\n ceasar_cipher::decrypt(text, n)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 142937.48186136846 }, { "content": "pub fn alphabetize_char(c: u32, is_upper: bool) -> u32 {\n\n /*\n\n Since ASCII is being used, need to 'alphabetize'\n\n the char. 
In ASCII, lowercase characters are higher in\n\n number by 32 from their uppercase equivalents.\n\n */\n\n let diff = if is_upper { 0 } else { LOWER_DIFF };\n\n\n\n /*\n\n In ASCII, letters start at 65 so we subtract it\n\n (along with the diff above) to get to the 0-26 range.\n\n */\n\n c - diff - 65\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 8, "score": 126599.666341614 }, { "content": "pub fn asciify_char(c: u32, is_upper: bool) -> u32 {\n\n /*\n\n Since ASCII is being used, need to 'alphabetize'\n\n the char. In ASCII, lowercase characters are higher in\n\n number by 32 from their uppercase equivalents.\n\n */\n\n let diff = if is_upper { 0 } else { LOWER_DIFF };\n\n\n\n /*\n\n This time we simply reverse the 'normalization' operation.\n\n Add up 65 and the diff to the char to get back to the\n\n ASCII range with correct capitalization.\n\n */\n\n c + 65 + diff\n\n}\n", "file_path": "src/util.rs", "rank": 9, "score": 109936.6544338838 }, { "content": "export const encrypt = (encryptionInput) => {\n\n const encryptionKey = document.getElementById('columnarTranspositionKey').value;\n\n return columnar_transposition_encrypt(encryptionInput, encryptionKey);\n", "file_path": "scripts/columnarTransposition.js", "rank": 10, "score": 37987.24637332653 }, { "content": "export const encrypt = (encryptionInput) => {\n\n const shiftBy = document.getElementById('ceasarShift').value;\n\n return ceasar_cipher_encrypt(encryptionInput, shiftBy);\n", "file_path": "scripts/ceasarCipher.js", "rank": 11, "score": 37987.24637332653 }, { "content": "const onEncrypt = () => {\n\n const encryptionInput = state.toEncryptInput.value;\n\n\n\n let encryptedText = '';\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n encryptedText = ceasarCipher.encrypt(encryptionInput);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n encryptedText = columnarTransposition.encrypt(encryptionInput);\n\n break;\n\n }\n\n }\n\n\n\n state.toDecryptInput.value 
= encryptedText;\n", "file_path": "scripts/index.js", "rank": 12, "score": 35543.39420093096 }, { "content": "let state = {\n\n 'selectedCipher': '',\n\n 'toEncryptInput': document.getElementById('toEncrypt'),\n\n 'toDecryptInput': document.getElementById('toDecrypt'),\n\n 'cipherOptionsDiv': document.getElementById('cipherOptions')\n", "file_path": "scripts/index.js", "rank": 13, "score": 17616.88640059212 }, { "content": "const onDecrypt = () => {\n\n const decryptionInput = state.toDecryptInput.value;\n\n\n\n let decryptedText = '';\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n decryptedText = ceasarCipher.decrypt(decryptionInput);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n decryptedText = columnarTransposition.decrypt(decryptionInput);\n\n break;\n\n }\n\n }\n\n\n\n state.toEncryptInput.value = decryptedText;\n", "file_path": "scripts/index.js", "rank": 14, "score": 17616.88640059212 }, { "content": "const clearInputs = () => {\n\n state.toEncryptInput.value = '';\n\n state.toDecryptInput.value = '';\n", "file_path": "scripts/index.js", "rank": 15, "score": 16899.700026712988 }, { "content": "const onChangeCipher = (e) => {\n\n clearChildren(state.cipherOptionsDiv);\n\n clearInputs();\n\n\n\n onSelectCipher(e.target.value);\n", "file_path": "scripts/index.js", "rank": 16, "score": 16899.700026712988 }, { "content": "const clearChildren = (node) => {\n\n while (node.firstChild) {\n\n node.removeChild(node.lastChild);\n\n }\n", "file_path": "scripts/index.js", "rank": 17, "score": 16899.700026712988 }, { "content": "const onSelectCipher = (selectedCipher) => {\n\n state.selectedCipher = selectedCipher;\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n ceasarCipher.renderOptions(state.cipherOptionsDiv);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n columnarTransposition.renderOptions(state.cipherOptionsDiv);\n\n break;\n\n }\n\n }\n", "file_path": 
"scripts/index.js", "rank": 18, "score": 16899.700026712988 }, { "content": "const onClickClear = () => {\n\n clearInputs();\n", "file_path": "scripts/index.js", "rank": 19, "score": 16899.700026712988 }, { "content": "import init from './embedo.js';\n\nimport * as ceasarCipher from './ceasarCipher.js';\n\nimport * as columnarTransposition from './columnarTransposition.js';\n\n\n\nlet state = {\n\n 'selectedCipher': '',\n\n 'toEncryptInput': document.getElementById('toEncrypt'),\n\n 'toDecryptInput': document.getElementById('toDecrypt'),\n\n 'cipherOptionsDiv': document.getElementById('cipherOptions')\n\n};\n\n\n\nconst clearChildren = (node) => {\n\n while (node.firstChild) {\n\n node.removeChild(node.lastChild);\n\n }\n\n};\n\n\n\nconst onClickClear = () => {\n\n clearInputs();\n\n};\n\n\n\nconst clearInputs = () => {\n\n state.toEncryptInput.value = '';\n\n state.toDecryptInput.value = '';\n\n};\n\n\n\nconst onChangeCipher = (e) => {\n\n clearChildren(state.cipherOptionsDiv);\n\n clearInputs();\n\n\n\n onSelectCipher(e.target.value);\n\n};\n\n\n\nconst onSelectCipher = (selectedCipher) => {\n\n state.selectedCipher = selectedCipher;\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n ceasarCipher.renderOptions(state.cipherOptionsDiv);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n columnarTransposition.renderOptions(state.cipherOptionsDiv);\n\n break;\n\n }\n\n }\n\n};\n\n\n\nconst onEncrypt = () => {\n\n const encryptionInput = state.toEncryptInput.value;\n\n\n\n let encryptedText = '';\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n encryptedText = ceasarCipher.encrypt(encryptionInput);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n encryptedText = columnarTransposition.encrypt(encryptionInput);\n\n break;\n\n }\n\n }\n\n\n\n state.toDecryptInput.value = encryptedText;\n\n};\n\n\n\nconst onDecrypt = () => {\n\n const decryptionInput = 
state.toDecryptInput.value;\n\n\n\n let decryptedText = '';\n\n\n\n switch (state.selectedCipher) {\n\n case ceasarCipher.CipherName: {\n\n decryptedText = ceasarCipher.decrypt(decryptionInput);\n\n break;\n\n }\n\n case columnarTransposition.CipherName: {\n\n decryptedText = columnarTransposition.decrypt(decryptionInput);\n\n break;\n\n }\n\n }\n\n\n\n state.toEncryptInput.value = decryptedText;\n\n};\n\n\n\ninit().then(() => {\n\n document.getElementById('encryptButton').onclick = onEncrypt;\n\n document.getElementById('decryptButton').onclick = onDecrypt;\n\n\n\n document.getElementById('clearButton').onclick = onClickClear;\n\n\n\n document.getElementById('cipherSelect').onchange = onChangeCipher;\n\n onSelectCipher(document.getElementById('cipherSelect')[0].value);\n\n});\n", "file_path": "scripts/index.js", "rank": 20, "score": 13582.11827103726 }, { "content": "pub mod util;\n\n\n\nmod ceasar_cipher;\n\nmod columnar_transposition;\n\n\n\nuse wasm_bindgen::prelude::*;\n\n\n\n#[wasm_bindgen]\n", "file_path": "src/lib.rs", "rank": 27, "score": 2.1535523938226264 }, { "content": "use crate::util;\n\n\n", "file_path": "src/ceasar_cipher.rs", "rank": 28, "score": 1.6805506054402268 }, { "content": "const LOWER_DIFF: u32 = 32;\n\n\n", "file_path": "src/util.rs", "rank": 29, "score": 1.5072483915961492 } ]
Rust
experiments/src/bin/graphs.rs
petrosagg/differential-dataflow
2e38abbb62aedf02cb9b6b5debb73c29b6e97dbb
extern crate rand; extern crate timely; extern crate differential_dataflow; use std::rc::Rc; use rand::{Rng, SeedableRng, StdRng}; use timely::dataflow::*; use differential_dataflow::input::Input; use differential_dataflow::Collection; use differential_dataflow::operators::*; use differential_dataflow::trace::Trace; use differential_dataflow::operators::arrange::ArrangeByKey; use differential_dataflow::operators::arrange::ArrangeBySelf; use differential_dataflow::trace::implementations::spine_fueled::Spine; type Node = usize; use differential_dataflow::trace::implementations::ord::OrdValBatch; type GraphTrace = Spine<Node, Node, (), isize, Rc<OrdValBatch<Node, Node, (), isize>>>; fn main() { let nodes: usize = std::env::args().nth(1).unwrap().parse().unwrap(); let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap(); timely::execute_from_args(std::env::args().skip(3), move |worker| { let index = worker.index(); let peers = worker.peers(); let timer = ::std::time::Instant::now(); let (mut graph, mut trace) = worker.dataflow(|scope| { let (graph_input, graph) = scope.new_collection(); let graph_indexed = graph.arrange_by_key(); (graph_input, graph_indexed.trace) }); let seed: &[_] = &[1, 2, 3, index]; let mut rng1: StdRng = SeedableRng::from_seed(seed); if index == 0 { println!("performing workload on random graph with {} nodes, {} edges:", nodes, edges); } let worker_edges = edges/peers + if index < (edges % peers) { 1 } else { 0 }; for _ in 0 .. 
worker_edges { graph.insert((rng1.gen_range(0, nodes) as Node, rng1.gen_range(0, nodes) as Node)); } graph.close(); while worker.step() { } if index == 0 { println!("{:?}\tgraph loaded", timer.elapsed()); } let mut roots = worker.dataflow(|scope| { let (roots_input, roots) = scope.new_collection(); reach(&mut trace, roots); roots_input }); if index == 0 { roots.insert(0); } roots.close(); while worker.step() { } if index == 0 { println!("{:?}\treach complete", timer.elapsed()); } let mut roots = worker.dataflow(|scope| { let (roots_input, roots) = scope.new_collection(); bfs(&mut trace, roots); roots_input }); if index == 0 { roots.insert(0); } roots.close(); while worker.step() { } if index == 0 { println!("{:?}\tbfs complete", timer.elapsed()); } }).unwrap(); } use differential_dataflow::operators::arrange::TraceAgent; type TraceHandle = TraceAgent<GraphTrace>; fn reach<G: Scope<Timestamp = ()>> ( graph: &mut TraceHandle, roots: Collection<G, Node> ) -> Collection<G, Node> { let graph = graph.import(&roots.scope()); roots.iterate(|inner| { let graph = graph.enter(&inner.scope()); let roots = roots.enter(&inner.scope()); graph.join_core(&inner.arrange_by_self(), |_src,&dst,&()| Some(dst)) .concat(&roots) .distinct_total() }) } fn bfs<G: Scope<Timestamp = ()>> ( graph: &mut TraceHandle, roots: Collection<G, Node> ) -> Collection<G, (Node, u32)> { let graph = graph.import(&roots.scope()); let roots = roots.map(|r| (r,0)); roots.iterate(|inner| { let graph = graph.enter(&inner.scope()); let roots = roots.enter(&inner.scope()); graph.join_map(&inner, |_src,&dest,&dist| (dest, dist+1)) .concat(&roots) .reduce(|_key, input, output| output.push((*input[0].0,1))) }) }
extern crate rand; extern crate timely; extern crate differential_dataflow; use std::rc::Rc; use rand::{Rng, SeedableRng, StdRng}; use timely::dataflow::*; use differential_dataflow::input::Input; use differential_dataflow::Collection; use differential_dataflow::operators::*; use differential_dataflow::trace::Trace; use differential_dataflow::operators::arrange::ArrangeByKey; use differential_dataflow::operators::arrange::ArrangeBySelf; use differential_dataflow::trace::implementations::spine_fueled::Spine; type Node = usize; use differential_dataflow::trace::implementations::ord::OrdValBatch; type GraphTrace = Spine<Node, Node, (), isize, Rc<OrdValBatch<Node, Node, (), isize>>>; fn main() { let nodes: usize = std::env::args().nth(1).unwrap().parse().unwrap(); let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap(); timely::execute_from_args(std::env::args().skip(3), move |worker| { let index = worker.index(); let peers = worker.peers(); let timer = ::std::time::Instant::now(); let (mut graph, mut trace) = worker.dataflow(|scope| { let (graph_input, graph) = scope.new_collection(); let graph_indexed = graph.arrange_by_key(); (graph_input, graph_indexed.trace) }); let seed: &[_] = &[1, 2, 3, index]; let mut rng1: StdRng = SeedableRng::from_seed(seed); if index == 0 { println!("performing workload on random graph with {} nodes, {} edges:", nodes, edges); } let worker_edges = edges/peers + if index < (edges % peers) { 1 } else { 0 }; for _ in 0 .. 
worker_edges { graph.insert((rng1.gen_range(0, nodes) as Node, rng1.gen_range(0, nodes) as Node)); } graph.close(); while worker.step() { } if index == 0 { println!("{:?}\tgraph loaded", timer.elapsed()); } let mut roots = worker.dataflow(|scope| { let (roots_input, roots) = scope.new_collection(); reach(&mut trace, roots); roots_input }); if index == 0 { roots.insert(0); } roots.close(); while worker.step() { } if index == 0 { println!("{:?}\treach complete", timer.elapsed()); } let mut roots = worker.dataflow(|scope| { let (roots_input, roots) = scope.new_collection(); bfs(&mut trace, roots); roots_input }); if index == 0 { roots.insert(0); } roots.close(); while worker.step() { } if index == 0 { println!("{:?}\tbfs complete", timer.elapsed()); } }).unwrap(); } use differential_dataflow::operators::arrange::TraceAgent; type TraceHandle = TraceAgent<GraphTrace>; fn reach<G: Scope<Timestamp = ()>> ( graph: &mut TraceHandle, roots: Collection<G, Node> ) -> Collection<G, Node> { let graph = graph.import(&roots.scope()); roots.iterate(|inner| { let graph = graph.enter(&inne
fn bfs<G: Scope<Timestamp = ()>> ( graph: &mut TraceHandle, roots: Collection<G, Node> ) -> Collection<G, (Node, u32)> { let graph = graph.import(&roots.scope()); let roots = roots.map(|r| (r,0)); roots.iterate(|inner| { let graph = graph.enter(&inner.scope()); let roots = roots.enter(&inner.scope()); graph.join_map(&inner, |_src,&dest,&dist| (dest, dist+1)) .concat(&roots) .reduce(|_key, input, output| output.push((*input[0].0,1))) }) }
r.scope()); let roots = roots.enter(&inner.scope()); graph.join_core(&inner.arrange_by_self(), |_src,&dst,&()| Some(dst)) .concat(&roots) .distinct_total() }) }
function_block-function_prefixed
[ { "content": "fn dump_cursor<Tr>(round: u32, index: usize, trace: &mut Tr)\n\nwhere\n\n Tr: TraceReader,\n\n Tr::Key: Debug + Clone,\n\n Tr::Val: Debug + Clone,\n\n Tr::Time: Debug + Clone,\n\n Tr::R: Debug + Clone,\n\n{\n\n let (mut cursor, storage) = trace.cursor();\n\n for ((k, v), diffs) in cursor.to_vec(&storage).iter() {\n\n println!(\"round {}, w{} {:?}:{:?}: {:?}\", round, index, *k, *v, diffs);\n\n }\n\n}\n", "file_path": "examples/cursors.rs", "rank": 0, "score": 301682.7572724212 }, { "content": "fn assign(node: usize, root: usize, reverse: &HashMap<usize, Vec<usize>>, component: &mut HashMap<usize, usize>) {\n\n if !component.contains_key(&node) {\n\n component.insert(node, root);\n\n if let Some(edges) = reverse.get(&node) {\n\n for &edge in edges.iter() {\n\n assign(edge, root, reverse, component);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 1, "score": 285284.3660070641 }, { "content": "type TraceSpine = Spine<usize, usize, RootTime, isize, Rc<TraceBatch>>;\n\npub type TraceHandle = TraceAgent<usize, usize, RootTime, isize, TraceSpine>;\n\n\n\n/// Arguments provided to each shared library to help build their dataflows and register their results.\n\npub type Environment<'a, 'b> = (\n\n &'a mut Child<'b, Worker<Allocator>,usize>,\n\n &'a mut TraceHandler,\n\n &'a mut ProbeHandle<RootTime>,\n\n &'a Instant,\n\n &'a [String]\n\n);\n\n\n\n/// A wrapper around types that keep their source libraries alive.\n\n///\n\n/// This type is meant to be a smart pointer for a type `T` that needs to keep\n\n/// a `Library` alive, perhaps because its methods would call in to the library.\n\n/// The type should have a specified drop order (viz RFC 1857) which guarentees\n\n/// that the shared library reference drops only after the element itself is\n\n/// dropped. 
It also implements `Deref` and `DerefMut` to provide the experience\n\n/// of a `T` itself.\n", "file_path": "server/src/lib.rs", "rank": 2, "score": 268018.24871702574 }, { "content": "type TraceBatch = OrdValBatch<usize, usize, RootTime, isize>;\n", "file_path": "server/src/lib.rs", "rank": 3, "score": 263082.020148068 }, { "content": "type Node = u32;\n\n\n", "file_path": "src/trace/implementations/graph.rs", "rank": 4, "score": 249343.75661425097 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, usize)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}\n", "file_path": "tests/bfs.rs", "rank": 5, "score": 248389.92295202703 }, { "content": "fn visit(node: usize, forward: &HashMap<usize, Vec<usize>>, visited: &mut HashSet<usize>, list: &mut Vec<usize>) {\n\n if !visited.contains(&node) {\n\n visited.insert(node);\n\n if let Some(edges) = forward.get(&node) {\n\n for &edge in edges.iter() {\n\n visit(edge, forward, visited, list)\n\n }\n\n }\n\n list.push(node);\n\n }\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 6, "score": 243662.79936214344 }, { "content": "/// Enables logging of differential dataflow events.\n\npub fn enable<A, W>(worker: &mut timely::worker::Worker<A>, writer: W) -> Option<Box<dyn std::any::Any+'static>>\n\nwhere\n\n A: timely::communication::Allocate,\n\n W: std::io::Write+'static,\n\n{\n\n let writer = ::timely::dataflow::operators::capture::EventWriter::new(writer);\n\n 
let mut logger = ::timely::logging::BatchLogger::new(writer);\n\n worker\n\n .log_register()\n\n .insert::<DifferentialEvent,_>(\"differential/arrange\", move |time, data| logger.publish_batch(time, data))\n\n}\n\n\n\n/// Possible different differential events.\n\n#[derive(Debug, Clone, Abomonation, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum DifferentialEvent {\n\n /// Batch creation.\n\n Batch(BatchEvent),\n\n /// Merge start and stop events.\n\n Merge(MergeEvent),\n\n /// Batch dropped when trace dropped.\n", "file_path": "src/logging.rs", "rank": 7, "score": 233140.2729517994 }, { "content": "fn read_u10(string: &str) -> [u8;10] { let mut buff = [0;10]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 8, "score": 231454.57601472197 }, { "content": "fn read_u25(string: &str) -> [u8;25] { let mut buff = [0;25]; copy_from_to(string.as_bytes(), &mut buff); buff }\n\n\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 25]>>);\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 40]>>);\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 128]>>);\n\n\n\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Debug,Hash,Default)]\n\npub struct AbomonationWrapper<T> {\n\n pub element: T,\n\n}\n\n\n\nuse ::std::ops::Deref;\n\nimpl<T> Deref for AbomonationWrapper<T> {\n\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n\n &self.element\n\n }\n\n}\n\n\n\nunsafe_abomonate!(Part);\n", "file_path": "tpchlike/src/types.rs", "rank": 9, "score": 231454.57601472197 }, { "content": "fn read_u01(string: &str) -> [u8;1] { let mut buff = [0;1]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 10, "score": 231454.57601472197 }, { "content": "fn read_u15(string: &str) -> [u8;15] { let mut buff = [0;15]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 11, "score": 231454.57601472197 }, { "content": "type Edge = 
(Node, Node);\n", "file_path": "examples/cursors.rs", "rank": 12, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/bfs.rs", "rank": 13, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n", "file_path": "examples/pagerank.rs", "rank": 14, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/stackoverflow.rs", "rank": 15, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/interpreted.rs", "rank": 16, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n", "file_path": "examples/graspan.rs", "rank": 17, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[test] fn bfs_10_20_1000() { test_sizes(10, 20, 1000, Config::process(3)); }\n\n#[test] fn bfs_100_200_10() { test_sizes(100, 200, 10, Config::process(3)); }\n\n#[test] fn bfs_100_2000_1() { test_sizes(100, 2000, 1, Config::process(3)); }\n\n\n", "file_path": "tests/bfs.rs", "rank": 18, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[test] fn scc_10_20_1000() { test_sizes(10, 20, 1000, Config::process(3)); }\n\n#[test] fn scc_100_200_10() { test_sizes(100, 200, 10, Config::process(3)); }\n\n#[test] fn scc_100_2000_1() { test_sizes(100, 2000, 1, Config::process(3)); }\n\n\n", "file_path": "tests/scc.rs", "rank": 19, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/capture-test.rs", "rank": 20, "score": 222835.71298371206 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[derive(Abomonation, Copy, Ord, PartialOrd, Eq, PartialEq, Debug, Clone, Serialize, Deserialize, Hash)]\n\npub struct MinSum {\n\n value: u32,\n\n}\n\n\n\nuse std::ops::{AddAssign, Mul};\n\nuse differential_dataflow::difference::Semigroup;\n\n\n\nimpl<'a> AddAssign<&'a Self> for MinSum {\n\n fn add_assign(&mut self, rhs: &'a Self) {\n\n self.value = 
std::cmp::min(self.value, rhs.value);\n\n }\n\n}\n\n\n\nimpl Mul<Self> for MinSum {\n\n type Output = Self;\n\n fn mul(self, rhs: Self) -> Self {\n\n MinSum { value: self.value + rhs.value }\n\n }\n\n}\n\n\n\nimpl Semigroup for MinSum {\n\n fn is_zero(&self) -> bool { false }\n\n}\n\n\n", "file_path": "examples/monoid-bfs.rs", "rank": 21, "score": 222835.71298371206 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 22, "score": 222835.71298371206 }, { "content": "fn load1<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap())\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 23, "score": 215977.72634790995 }, { "content": "type Time = usize;\n", "file_path": "examples/stackoverflow.rs", "rank": 24, "score": 215198.04432104863 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}", "file_path": "examples/stackoverflow.rs", "rank": 25, "score": 214965.4824300761 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s 
steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}", "file_path": "examples/bfs.rs", "rank": 26, "score": 214965.4824300761 }, { "content": "type Node = usize;\n", "file_path": "tests/scc.rs", "rank": 27, "score": 214602.78449930373 }, { "content": "type Node = usize;\n", "file_path": "tests/bfs.rs", "rank": 28, "score": 214602.7844993037 }, { "content": "type Node = usize;\n", "file_path": "examples/graspan.rs", "rank": 29, "score": 214602.78449930373 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}\n\n\n\n\n\npub mod kafka {\n", "file_path": "examples/capture-test.rs", "rank": 30, "score": 212228.71007635424 }, { "content": "fn load2<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", 
prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 31, "score": 211574.36479402508 }, { "content": "type ArrangedIndex<K, T> = TraceAgent<DefaultValTrace<K, T, usize, isize>>;\n\n\n\npub struct ArrangementsInScope<G: Scope<Timestamp=usize>> {\n\n customer: ArrangedScope<G, usize, Customer>,\n\n nation: ArrangedScope<G, usize, Nation>,\n\n order: ArrangedScope<G, usize, Order>,\n\n part: ArrangedScope<G, usize, Part>,\n\n partsupp: ArrangedScope<G, (usize, usize), PartSupp>,\n\n region: ArrangedScope<G, usize, Region>,\n\n supplier: ArrangedScope<G, usize, Supplier>,\n\n}\n\n\n\npub struct Arrangements {\n\n arrange: bool,\n\n customer: ArrangedIndex<usize, Customer>,\n\n nation: ArrangedIndex<usize, Nation>,\n\n order: ArrangedIndex<usize, Order>,\n\n part: ArrangedIndex<usize, Part>,\n\n partsupp: ArrangedIndex<(usize, usize), PartSupp>,\n\n region: ArrangedIndex<usize, Region>,\n", "file_path": "tpchlike/src/lib.rs", "rank": 32, "score": 211215.77571160402 }, { "content": "// Type aliases for differential execution.\n\ntype Time = u32;\n", "file_path": "doop/src/main.rs", "rank": 33, "score": 211008.44828328883 }, { "content": "/// Introduces differential options to a timely configuration.\n\npub fn configure(config: &mut timely::WorkerConfig, options: &Config) {\n\n if let Some(effort) = options.idle_merge_effort {\n\n config.set(\"differential/idle_merge_effort\".to_string(), effort);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 34, "score": 210616.34569801207 }, { "content": "fn interpret<G: Scope>(edges: &Collection<G, Edge>, relations: &[(usize, usize)]) -> Collection<G, Vec<Node>>\n\nwhere G::Timestamp: Lattice+Hash+Ord {\n\n\n\n // arrange the edge relation three ways.\n\n let 
as_self = edges.arrange_by_self();\n\n let forward = edges.arrange_by_key();\n\n let reverse = edges.map_in_place(|x| ::std::mem::swap(&mut x.0, &mut x.1))\n\n .arrange_by_key();\n\n\n\n let mut field_present = ::std::collections::HashSet::new();\n\n\n\n let mut results = edges.map(|(x,y)| vec![x, y]);\n\n\n\n field_present.insert(0);\n\n field_present.insert(1);\n\n\n\n for &(src, dst) in relations.iter() {\n\n\n\n let src_present = field_present.contains(&src);\n\n let dst_present = field_present.contains(&dst);\n", "file_path": "examples/interpreted.rs", "rank": 35, "score": 209222.81150483442 }, { "content": "fn load3<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 36, "score": 207348.744491083 }, { "content": "fn load4<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol, Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 37, "score": 203290.20877422162 }, { "content": "fn triangles<G: Scope>(edges: &Collection<G, Edge>) -> Collection<G, (Node, Node, Node)>\n\nwhere 
G::Timestamp: Lattice+Hash+Ord {\n\n\n\n // only use forward-pointing edges.\n\n let edges = edges.filter(|&(src, dst)| src < dst);\n\n\n\n // arrange the edge relation three ways.\n\n let as_self = edges.arrange_by_self();\n\n let forward = edges.arrange_by_key();\n\n let reverse = edges.map_in_place(|x| ::std::mem::swap(&mut x.0, &mut x.1))\n\n .arrange_by_key();\n\n\n\n // arrange the count of extensions from each source.\n\n let counts = edges.map(|(src, _dst)| src)\n\n .arrange_by_self();\n\n\n\n // extract ((src, dst), idx) tuples with weights equal to the number of extensions.\n\n let cand_count1 = forward.join_core(&counts, |&src, &dst, &()| Some(((src, dst), 1)));\n\n let cand_count2 = reverse.join_core(&counts, |&dst, &src, &()| Some(((src, dst), 2)));\n\n\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 38, "score": 200829.5973722318 }, { "content": "fn load5<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol, Symbol, Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 39, "score": 199388.94465529663 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge, MinSum>, roots: &Collection<G, Node, MinSum>) -> Collection<G, Node, MinSum>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n roots.scope().iterative::<u32,_,_>(|scope| {\n\n\n\n use 
differential_dataflow::operators::iterate::SemigroupVariable;\n\n use differential_dataflow::operators::reduce::ReduceCore;\n\n use differential_dataflow::trace::implementations::ord::OrdKeySpine as DefaultKeyTrace;\n\n\n\n\n\n use timely::order::Product;\n\n let variable = SemigroupVariable::new(scope, Product::new(Default::default(), 1));\n\n\n\n let edges = edges.enter(scope);\n\n let roots = roots.enter(scope);\n\n\n\n let result =\n\n variable\n\n .map(|n| (n,()))\n", "file_path": "examples/monoid-bfs.rs", "rank": 40, "score": 197548.57929519942 }, { "content": "fn load6<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol, Symbol, Symbol, Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 41, "score": 195635.90014654596 }, { "content": "fn _reachability<G: Scope>(edges: &Collection<G, Edge>, nodes: &Collection<G, (Node, Node)>) -> Collection<G, Edge>\n\nwhere G::Timestamp: Lattice+Ord+Hash {\n\n\n\n edges.filter(|_| false)\n\n .iterate(|inner| {\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter_at(&inner.scope(), |r| 256 * (64 - (r.0 as u64).leading_zeros() as u64));\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d,*l))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n\n\n })\n\n}\n", "file_path": "tests/scc.rs", "rank": 42, "score": 194494.05172426775 }, { "content": "fn test_sizes(nodes: usize, edges: usize, rounds: usize, config: Config) 
{\n\n\n\n let root_list = vec![(1, 0, 1)];\n\n let mut edge_list = Vec::new();\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions\n\n let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions\n\n\n\n for _ in 0 .. edges {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 0, 1));\n\n }\n\n\n\n for round in 1 .. rounds {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), round, 1));\n\n edge_list.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), round,-1));\n\n }\n\n\n\n let mut results1 = bfs_sequential(root_list.clone(), edge_list.clone());\n\n let mut results2 = bfs_differential(root_list.clone(), edge_list.clone(), config);\n", "file_path": "tests/bfs.rs", "rank": 43, "score": 193493.8801828561 }, { "content": "fn test_sizes(nodes: usize, edges: usize, rounds: usize, config: Config) {\n\n\n\n let mut edge_list = Vec::new();\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions\n\n let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions\n\n\n\n for _ in 0 .. edges {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 0, 1));\n\n }\n\n\n\n for round in 1 .. 
rounds {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), round, 1));\n\n edge_list.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), round,-1));\n\n }\n\n\n\n // for thing in edge_list.iter() {\n\n // println!(\"input: {:?}\", thing);\n\n // }\n", "file_path": "tests/scc.rs", "rank": 44, "score": 193493.88018285614 }, { "content": "fn load7<'a>(index: usize, prefix: &str, filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=((Symbol, Symbol, Symbol, Symbol, Symbol, Symbol, Symbol), Time, Diff)>+'a {\n\n read_file(&format!(\"{}{}\", prefix, filename))\n\n .filter(move |_| index == 0)\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n let mut elts = line.split('\\t');\n\n ((\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n interner.intern(elts.next().unwrap()),\n\n ), 0, 1)\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 45, "score": 192022.71120903429 }, { "content": "#[no_mangle]\n\npub fn build((dataflow, handles, probe, timer, args): Environment) -> Result<(), String> {\n\n\n\n // This call either starts the production of random graph edges.\n\n //\n\n // The arguments should be\n\n //\n\n // <graph_name> <nodes> <edges> <rate>\n\n //\n\n // where <rate> is the target number of edge changes per second. The source\n\n // will play out changes to keep up with this, and timestamp them as if they\n\n // were emitted at the correct time. The timestamps use the system `timer`,\n\n // but only start whenever the method is called. 
This means that the data are\n\n // not deterministic, but if you subtract the elapsed time between system start\n\n // up and method call, they should be deterministic.\n\n //\n\n // The method also registers a capability with name `<graph_name>-capability`,\n\n // and will continue to execute until this capability is dropped from `handles`.\n\n // To terminate the operator it is sufficient to drop the capability, as the\n\n // operator holds only a weak reference to it.\n\n //\n", "file_path": "server/dataflows/random_graph/src/lib.rs", "rank": 46, "score": 190945.3846163027 }, { "content": "fn copy_from_to(src: &[u8], dst: &mut [u8]) {\n\n let limit = if src.len() < dst.len() { src.len() } else { dst.len() };\n\n for index in 0 .. limit {\n\n dst[index] = src[index];\n\n }\n\n}\n\n\n", "file_path": "tpchlike/src/types.rs", "rank": 47, "score": 181795.43084477726 }, { "content": "#[inline(never)]\n\npub fn advance<T, F: Fn(&T)->bool>(slice: &[T], function: F) -> usize {\n\n\n\n // start with no advance\n\n let mut index = 0;\n\n if index < slice.len() && function(&slice[index]) {\n\n\n\n // advance in exponentially growing steps.\n\n let mut step = 1;\n\n while index + step < slice.len() && function(&slice[index + step]) {\n\n index += step;\n\n step = step << 1;\n\n }\n\n\n\n // advance in exponentially shrinking steps.\n\n step = step >> 1;\n\n while step > 0 {\n\n if index + step < slice.len() && function(&slice[index + step]) {\n\n index += step;\n\n }\n\n step = step >> 1;\n\n }\n\n\n\n index += 1;\n\n }\n\n\n\n index\n\n}\n", "file_path": "src/trace/implementations/graph.rs", "rank": 48, "score": 179528.36734127984 }, { "content": "fn main() {\n\n\n\n let mut args = std::env::args().skip(1);\n\n let prefix = args.next().expect(\"must supply path to facts\");\n\n let batch: Time = args.next().unwrap_or(\"1\".to_string()).parse().expect(\"batch must be an integer\");\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = 
::std::time::Instant::now();\n\n let index = worker.index();\n\n\n\n let mut probe = ProbeHandle::new();\n\n\n\n // For interning strings.\n\n let interner = Rc::new(RefCell::new(StringInterner::new()));\n\n\n\n let mut inputs = (\n\n Vec::new(),\n\n Vec::new(),\n\n Vec::new(),\n", "file_path": "doop/src/main.rs", "rank": 49, "score": 177152.9297868865 }, { "content": "fn _strongly_connected<G: Scope>(graph: &Collection<G, Edge>) -> Collection<G, Edge>\n\nwhere G::Timestamp: Lattice+Ord+Hash {\n\n graph.iterate(|inner| {\n\n let edges = graph.enter(&inner.scope());\n\n let trans = edges.map_in_place(|x| mem::swap(&mut x.0, &mut x.1));\n\n _trim_edges(&_trim_edges(inner, &edges), &trans)\n\n })\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 50, "score": 175987.9073013793 }, { "content": "type Diff = isize;\n\n\n", "file_path": "doop/src/main.rs", "rank": 51, "score": 172365.61393865137 }, { "content": "fn main() {\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n // An input for (x,y,z) placements.\n\n let mut xyzs = InputSession::<_,_,isize>::new();\n\n\n\n // Inputs for (x,y) and (x,z) goals.\n\n let mut xy_goal = InputSession::new();\n\n let mut xz_goal = InputSession::new();\n\n\n\n let mut probe = Handle::new();\n\n\n\n // Dataflow to validate input against goals.\n\n worker.dataflow(|scope| {\n\n\n\n // Introduce inputs to the scope.\n\n let xyzs = xyzs.to_collection(scope);\n\n let xy_goal = xy_goal.to_collection(scope);\n", "file_path": "examples/projekt.rs", "rank": 52, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let pre: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n\n let inspect: bool = 
std::env::args().nth(5).unwrap() == \"inspect\";\n\n\n\n\n\n // define a new timely dataflow computation.\n\n timely::execute_from_args(std::env::args().skip(6), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n let mut probe = timely::dataflow::operators::probe::Handle::new();\n\n\n\n // create a dataflow managing an ever-changing edge collection.\n", "file_path": "examples/arrange.rs", "rank": 53, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let mut args = std::env::args();\n\n args.next();\n\n\n\n let nodes: u32 = args.next().unwrap().parse().unwrap();\n\n let edges: usize = args.next().unwrap().parse().unwrap();\n\n let batch: u32 = args.next().unwrap().parse().unwrap();\n\n let inspect: bool = args.next().unwrap() == \"inspect\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args().skip(5), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n // create a degree counting differential dataflow\n\n let (mut input, probe) = worker.dataflow::<u32,_,_>(|scope| {\n", "file_path": "examples/hello.rs", "rank": 54, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let mut args = std::env::args().skip(1);\n\n\n\n let filename = args.next().expect(\"must specify a filename\");\n\n let root: Node = args.next().expect(\"must specify root node\").parse().expect(\"root node must be an integer\");\n\n let batch: usize = args.next().expect(\"must specify batching\").parse().expect(\"batch must be an integer\");\n\n let compression: Time = args.next().expect(\"must specify compression\").parse().expect(\"compression must be an integer\");\n\n let inspect: bool = args.next().expect(\"must specify inspect bit\").parse().expect(\"inspect must be boolean\");\n\n\n\n // define a new computational scope, in which to 
run BFS\n\n timely::execute_from_args(args, move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n // define BFS dataflow; return handles to roots and edges inputs\n\n let mut roots = InputSession::new();\n\n let mut graph = InputSession::new();\n", "file_path": "examples/stackoverflow.rs", "rank": 55, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let large: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let small: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let total: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n\n\n\n // define a new timely dataflow computation.\n\n timely::execute_from_args(std::env::args().skip(3), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let mut probe = timely::dataflow::operators::probe::Handle::new();\n\n\n\n // create a dataflow managing an ever-changing edge collection.\n\n \tlet mut handle = worker.dataflow(|scope| {\n\n let (handle, input) = scope.new_collection();\n\n input.distinct().probe_with(&mut probe);\n\n handle\n\n });\n", "file_path": "examples/compact.rs", "rank": 56, "score": 169558.44478451117 }, { "content": "fn main() {\n\n let rounds: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n\n\n let mut summaries = timely::execute_from_args(std::env::args(), move |worker| {\n\n let mut probe = Handle::new();\n\n let (mut graph, mut graph_trace) = worker.dataflow(|scope| {\n\n let (graph_input, graph) = scope.new_collection();\n\n\n\n let graph_arr = graph.map(|(x, y): Edge| (x, (x, y))).arrange_by_key();\n\n let graph_trace = graph_arr.trace.clone();\n\n\n\n /* Be sure to attach probe to arrangements we want to enumerate;\n\n * so we know when all updates for a given epoch have been added to the arrangement. 
*/\n\n graph_arr\n\n .as_collection(|_, v| *v)\n\n .consolidate()\n\n //.inspect(move |x| println!(\"{:?}\", x))\n\n .probe_with(&mut probe);\n\n\n\n /* Return `graph_trace`, so we can obtain cursor for the arrangement at runtime. */\n", "file_path": "examples/cursors.rs", "rank": 57, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: u32 = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let rounds: u32 = std::env::args().nth(4).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(5).unwrap() == \"inspect\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n if let Ok(addr) = ::std::env::var(\"DIFFERENTIAL_LOG_ADDR\") {\n\n\n\n eprintln!(\"enabled DIFFERENTIAL logging to {}\", addr);\n\n\n\n if let Ok(stream) = ::std::net::TcpStream::connect(&addr) {\n\n let writer = ::timely::dataflow::operators::capture::EventWriter::new(stream);\n\n let mut logger = ::timely::logging::BatchLogger::new(writer);\n\n worker.log_register().insert::<DifferentialEvent,_>(\"differential/arrange\", move |time, data|\n\n logger.publish_batch(time, data)\n", "file_path": "examples/bfs.rs", "rank": 58, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(4).unwrap() == \"inspect\";\n\n let open_loop: bool = std::env::args().nth(5).unwrap() == \"open-loop\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args().skip(6), move |worker| {\n\n\n\n let timer = 
::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n // create a degree counting differential dataflow\n\n let (mut input, probe) = worker.dataflow(|scope| {\n\n\n\n // create edge input, count a few ways.\n", "file_path": "examples/degrees.rs", "rank": 59, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = worker.timer();\n\n let mut probe = Handle::new();\n\n\n\n let (mut nodes, mut edges, mut times) = worker.dataflow::<usize,_,_>(|scope| {\n\n\n\n let (node_input, nodes) = scope.new_collection();\n\n let (edge_input, edges) = scope.new_collection();\n\n let (time_input, times) = scope.new_collection();\n\n\n\n // Detect cycles that do not increment timestamps.\n\n find_cycles::<_,usize>(nodes.clone(), edges.clone())\n\n .inspect(move |x| println!(\"{:?}\\tcycles: {:?}\", timer.elapsed(), x))\n\n .probe_with(&mut probe);\n\n\n\n // Summarize all paths to inputs of operator zero.\n\n summarize::<_,usize>(nodes.clone(), edges.clone())\n", "file_path": "examples/progress.rs", "rank": 60, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n // Used to determine if our output has caught up to our input.\n\n let mut probe: ProbeHandle<Pair<isize, isize>> = ProbeHandle::new();\n\n\n\n let (mut input, mut capability, mut trace) =\n\n worker.dataflow(|scope| {\n\n\n\n // Create \"unordered\" inputs which provide their capabilities to users.\n\n // Here \"capability\" is a technical term, which is \"permission to send\n\n // data or after a certain timestamp\". 
When this capability is dropped\n\n // or downgraded, the input communicates that its possible timestamps\n\n // have advanced, and the system can start to make progress.\n\n let ((input, capability), data) = scope.new_unordered_input();\n\n\n\n let arrangement =\n\n data.as_collection()\n\n .count()\n", "file_path": "examples/multitemporal.rs", "rank": 61, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n // // What you might do if you used GraphMMap:\n\n let graph = GraphMMap::new(&filename);\n\n let nodes = graph.nodes();\n\n let edges = (0..nodes).filter(move |node| node % peers == index)\n\n .flat_map(|node| graph.edges(node).iter().cloned().map(move |dst| ((node as u32, dst))))\n\n .map(|(src, dst)| ((src, dst), Default::default(), 1))\n\n .collect::<Vec<_>>();\n\n\n\n println!(\"loaded {} nodes, {} edges\", nodes, edges.len());\n\n\n\n worker.dataflow::<(),_,_>(|scope| {\n\n interpret(&Collection::new(edges.to_stream(scope)), &[(0,2), (1,2)]);\n\n });\n\n\n\n }).unwrap();\n\n}\n\n\n", "file_path": "examples/interpreted.rs", "rank": 62, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n let query_filename = std::env::args().nth(1).expect(\"Argument 1 (query filename) missing.\");\n\n let query_text = std::fs::read_to_string(query_filename).expect(\"Failed to read query file\");\n\n let query = Query::build_from(query_text.lines());\n\n\n\n let mut relation_map = worker.dataflow::<(),_,_>(|scope| query.render_in(scope));\n\n\n\n if index == 0 { println!(\"{:?}:\\tDataflow assembled for {:?}\", 
timer.elapsed(), query); }\n\n\n\n // Build a dataflow to report final sizes.\n\n worker.dataflow(|scope| {\n\n for (name, data) in relation_map.iter_mut() {\n", "file_path": "examples/graspan.rs", "rank": 63, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n let iterations: Iter = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let inspect = std::env::args().nth(3) == Some(\"inspect\".to_string());\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n let timer = worker.timer();\n\n\n\n let mut input = InputSession::new();\n\n let mut probe = ProbeHandle::new();\n\n\n\n worker.dataflow::<Time,_,_>(|scope| {\n\n let edges = input.to_collection(scope);\n\n pagerank(iterations, &edges)\n\n .filter(move |_| inspect)\n", "file_path": "examples/pagerank.rs", "rank": 64, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let keys: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let batch: usize = 10_000;\n\n\n\n // This computation demonstrates in-place accumulation of arbitrarily large \n\n // volumes of input data, consuming space bounded by the number of distinct keys.\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n let mut input = worker.dataflow::<(), _, _>(|scope| {\n\n let (input, data) = scope.new_collection::<_, isize>();\n\n data.consolidate();\n\n input\n\n });\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n", "file_path": "examples/accumulate.rs", "rank": 65, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n \n\n // define BFS 
dataflow; return handles to roots and edges inputs\n\n let mut probe = Handle::new();\n\n let (mut rules, mut graph) = worker.dataflow(|scope| {\n\n\n\n let (rule_input, rules) = scope.new_collection();\n\n let (edge_input, graph) = scope.new_collection();\n\n\n\n let result = graph.iterate(|inner| {\n\n\n\n let rules = rules.enter(&inner.scope());\n\n let arranged = inner.arrange_by_key();\n\n\n\n // rule 0: remove self-loops:\n\n let freeze0 = freeze(&arranged, |t| {\n\n if t.inner <= 0 {\n", "file_path": "examples/freeze.rs", "rank": 66, "score": 169558.44478451117 }, { "content": "type Time = u32;\n", "file_path": "examples/cursors.rs", "rank": 67, "score": 168841.99539805713 }, { "content": "type Time = u32;\n", "file_path": "examples/pagerank.rs", "rank": 68, "score": 168841.99539805713 }, { "content": "type Node = u32;\n", "file_path": "examples/stackoverflow.rs", "rank": 69, "score": 168246.73557631223 }, { "content": "type Node = u32;\n", "file_path": "examples/cursors.rs", "rank": 70, "score": 168246.73557631223 }, { "content": "type Node = u32;\n", "file_path": "examples/bfs.rs", "rank": 71, "score": 168246.73557631223 }, { "content": "type Node = u32;\n", "file_path": "examples/interpreted.rs", "rank": 72, "score": 168246.73557631223 }, { "content": "type Node = u32;\n", "file_path": "examples/pagerank.rs", "rank": 73, "score": 168246.73557631223 }, { "content": "// A differential version of item-based collaborative filtering using Jaccard similarity for\n\n// comparing item interaction histories. 
See Algorithm 1 in https://ssc.io/pdf/amnesia.pdf\n\n// for details.\n\nfn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let mut interactions_input = InputSession::new();\n\n\n\n let probe = worker.dataflow(|scope| {\n\n\n\n let interactions = interactions_input.to_collection(scope);\n\n\n\n // Find all users with less than 500 interactions\n\n let users_with_enough_interactions = interactions\n\n .map(|(user, _item)| user)\n\n .count_total()\n\n .filter(move |(_user, count): &(u32, isize)| *count < 500)\n\n .map(|(user, _count)| user);\n\n\n\n // Remove users with too many interactions\n\n let remaining_interactions = interactions\n\n .semijoin(&users_with_enough_interactions);\n", "file_path": "examples/itembased_cf.rs", "rank": 74, "score": 166740.8485840942 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n let batching = std::env::args().nth(2).unwrap().parse::<usize>().unwrap();\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n // let timer = std::time::Instant::now();\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n // // What you might do if you used GraphMMap:\n\n let graph = GraphMMap::new(&filename);\n\n let nodes = graph.nodes();\n\n let edges = (0..nodes).filter(move |node| node % peers == index)\n\n .flat_map(|node| graph.edges(node).iter().cloned().map(move |dst| ((node as u32, dst))))\n\n .map(|(src, dst)| ((src, dst), Default::default(), 1))\n\n .collect::<Vec<_>>();\n", "file_path": "dogsdogsdogs/examples/dogsdogsdogs.rs", "rank": 75, "score": 166736.1804086232 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n\n\n timely::execute_from_args(std::env::args().skip(1), move |worker| {\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n // // What you 
might do if you used GraphMMap:\n\n let graph = GraphMMap::new(&filename);\n\n let nodes = graph.nodes();\n\n let edges = (0..nodes).filter(move |node| node % peers == index)\n\n .flat_map(|node| graph.edges(node).iter().cloned().map(move |dst| ((node as u32, dst))))\n\n .map(|(src, dst)| ((src, dst), Default::default(), 1))\n\n .collect::<Vec<_>>();\n\n\n\n println!(\"loaded {} nodes, {} edges\", nodes, edges.len());\n\n\n\n worker.dataflow::<(),_,_>(|scope| {\n\n triangles(&Collection::new(edges.to_stream(scope))).inner.count().inspect(|x| println!(\"{:?}\", x));\n\n });\n\n\n\n }).unwrap();\n\n}\n\n\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 76, "score": 166736.1804086232 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: u32 = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let topic = std::env::args().nth(4).unwrap();\n\n\n\n let write = std::env::args().any(|x| x == \"write\");\n\n let read = std::env::args().any(|x| x == \"read\");\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n // define BFS dataflow; return handles to roots and edges inputs\n\n let mut probe = Handle::new();\n\n let (mut roots, mut graph, _write_token, _read_token) = worker.dataflow(|scope| {\n\n\n\n let (root_input, roots) = scope.new_collection();\n", "file_path": "examples/capture-test.rs", "rank": 77, "score": 166736.1804086232 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let weight: u32 = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let batch: u32 = std::env::args().nth(4).unwrap().parse().unwrap();\n\n let rounds: u32 = 
std::env::args().nth(5).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(6).unwrap() == \"inspect\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n // define BFS dataflow; return handles to roots and edges inputs\n\n let mut probe = Handle::new();\n\n let (mut roots, mut graph) = worker.dataflow(|scope| {\n\n\n\n let (root_input, roots) = scope.new_collection();\n\n let (edge_input, graph) = scope.new_collection();\n", "file_path": "examples/monoid-bfs.rs", "rank": 78, "score": 166736.1804086232 }, { "content": "// Returns a weighted collection in which the weight of each node is proportional\n\n// to its PageRank in the input graph `edges`.\n\nfn pagerank<G>(iters: Iter, edges: &Collection<G, Edge, Diff>) -> Collection<G, Node, Diff>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice,\n\n{\n\n // initialize many surfers at each node.\n\n let nodes =\n\n edges.flat_map(|(x,y)| Some(x).into_iter().chain(Some(y)))\n\n .distinct();\n\n\n\n // snag out-degrees for each node.\n\n let degrs = edges.map(|(src,_dst)| src)\n\n .count();\n\n\n\n edges.scope().iterative::<Iter,_,_>(|inner| {\n\n\n\n // Bring various collections into the scope.\n\n let edges = edges.enter(inner);\n\n let nodes = nodes.enter(inner);\n\n let degrs = degrs.enter(inner);\n", "file_path": "examples/pagerank.rs", "rank": 79, "score": 166211.89706630982 }, { "content": "type Node = u32;\n", "file_path": "examples/capture-test.rs", "rank": 80, "score": 165567.92715485458 }, { "content": "type Node = u32;\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 81, "score": 165567.92715485458 }, { "content": "type Node = u32;\n", "file_path": "examples/monoid-bfs.rs", "rank": 82, "score": 165567.92715485458 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n 
let batching = std::env::args().nth(2).unwrap().parse::<usize>().unwrap();\n\n let inspect = std::env::args().any(|x| x == \"inspect\");\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let timer = std::time::Instant::now();\n\n let graph = GraphMMap::new(&filename);\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n let mut probe = Handle::new();\n\n\n\n let mut input = worker.dataflow::<usize,_,_>(|scope| {\n\n\n\n let (edges_input, edges) = scope.new_collection();\n", "file_path": "dogsdogsdogs/examples/delta_query.rs", "rank": 83, "score": 164077.04698340688 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n let batching = std::env::args().nth(2).unwrap().parse::<usize>().unwrap();\n\n let inspect = std::env::args().any(|x| x == \"inspect\");\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let timer = std::time::Instant::now();\n\n let graph = GraphMMap::new(&filename);\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n let mut probe = Handle::new();\n\n\n\n let mut input = worker.dataflow::<usize,_,_>(|scope| {\n\n\n\n let (edges_input, edges) = scope.new_collection();\n", "file_path": "dogsdogsdogs/examples/delta_query_wcoj.rs", "rank": 84, "score": 161567.29798697718 }, { "content": "/// Sorts and consolidates a slice, returning the valid prefix length.\n\npub fn consolidate_slice<T: Ord, R: Semigroup>(slice: &mut [(T, R)]) -> usize {\n\n\n\n // We could do an insertion-sort like initial scan which builds up sorted, consolidated runs.\n\n // In a world where there are not many results, we may never even need to call in to merge sort.\n\n slice.sort_by(|x,y| x.0.cmp(&y.0));\n\n\n\n // Counts the number of distinct known-non-zero accumulations. Indexes the write location.\n\n let mut offset = 0;\n\n for index in 1 .. 
slice.len() {\n\n\n\n // The following unsafe block elides various bounds checks, using the reasoning that `offset`\n\n // is always strictly less than `index` at the beginning of each iteration. This is initially\n\n // true, and in each iteration `offset` can increase by at most one (whereas `index` always\n\n // increases by one). As `index` is always in bounds, and `offset` starts at zero, it too is\n\n // always in bounds.\n\n //\n\n // LLVM appears to struggle to optimize out Rust's split_at_mut, which would prove disjointness\n\n // using run-time tests.\n\n unsafe {\n\n\n", "file_path": "src/consolidation.rs", "rank": 85, "score": 158895.34830724995 }, { "content": "/// Sorts and consolidate `vec[offset..]`.\n\n///\n\n/// This method will sort `vec[offset..]` and then consolidate runs of more than one entry with\n\n/// identical first elements by accumulating the second elements of the pairs. Should the final\n\n/// accumulation be zero, the element is discarded.\n\npub fn consolidate_from<T: Ord, R: Semigroup>(vec: &mut Vec<(T, R)>, offset: usize) {\n\n let length = consolidate_slice(&mut vec[offset..]);\n\n vec.truncate(offset + length);\n\n}\n\n\n", "file_path": "src/consolidation.rs", "rank": 86, "score": 153561.14397005027 }, { "content": "type IntegerTrace = OrdValSpine<UnsignedWrapper<u64>, u64, usize, i64>;\n\n\n", "file_path": "tests/trace.rs", "rank": 87, "score": 151505.39705640345 }, { "content": "// Generate synthetic interactions with a skewed distribution\n\nfn generate_interactions<R>(how_many: usize, rng: &mut R) -> Vec<(u32,u32)> where R: Rng {\n\n let mut interactions = Vec::with_capacity(how_many);\n\n\n\n let mut user_sampler = CRP::new(6000.0, 0.35);\n\n let mut item_sampler = CRP::new(6000.0, 0.35);\n\n\n\n for _ in 0 .. 
how_many {\n\n let user = user_sampler.sample(rng);\n\n let item = item_sampler.sample(rng);\n\n interactions.push((user, item));\n\n }\n\n\n\n interactions\n\n}\n", "file_path": "examples/itembased_cf.rs", "rank": 88, "score": 151270.98366186125 }, { "content": "fn load<'a>(filename: &str, interner: Rc<RefCell<StringInterner>>) -> impl Iterator<Item=Vec<Symbol>>+'a {\n\n use ::std::io::{BufReader, BufRead};\n\n use ::std::fs::File;\n\n let file = BufReader::new(File::open(filename).unwrap());\n\n file.lines()\n\n .filter_map(|line| line.ok())\n\n .map(move |line| {\n\n let mut interner = interner.borrow_mut();\n\n line.split('\\t')\n\n .map(move |string| interner.intern(string))\n\n .collect()\n\n })\n\n}\n\n\n", "file_path": "doop/src/main.rs", "rank": 89, "score": 148328.67128585937 }, { "content": "// type PrimitiveType = Type;\n\ntype ReferenceType = Type;\n", "file_path": "doop/src/main.rs", "rank": 90, "score": 147960.5705257153 }, { "content": "/// Sorts and consolidates a slice, returning the valid prefix length.\n\npub fn consolidate_updates_slice<D: Ord, T: Ord, R: Semigroup>(slice: &mut [(D, T, R)]) -> usize {\n\n\n\n // We could do an insertion-sort like initial scan which builds up sorted, consolidated runs.\n\n // In a world where there are not many results, we may never even need to call in to merge sort.\n\n slice.sort_unstable_by(|x,y| (&x.0, &x.1).cmp(&(&y.0, &y.1)));\n\n\n\n // Counts the number of distinct known-non-zero accumulations. Indexes the write location.\n\n let mut offset = 0;\n\n for index in 1 .. slice.len() {\n\n\n\n // The following unsafe block elides various bounds checks, using the reasoning that `offset`\n\n // is always strictly less than `index` at the beginning of each iteration. This is initially\n\n // true, and in each iteration `offset` can increase by at most one (whereas `index` always\n\n // increases by one). 
As `index` is always in bounds, and `offset` starts at zero, it too is\n\n // always in bounds.\n\n //\n\n // LLVM appears to struggle to optimize out Rust's split_at_mut, which would prove disjointness\n\n // using run-time tests.\n\n unsafe {\n\n\n", "file_path": "src/consolidation.rs", "rank": 91, "score": 145630.84312444145 }, { "content": "/// Reports the number of elements satisfing the predicate.\n\n///\n\n/// This methods *relies strongly* on the assumption that the predicate\n\n/// stays false once it becomes false, a joint property of the predicate\n\n/// and the slice. This allows `advance` to use exponential search to\n\n/// count the number of elements in time logarithmic in the result.\n\npub fn advance<T, F: Fn(&T)->bool>(slice: &[T], function: F) -> usize {\n\n\n\n let small_limit = 8;\n\n\n\n // Exponential seach if the answer isn't within `small_limit`.\n\n if slice.len() > small_limit && function(&slice[small_limit]) {\n\n\n\n // start with no advance\n\n let mut index = small_limit + 1;\n\n if index < slice.len() && function(&slice[index]) {\n\n\n\n // advance in exponentially growing steps.\n\n let mut step = 1;\n\n while index + step < slice.len() && function(&slice[index + step]) {\n\n index += step;\n\n step = step << 1;\n\n }\n\n\n\n // advance in exponentially shrinking steps.\n\n step = step >> 1;\n", "file_path": "src/trace/layers/mod.rs", "rank": 92, "score": 145352.54484221904 }, { "content": "#[inline(never)]\n\npub fn advance<T, F: Fn(&T)->bool>(slice: &[T], function: F) -> usize {\n\n\n\n\t// start with no advance\n\n\tlet mut index = 0;\n\n\tif index < slice.len() && function(&slice[index]) {\n\n\n\n\t\t// advance in exponentially growing steps.\n\n\t\tlet mut step = 1;\n\n\t\twhile index + step < slice.len() && function(&slice[index + step]) {\n\n\t\t\tindex += step;\n\n\t\t\tstep = step << 1;\n\n\t\t}\n\n\n\n\t\t// advance in exponentially shrinking steps.\n\n\t\tstep = step >> 1;\n\n\t\twhile step > 0 {\n\n\t\t\tif index + step 
< slice.len() && function(&slice[index + step]) {\n\n\t\t\t\tindex += step;\n\n\t\t\t}\n\n\t\t\tstep = step >> 1;\n\n\t\t}\n\n\n\n\t\tindex += 1;\n\n\t}\n\n\n\n\tindex\n\n}\n", "file_path": "src/trace/implementations/vec.rs", "rank": 93, "score": 145343.73097004968 }, { "content": "type Type = Symbol;\n", "file_path": "doop/src/main.rs", "rank": 94, "score": 145160.01689609667 }, { "content": "/// Returns pairs (node, dist) indicating distance of each node from a root.\n\npub fn bfs<G, N>(edges: &Collection<G, (N,N)>, roots: &Collection<G, N>) -> Collection<G, (N,u32)>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n{\n\n use operators::arrange::arrangement::ArrangeByKey;\n\n let edges = edges.arrange_by_key();\n\n bfs_arranged(&edges, roots)\n\n}\n\n\n\nuse crate::trace::TraceReader;\n\nuse crate::operators::arrange::Arranged;\n\n\n", "file_path": "src/algorithms/graphs/bfs.rs", "rank": 95, "score": 143628.04259754592 }, { "content": "#[inline]\n\npub fn _advance<T, F: Fn(&T)->bool>(slice: &[T], function: F) -> usize {\n\n\n\n\t// start with no advance\n\n\tlet mut index = 0;\n\n\tif index < slice.len() && function(&slice[index]) {\n\n\n\n\t\t// advance in exponentially growing steps.\n\n\t\tlet mut step = 1;\n\n\t\twhile index + step < slice.len() && function(&slice[index + step]) {\n\n\t\t\tindex += step;\n\n\t\t\tstep = step << 1;\n\n\t\t}\n\n\n\n\t\t// advance in exponentially shrinking steps.\n\n\t\tstep = step >> 1;\n\n\t\twhile step > 0 {\n\n\t\t\tif index + step < slice.len() && function(&slice[index + step]) {\n\n\t\t\t\tindex += step;\n\n\t\t\t}\n\n\t\t\tstep = step >> 1;\n\n\t\t}\n\n\n\n\t\tindex += 1;\n\n\t}\n\n\n\n\tindex\n\n}\n", "file_path": "src/trace/implementations/merge_batcher.rs", "rank": 96, "score": 143219.05965029498 }, { "content": "/// Returns pairs (node, dist) indicating distance of each node from a root.\n\npub fn bfs_arranged<G, N, Tr>(edges: &Arranged<G, Tr>, roots: &Collection<G, N>) -> 
Collection<G, (N, u32)>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n Tr: TraceReader<Key=N, Val=N, Time=G::Timestamp, R=isize>+Clone+'static,\n\n Tr::Batch: crate::trace::BatchReader<N, N, G::Timestamp, Tr::R>+'static,\n\n Tr::Cursor: crate::trace::Cursor<N, N, G::Timestamp, Tr::R>+'static,\n\n{\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_core(&edges, |_k,l,d| Some((d.clone(), l+1)))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((s[0].0.clone(), 1)))\n\n })\n\n}", "file_path": "src/algorithms/graphs/bfs.rs", "rank": 97, "score": 141244.05750045372 }, { "content": "/// Sorts and consolidate `vec[offset..]`.\n\n///\n\n/// This method will sort `vec[offset..]` and then consolidate runs of more than one entry with\n\n/// identical first two elements by accumulating the third elements of the triples. Should the final\n\n/// accumulation be zero, the element is discarded.\n\npub fn consolidate_updates_from<D: Ord, T: Ord, R: Semigroup>(vec: &mut Vec<(D, T, R)>, offset: usize) {\n\n let length = consolidate_updates_slice(&mut vec[offset..]);\n\n vec.truncate(offset + length);\n\n}\n\n\n", "file_path": "src/consolidation.rs", "rank": 98, "score": 141131.02578606442 }, { "content": "type ArrayType = ReferenceType;\n", "file_path": "doop/src/main.rs", "rank": 99, "score": 138747.00604761273 } ]
Rust
boa/src/exec/mod.rs
RageKnify/boa
7d318f0582a2aac5a01af762635000c686452ab4
mod array; mod block; mod break_node; mod call; mod conditional; mod declaration; mod field; mod identifier; mod iteration; mod new; mod object; mod operator; mod return_smt; mod spread; mod statement_list; mod switch; mod throw; mod try_node; #[cfg(test)] mod tests; use crate::{ syntax::ast::{constant::Const, node::Node}, BoaProfiler, Context, Result, Value, }; pub trait Executable { fn run(&self, interpreter: &mut Context) -> Result<Value>; } #[derive(Debug, Eq, PartialEq)] pub(crate) enum InterpreterState { Executing, Return, Break(Option<String>), Continue(Option<String>), } #[derive(Debug)] pub struct Interpreter { state: InterpreterState, } impl Default for Interpreter { fn default() -> Self { Self::new() } } impl Interpreter { pub fn new() -> Self { Self { state: InterpreterState::Executing, } } #[inline] pub(crate) fn set_current_state(&mut self, new_state: InterpreterState) { self.state = new_state } #[inline] pub(crate) fn get_current_state(&self) -> &InterpreterState { &self.state } } impl Executable for Node { fn run(&self, interpreter: &mut Context) -> Result<Value> { let _timer = BoaProfiler::global().start_event("Executable", "exec"); match *self { Node::Const(Const::Null) => Ok(Value::null()), Node::Const(Const::Num(num)) => Ok(Value::rational(num)), Node::Const(Const::Int(num)) => Ok(Value::integer(num)), Node::Const(Const::BigInt(ref num)) => Ok(Value::from(num.clone())), Node::Const(Const::Undefined) => Ok(Value::Undefined), Node::Const(Const::String(ref value)) => Ok(Value::string(value.to_string())), Node::Const(Const::Bool(value)) => Ok(Value::boolean(value)), Node::Block(ref block) => block.run(interpreter), Node::Identifier(ref identifier) => identifier.run(interpreter), Node::GetConstField(ref get_const_field_node) => get_const_field_node.run(interpreter), Node::GetField(ref get_field) => get_field.run(interpreter), Node::Call(ref call) => call.run(interpreter), Node::WhileLoop(ref while_loop) => while_loop.run(interpreter), 
Node::DoWhileLoop(ref do_while) => do_while.run(interpreter), Node::ForLoop(ref for_loop) => for_loop.run(interpreter), Node::If(ref if_smt) => if_smt.run(interpreter), Node::ConditionalOp(ref op) => op.run(interpreter), Node::Switch(ref switch) => switch.run(interpreter), Node::Object(ref obj) => obj.run(interpreter), Node::ArrayDecl(ref arr) => arr.run(interpreter), Node::FunctionDecl(ref decl) => decl.run(interpreter), Node::FunctionExpr(ref function_expr) => function_expr.run(interpreter), Node::ArrowFunctionDecl(ref decl) => decl.run(interpreter), Node::BinOp(ref op) => op.run(interpreter), Node::UnaryOp(ref op) => op.run(interpreter), Node::New(ref call) => call.run(interpreter), Node::Return(ref ret) => ret.run(interpreter), Node::Throw(ref throw) => throw.run(interpreter), Node::Assign(ref op) => op.run(interpreter), Node::VarDeclList(ref decl) => decl.run(interpreter), Node::LetDeclList(ref decl) => decl.run(interpreter), Node::ConstDeclList(ref decl) => decl.run(interpreter), Node::Spread(ref spread) => spread.run(interpreter), Node::This => { Ok(interpreter.realm().environment.get_this_binding()) } Node::Try(ref try_node) => try_node.run(interpreter), Node::Break(ref break_node) => break_node.run(interpreter), Node::Continue(ref continue_node) => continue_node.run(interpreter), } } }
mod array; mod block; mod break_node; mod call; mod conditional; mod declaration; mod field; mod identifier; mod iteration; mod new; mod object; mod operator; mod return_smt; mod spread; mod statement_list; mod switch; mod throw; mod try_node; #[cfg(test)] mod tests; use crate::{ syntax::ast::{constant::Const, node::Node}, BoaProfiler, Context, Result, Value, }; pub trait Executable { fn run(&self, interpreter: &mut Context) -> Result<Value>; } #[derive(Debug, Eq, PartialEq)] pub(crate) enum InterpreterState { Executing, Return, Break(Option<String>), Continue(Option<String>), } #[derive(Debug)] pub struct Interpreter { state: InterpreterState, } impl Default for Interpreter { fn default() -> Self { Self::new() } } impl Interpreter { pub fn new() -> Self { Self { state: InterpreterState::Executing, } } #[inline] pub(crate) fn set_current_state(&mut self, new_state: InterpreterState) { self.state = new_state } #[inline] pub(crate) fn get_current_state(&self) -> &InterpreterState { &self.state } } impl Executable for Node { fn run(&self, interpreter: &mut Context) -> Result<Value> { let _timer = BoaProfiler::global().start_event("Executable", "exec"); match *self { Node::Const(Const::Null) => Ok(Value::null()), Node::Const(Const::Num(num)) => Ok(Value::rational(num)), Node::Const(Const::Int(num)) => Ok(Value::integer(num)), Node::Const(Const::BigInt(ref num)) => Ok(Value::from(num.clone())), Node::Const(Const::Undefined) => Ok(Value::Undefined), Node::Const(Const::String(ref value)) => Ok(Value::string(value.to_string())), Node::Const(Const::Bool(value)) => Ok(Value::boolean(value)), Node::Block(ref block) => block.run(interpreter), Node::Identifier(ref identifier) => identifier.run(interpreter), Node::GetConstField(ref get_const_field_node) => get_const_field_node.run(interpreter), Node::GetField(ref get_field) => get_field.run(interpreter), Node::Call(ref call) => call.run(interpreter), Node::WhileLoop(ref while_loo
}
p) => while_loop.run(interpreter), Node::DoWhileLoop(ref do_while) => do_while.run(interpreter), Node::ForLoop(ref for_loop) => for_loop.run(interpreter), Node::If(ref if_smt) => if_smt.run(interpreter), Node::ConditionalOp(ref op) => op.run(interpreter), Node::Switch(ref switch) => switch.run(interpreter), Node::Object(ref obj) => obj.run(interpreter), Node::ArrayDecl(ref arr) => arr.run(interpreter), Node::FunctionDecl(ref decl) => decl.run(interpreter), Node::FunctionExpr(ref function_expr) => function_expr.run(interpreter), Node::ArrowFunctionDecl(ref decl) => decl.run(interpreter), Node::BinOp(ref op) => op.run(interpreter), Node::UnaryOp(ref op) => op.run(interpreter), Node::New(ref call) => call.run(interpreter), Node::Return(ref ret) => ret.run(interpreter), Node::Throw(ref throw) => throw.run(interpreter), Node::Assign(ref op) => op.run(interpreter), Node::VarDeclList(ref decl) => decl.run(interpreter), Node::LetDeclList(ref decl) => decl.run(interpreter), Node::ConstDeclList(ref decl) => decl.run(interpreter), Node::Spread(ref spread) => spread.run(interpreter), Node::This => { Ok(interpreter.realm().environment.get_this_binding()) } Node::Try(ref try_node) => try_node.run(interpreter), Node::Break(ref break_node) => break_node.run(interpreter), Node::Continue(ref continue_node) => continue_node.run(interpreter), } }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn this_time_value(value: &Value, ctx: &mut Context) -> Result<Date> {\n\n if let Value::Object(ref object) = value {\n\n if let ObjectData::Date(ref date) = object.borrow().data {\n\n return Ok(*date);\n\n }\n\n }\n\n Err(ctx.construct_type_error(\"'this' is not a Date\"))\n\n}\n", "file_path": "boa/src/builtins/date/mod.rs", "rank": 0, "score": 357037.48474957317 }, { "content": "/// Create new function `[[Construct]]`\n\n///\n\n// This gets called when a new Function() is created.\n\npub fn make_function(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> {\n\n this.set_data(ObjectData::Function(Function::BuiltIn(\n\n BuiltInFunction(|_, _, _| Ok(Value::undefined())),\n\n FunctionFlags::CALLABLE | FunctionFlags::CONSTRUCTABLE,\n\n )));\n\n Ok(this.clone())\n\n}\n\n\n", "file_path": "boa/src/builtins/function/mod.rs", "rank": 1, "score": 353623.60809848504 }, { "content": "/// This represents the `console` formatter.\n\npub fn formatter(data: &[Value], ctx: &mut Context) -> Result<String> {\n\n let target = data.get(0).cloned().unwrap_or_default().to_string(ctx)?;\n\n\n\n match data.len() {\n\n 0 => Ok(String::new()),\n\n 1 => Ok(target.to_string()),\n\n _ => {\n\n let mut formatted = String::new();\n\n let mut arg_index = 1;\n\n let mut chars = target.chars();\n\n while let Some(c) = chars.next() {\n\n if c == '%' {\n\n let fmt = chars.next().unwrap_or('%');\n\n match fmt {\n\n /* integer */\n\n 'd' | 'i' => {\n\n let arg = data\n\n .get(arg_index)\n\n .cloned()\n\n .unwrap_or_default()\n", "file_path": "boa/src/builtins/console/mod.rs", "rank": 2, "score": 343234.7963965172 }, { "content": "#[inline]\n\npub fn init(interpreter: &mut Context) {\n\n let globals = [\n\n // The `Function` global must be initialized before other types.\n\n function::init,\n\n Object::init,\n\n Array::init,\n\n BigInt::init,\n\n Boolean::init,\n\n Date::init,\n\n Json::init,\n\n Map::init,\n\n Math::init,\n\n Number::init,\n\n 
RegExp::init,\n\n String::init,\n\n Symbol::init,\n\n Console::init,\n\n // Global error types.\n\n Error::init,\n\n RangeError::init,\n", "file_path": "boa/src/builtins/mod.rs", "rank": 3, "score": 340734.68303004955 }, { "content": "#[inline]\n\npub fn init(interpreter: &mut Context) -> (&'static str, Value) {\n\n let global = interpreter.global_object();\n\n let _timer = BoaProfiler::global().start_event(\"function\", \"init\");\n\n let prototype = Value::new_object(Some(global));\n\n\n\n let function_object =\n\n make_constructor_fn(\"Function\", 1, make_function, global, prototype, true, true);\n\n\n\n (\"Function\", function_object)\n\n}\n", "file_path": "boa/src/builtins/function/mod.rs", "rank": 4, "score": 338863.86140603875 }, { "content": "#[test]\n\nfn default_not_taken_switch() {\n\n let scenario = r#\"\n\n let a = 5;\n\n\n\n switch (a) {\n\n case 5:\n\n a = 150;\n\n break;\n\n default:\n\n a = 70;\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"150\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 6, "score": 261862.8602969392 }, { "content": "#[test]\n\nfn default_taken_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n\n\n switch (a) {\n\n case 5:\n\n a = 150;\n\n break;\n\n default:\n\n a = 70;\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"70\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 7, "score": 261862.8602969392 }, { "content": "#[test]\n\nfn test_result_of_empty_block() {\n\n let scenario = \"{}\";\n\n assert_eq!(&exec(scenario), \"undefined\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 8, "score": 261144.09365715494 }, { "content": "#[test]\n\nfn for_loop_return() {\n\n let scenario = r#\"\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 9, "score": 255289.82843878862 }, { "content": "#[test]\n\nfn array_field_set() {\n\n let element_changes = r#\"\n\n let m = [1, 2, 3];\n\n m[1] = 5;\n\n m[1]\n\n \"#;\n\n 
assert_eq!(&exec(element_changes), \"5\");\n\n\n\n let length_changes = r#\"\n\n let m = [1, 2, 3];\n\n m[10] = 52;\n\n m.length\n\n \"#;\n\n assert_eq!(&exec(length_changes), \"11\");\n\n\n\n let negative_index_wont_affect_length = r#\"\n\n let m = [1, 2, 3];\n\n m[-11] = 5;\n\n m.length\n\n \"#;\n\n assert_eq!(&exec(negative_index_wont_affect_length), \"3\");\n\n\n\n let non_num_key_wont_affect_length = r#\"\n\n let m = [1, 2, 3];\n\n m[\"magic\"] = 5;\n\n m.length\n\n \"#;\n\n assert_eq!(&exec(non_num_key_wont_affect_length), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 10, "score": 255111.44482839562 }, { "content": "#[test]\n\nfn object_field_set() {\n\n let scenario = r#\"\n\n let m = {};\n\n m['key'] = 22;\n\n m['key']\n\n \"#;\n\n assert_eq!(&exec(scenario), \"22\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 11, "score": 255084.24944580358 }, { "content": "#[test]\n\nfn function_declaration_returns_undefined() {\n\n let scenario = r#\"\n", "file_path": "boa/src/exec/tests.rs", "rank": 12, "score": 249586.98632660974 }, { "content": "#[test]\n\nfn for_loop_continue_out_of_switch() {\n\n let scenario = r#\"\n\n var a = 0, b = 0, c = 0;\n\n for (let i = 0; i < 3; i++) {\n\n a++;\n\n switch (i) {\n\n case 0:\n\n continue;\n\n c++;\n\n case 1:\n\n continue;\n\n case 5:\n\n c++;\n\n }\n\n b++;\n\n }\n\n [a, b, c]\n\n \"#;\n\n assert_eq!(&exec(scenario), \"[ 3, 1, 0 ]\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 13, "score": 249555.26039999418 }, { "content": "#[test]\n\nfn identifier_on_global_object_undefined() {\n\n let scenario = r#\"\n\n try {\n\n bar;\n\n } catch (err) {\n\n err.message\n\n }\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"\\\"bar is not defined\\\"\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 14, "score": 249404.8915987378 }, { "content": "pub fn new_object_environment(object: Value, environment: Option<Environment>) -> Environment {\n\n 
Gc::new(GcCell::new(Box::new(ObjectEnvironmentRecord {\n\n bindings: object,\n\n outer_env: environment,\n\n /// Object Environment Records created for with statements (13.11)\n\n /// can provide their binding object as an implicit this value for use in function calls.\n\n /// The capability is controlled by a withEnvironment Boolean value that is associated\n\n /// with each object Environment Record. By default, the value of withEnvironment is false\n\n /// for any object Environment Record.\n\n with_environment: false,\n\n })))\n\n}\n\n\n", "file_path": "boa/src/environment/lexical_environment.rs", "rank": 15, "score": 248762.0844104546 }, { "content": "#[test]\n\nfn assignmentoperator_rhs_throws_error() {\n\n let scenario = r#\"\n\n try {\n\n let a;\n\n a += b\n\n } catch (err) {\n\n err.toString()\n\n }\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"\\\"ReferenceError: b is not defined\\\"\");\n\n}\n", "file_path": "boa/src/exec/operator/tests.rs", "rank": 16, "score": 244218.82026144036 }, { "content": "#[test]\n\nfn check_post_state() {\n\n let mut engine = Context::new();\n\n\n\n let brk: Break = Break::new(\"label\");\n\n\n\n brk.run(&mut engine).unwrap();\n\n\n\n assert_eq!(\n\n engine.executor().get_current_state(),\n\n &InterpreterState::Break(Some(\"label\".to_string()))\n\n );\n\n}\n", "file_path": "boa/src/exec/break_node/tests.rs", "rank": 17, "score": 244075.88723480213 }, { "content": "/// Arguments.\n\n///\n\n/// <https://tc39.es/ecma262/#sec-createunmappedargumentsobject>\n\npub fn create_unmapped_arguments_object(arguments_list: &[Value]) -> Value {\n\n let len = arguments_list.len();\n\n let mut obj = Object::default();\n\n // Set length\n\n let length = Property::data_descriptor(\n\n len.into(),\n\n Attribute::WRITABLE | Attribute::NON_ENUMERABLE | Attribute::PERMANENT,\n\n );\n\n // Define length as a property\n\n obj.define_own_property(\"length\", length);\n\n let mut index: usize = 0;\n\n while index < len {\n\n let val = 
arguments_list.get(index).expect(\"Could not get argument\");\n\n let prop = Property::data_descriptor(\n\n val.clone(),\n\n Attribute::WRITABLE | Attribute::ENUMERABLE | Attribute::CONFIGURABLE,\n\n );\n\n\n\n obj.insert_property(index, prop);\n\n index += 1;\n\n }\n\n\n\n Value::from(obj)\n\n}\n\n\n", "file_path": "boa/src/builtins/function/mod.rs", "rank": 18, "score": 243325.69797778988 }, { "content": "/// Utility to join multiple Nodes into a single string.\n\nfn join_nodes<N>(f: &mut fmt::Formatter<'_>, nodes: &[N]) -> fmt::Result\n\nwhere\n\n N: Display,\n\n{\n\n let mut first = true;\n\n for e in nodes {\n\n if !first {\n\n f.write_str(\", \")?;\n\n }\n\n first = false;\n\n Display::fmt(e, f)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// \"Formal parameter\" is a fancy way of saying \"function parameter\".\n\n///\n\n/// In the declaration of a function, the parameters must be identifiers,\n\n/// not any value like numbers, strings, or objects.\n\n///```text\n", "file_path": "boa/src/syntax/ast/node/mod.rs", "rank": 19, "score": 241283.65533507115 }, { "content": "fn assert_throws(engine: &mut Context, src: &str, error_type: &str) {\n\n let result = forward(engine, src);\n\n assert!(result.contains(error_type));\n\n}\n", "file_path": "boa/src/builtins/bigint/tests.rs", "rank": 20, "score": 234239.30652085572 }, { "content": "/// This trait allows Rust types to be passed around as objects.\n\n///\n\n/// This is automatically implemented, when a type implements `Debug`, `Any` and `Trace`.\n\npub trait NativeObject: Debug + Any + Trace {\n\n /// Convert the Rust type which implements `NativeObject` to a `&dyn Any`.\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n /// Convert the Rust type which implements `NativeObject` to a `&mut dyn Any`.\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\n\n\nimpl<T: Any + Debug + Trace> NativeObject for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self as &dyn Any\n\n }\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any {\n\n self as &mut dyn 
Any\n\n }\n\n}\n\n\n\n/// The internal representation of an JavaScript object.\n\n#[derive(Debug, Trace, Finalize)]\n", "file_path": "boa/src/object/mod.rs", "rank": 21, "score": 228419.22638083706 }, { "content": "fn for_loop_execution(c: &mut Criterion) {\n\n // Create new Realm and interpreter.\n\n let mut engine = Context::new();\n\n\n\n // Parse the AST nodes.\n\n let nodes = Parser::new(FOR_LOOP.as_bytes()).parse_all().unwrap();\n\n\n\n // Execute the parsed nodes, passing them through a black box, to avoid over-optimizing by the compiler\n\n c.bench_function(\"For loop (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic FIBONACCI: &str = include_str!(\"bench_scripts/fibonacci.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 22, "score": 225214.32752933505 }, { "content": "fn arithmetic_operations(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(ARITHMETIC_OPERATIONS.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n c.bench_function(\"Arithmetic operations (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic CLEAN_JS: &str = include_str!(\"bench_scripts/clean_js.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 23, "score": 225029.06655030104 }, { "content": "fn array_pop(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(ARRAY_POP.as_bytes()).parse_all().unwrap();\n\n\n\n c.bench_function(\"Array pop (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic STRING_CONCAT: &str = include_str!(\"bench_scripts/string_concat.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 24, "score": 224808.754898185 }, { "content": "fn array_creation(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(ARRAY_CREATE.as_bytes()).parse_all().unwrap();\n\n\n\n 
c.bench_function(\"Array creation (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic ARRAY_POP: &str = include_str!(\"bench_scripts/array_pop.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 25, "score": 224808.754898185 }, { "content": "fn array_access(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(ARRAY_ACCESS.as_bytes()).parse_all().unwrap();\n\n\n\n c.bench_function(\"Array access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic ARRAY_CREATE: &str = include_str!(\"bench_scripts/array_create.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 26, "score": 224808.754898185 }, { "content": "fn object_creation(c: &mut Criterion) {\n\n // Create new Realm and interpreter.\n\n let mut engine = Context::new();\n\n\n\n // Parse the AST nodes.\n\n let nodes = Parser::new(OBJECT_CREATION.as_bytes()).parse_all().unwrap();\n\n\n\n // Execute the parsed nodes, passing them through a black box, to avoid over-optimizing by the compiler\n\n c.bench_function(\"Object Creation (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic OBJECT_PROP_ACCESS_CONST: &str = include_str!(\"bench_scripts/object_prop_access_const.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 27, "score": 224781.55951559293 }, { "content": "#[test]\n\nfn string_switch() {\n\n let scenario = r#\"\n\n let a = \"hello\";\n\n\n\n switch (a) {\n\n case \"hello\":\n\n a = \"world\";\n\n break;\n\n default:\n\n a = \"hi\";\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"\\\"world\\\"\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 28, "score": 220918.1755513723 }, { "content": "#[test]\n\nfn no_cases_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n switch (a) {\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), 
\"10\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 29, "score": 220918.17555137232 }, { "content": "#[test]\n\nfn test_conditional_op() {\n\n let scenario = \"1 === 2 ? 'a' : 'b'\";\n\n assert_eq!(&exec(scenario), \"\\\"b\\\"\");\n\n}\n", "file_path": "boa/src/exec/tests.rs", "rank": 30, "score": 220424.67482444123 }, { "content": "fn string_object_access(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(STRING_OBJECT_ACCESS.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n c.bench_function(\"String Object Access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic ARITHMETIC_OPERATIONS: &str = include_str!(\"bench_scripts/arithmetic_operations.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 31, "score": 220201.5514132485 }, { "content": "fn number_object_access(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(NUMBER_OBJECT_ACCESS.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n c.bench_function(\"Number Object Access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic BOOLEAN_OBJECT_ACCESS: &str = include_str!(\"bench_scripts/boolean_object_access.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 32, "score": 220201.5514132485 }, { "content": "fn boolean_object_access(c: &mut Criterion) {\n\n let mut engine = Context::new();\n\n\n\n let nodes = Parser::new(BOOLEAN_OBJECT_ACCESS.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n c.bench_function(\"Boolean Object Access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic STRING_OBJECT_ACCESS: &str = include_str!(\"bench_scripts/string_object_access.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 33, "score": 220201.5514132485 }, { "content": "#[test]\n\nfn is_object() {\n\n let val = 
Value::new_object(None);\n\n assert_eq!(val.is_object(), true);\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 34, "score": 218508.02016138643 }, { "content": "#[test]\n\nfn to_object() {\n\n let mut engine = Context::new();\n\n\n\n assert!(Value::undefined()\n\n .to_object(&mut engine)\n\n .unwrap_err()\n\n .is_object());\n\n assert!(Value::null()\n\n .to_object(&mut engine)\n\n .unwrap_err()\n\n .is_object());\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 35, "score": 218481.3150389026 }, { "content": "#[test]\n\nfn two_case_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n switch (a) {\n\n case 5:\n\n a = 15;\n\n break;\n\n case 10:\n\n a = 20;\n\n break;\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"20\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 36, "score": 216619.615200567 }, { "content": "#[test]\n\nfn no_true_case_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n switch (a) {\n\n case 5:\n\n a = 15;\n\n break;\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"10\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 37, "score": 216619.615200567 }, { "content": "#[test]\n\nfn single_case_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n switch (a) {\n\n case 10:\n\n a = 20;\n\n break;\n\n }\n\n \n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"20\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 38, "score": 216619.615200567 }, { "content": "#[test]\n\nfn bigger_switch_example() {\n\n let expected = [\n\n \"\\\"Mon\\\"\",\n\n \"\\\"Tue\\\"\",\n\n \"\\\"Wed\\\"\",\n\n \"\\\"Thurs\\\"\",\n\n \"\\\"Fri\\\"\",\n\n \"\\\"Sat\\\"\",\n\n \"\\\"Sun\\\"\",\n\n ];\n\n\n\n for (i, val) in expected.iter().enumerate() {\n\n let scenario = format!(\n\n r#\"\n\n let a = {};\n\n let b = \"unknown\";\n\n\n\n switch (a) {{\n\n case 0:\n\n b = \"Mon\";\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 39, "score": 216619.615200567 }, { 
"content": "fn object_prop_access_const(c: &mut Criterion) {\n\n // Create new Realm and interpreter.\n\n let mut engine = Context::new();\n\n\n\n // Parse the AST nodes.\n\n let nodes = Parser::new(OBJECT_PROP_ACCESS_CONST.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n // Execute the parsed nodes, passing them through a black box, to avoid over-optimizing by the compiler\n\n c.bench_function(\"Static Object Property Access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic OBJECT_PROP_ACCESS_DYN: &str = include_str!(\"bench_scripts/object_prop_access_dyn.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 40, "score": 215845.52491736907 }, { "content": "fn object_prop_access_dyn(c: &mut Criterion) {\n\n // Create new Realm and interpreter.\n\n let mut engine = Context::new();\n\n\n\n // Parse the AST nodes.\n\n let nodes = Parser::new(OBJECT_PROP_ACCESS_DYN.as_bytes())\n\n .parse_all()\n\n .unwrap();\n\n\n\n // Execute the parsed nodes, passing them through a black box, to avoid over-optimizing by the compiler\n\n c.bench_function(\"Dynamic Object Property Access (Execution)\", move |b| {\n\n b.iter(|| black_box(&nodes).run(&mut engine).unwrap())\n\n });\n\n}\n\n\n\nstatic REGEXP_LITERAL_CREATION: &str = include_str!(\"bench_scripts/regexp_literal_creation.js\");\n\n\n", "file_path": "boa/benches/exec.rs", "rank": 41, "score": 215845.52491736907 }, { "content": "#[test]\n\nfn spread_with_arguments() {\n\n let mut engine = Context::new();\n\n\n\n let scenario = r#\"\n\n const a = [1, \"test\", 3, 4];\n", "file_path": "boa/src/exec/tests.rs", "rank": 42, "score": 213885.83966416545 }, { "content": "#[test]\n\nfn early_return() {\n\n let early_return = r#\"\n", "file_path": "boa/src/exec/tests.rs", "rank": 43, "score": 213838.15952538123 }, { "content": "#[test]\n\nfn comma_operator() {\n\n let scenario = r#\"\n\n var a, b;\n\n b = 10;\n\n a = (b++, b);\n\n a\n\n \"#;\n\n 
assert_eq!(&exec(scenario), \"11\");\n\n\n\n let scenario = r#\"\n\n var a, b;\n\n b = 10;\n\n a = (b += 5, b /= 3, b - 3);\n\n a\n\n \"#;\n\n assert_eq!(&exec(scenario), \"2\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 44, "score": 213712.60883103716 }, { "content": "#[test]\n\nfn tilde_operator() {\n\n let float = r#\"\n\n let f = -1.2;\n\n ~f\n\n \"#;\n\n assert_eq!(&exec(float), \"0\");\n\n\n\n let numeric = r#\"\n\n let f = 1789;\n\n ~f\n\n \"#;\n\n assert_eq!(&exec(numeric), \"-1790\");\n\n\n\n let nan = r#\"\n\n var m = NaN;\n\n ~m\n\n \"#;\n\n assert_eq!(&exec(nan), \"-1\");\n\n\n\n let object = r#\"\n", "file_path": "boa/src/exec/tests.rs", "rank": 45, "score": 213712.60883103716 }, { "content": "#[test]\n\n#[ignore] // TODO: Once objects are printed in a simpler way this test can be simplified and used\n\nfn display_object() {\n\n let mut engine = Context::new();\n\n let d_obj = r#\"\n\n let o = {a: 'a'};\n\n o\n\n \"#;\n\n let value = forward_val(&mut engine, d_obj).unwrap();\n\n assert_eq!(\n\n value.display().to_string(),\n\n r#\"{\n\n a: \"a\",\n\n__proto__: {\n\nconstructor: {\n\nsetPrototypeOf: {\n\n length: 2\n\n },\n\n prototype: [Cycle],\n\n name: \"Object\",\n\n length: 1,\n\ndefineProperty: {\n", "file_path": "boa/src/value/tests.rs", "rank": 46, "score": 213495.30302114322 }, { "content": "#[test]\n\nfn debug_object() {\n\n let mut engine = Context::new();\n\n let value = forward_val(&mut engine, \"new Array([new Date()])\").unwrap();\n\n\n\n // We don't care about the contents of the debug display (it is *debug* after all). 
In the commit that this test was\n\n // added, this would cause a stack overflow, so executing Debug::fmt is the assertion.\n\n //\n\n // However, we want to make sure that no data is being left in the internal hashset, so executing this twice should\n\n // result in the same output.\n\n assert_eq!(format!(\"{:?}\", value), format!(\"{:?}\", value));\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 47, "score": 213484.57106078436 }, { "content": "#[test]\n\nfn hash_object() {\n\n let object1 = Value::object(Object::default());\n\n assert_eq!(object1, object1);\n\n assert_eq!(object1, object1.clone());\n\n\n\n let object2 = Value::object(Object::default());\n\n assert_ne!(object1, object2);\n\n\n\n assert_eq!(hash_value(&object1), hash_value(&object1.clone()));\n\n assert_ne!(hash_value(&object1), hash_value(&object2));\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 48, "score": 213484.57106078434 }, { "content": "#[test]\n\nfn typeof_object() {\n\n let typeof_object = r#\"\n\n let a = {};\n\n typeof a;\n\n \"#;\n\n assert_eq!(&exec(typeof_object), \"\\\"object\\\"\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 49, "score": 213458.57062797187 }, { "content": "#[test]\n\nfn let_declaration() {\n\n check_parser(\n\n \"let a = 5;\",\n\n vec![LetDeclList::from(vec![LetDecl::new(\"a\", Node::from(Const::from(5)))]).into()],\n\n );\n\n}\n\n\n\n/// Checks `let` declaration parsing with reserved words.\n", "file_path": "boa/src/syntax/parser/statement/declaration/tests.rs", "rank": 50, "score": 212938.41448748918 }, { "content": "#[test]\n\nfn two_case_no_break_switch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n let b = 10;\n\n\n\n switch (a) {\n\n case 10:\n\n a = 150;\n\n case 20:\n\n b = 150;\n\n break;\n\n }\n\n \n\n a + b;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"300\");\n\n}\n\n\n", "file_path": "boa/src/exec/switch/tests.rs", "rank": 51, "score": 212514.9819385669 }, { "content": "#[test]\n\nfn 
sub_number_object_and_number_object() {\n\n let mut engine = Context::new();\n\n\n\n let value = forward_val(&mut engine, \"new Number(1) - new Number(999)\").unwrap();\n\n let value = value.to_i32(&mut engine).unwrap();\n\n assert_eq!(value, -998);\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 52, "score": 212140.0070023422 }, { "content": "#[test]\n\nfn add_number_object_and_string_object() {\n\n let mut engine = Context::new();\n\n\n\n let value = forward_val(&mut engine, \"new Number(10) + new String(\\\"0\\\")\").unwrap();\n\n let value = value.to_string(&mut engine).unwrap();\n\n assert_eq!(value, \"100\");\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 53, "score": 212140.0070023422 }, { "content": "#[wasm_bindgen]\n\npub fn evaluate(src: &str) -> Result<String, JsValue> {\n\n // Setup executor\n\n let mut engine = Context::new();\n\n\n\n let expr = match parse(src) {\n\n Ok(res) => res,\n\n Err(e) => {\n\n return Err(format!(\n\n \"Uncaught {}\",\n\n engine\n\n .throw_syntax_error(e.to_string())\n\n .expect_err(\"interpreter.throw_syntax_error() did not return an error\")\n\n .display()\n\n )\n\n .into());\n\n }\n\n };\n\n expr.run(&mut engine)\n\n .map_err(|e| JsValue::from(format!(\"Uncaught {}\", e.display())))\n\n .map(|v| v.display().to_string())\n\n}\n", "file_path": "boa_wasm/src/lib.rs", "rank": 54, "score": 211406.35155318014 }, { "content": "#[test]\n\nfn get_set_field() {\n\n let obj = Value::new_object(None);\n\n // Create string and convert it to a Value\n\n let s = Value::from(\"bar\");\n\n obj.set_field(\"foo\", s);\n\n assert_eq!(obj.get_field(\"foo\").display().to_string(), \"\\\"bar\\\"\");\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 55, "score": 209126.42456356777 }, { "content": "#[test]\n\nfn let_declaration_no_spaces() {\n\n check_parser(\n\n \"let a=5;\",\n\n vec![LetDeclList::from(vec![LetDecl::new(\"a\", Node::from(Const::from(5)))]).into()],\n\n );\n\n}\n\n\n\n/// Checks empty `let` 
declaration parsing.\n", "file_path": "boa/src/syntax/parser/statement/declaration/tests.rs", "rank": 56, "score": 209003.47694130227 }, { "content": "#[test]\n\nfn empty_let_declaration() {\n\n check_parser(\n\n \"let a;\",\n\n vec![LetDeclList::from(vec![LetDecl::new(\"a\", None)]).into()],\n\n );\n\n}\n\n\n\n/// Checks multiple `let` declarations.\n", "file_path": "boa/src/syntax/parser/statement/declaration/tests.rs", "rank": 57, "score": 209003.47694130227 }, { "content": "#[test]\n\nfn multiple_let_declaration() {\n\n check_parser(\n\n \"let a = 5, b, c = 6;\",\n\n vec![LetDeclList::from(vec![\n\n LetDecl::new(\"a\", Node::from(Const::from(5))),\n\n LetDecl::new(\"b\", None),\n\n LetDecl::new(\"c\", Node::from(Const::from(6))),\n\n ])\n\n .into()],\n\n );\n\n}\n\n\n\n/// Checks `const` declaration parsing.\n", "file_path": "boa/src/syntax/parser/statement/declaration/tests.rs", "rank": 58, "score": 209003.47694130227 }, { "content": "#[test]\n\nfn let_declaration_keywords() {\n\n check_parser(\n\n \"let yield = 5;\",\n\n vec![LetDeclList::from(vec![LetDecl::new(\"yield\", Node::from(Const::from(5)))]).into()],\n\n );\n\n\n\n check_parser(\n\n \"let await = 5;\",\n\n vec![LetDeclList::from(vec![LetDecl::new(\"await\", Node::from(Const::from(5)))]).into()],\n\n );\n\n}\n\n\n\n/// Checks `let` declaration parsing with no spaces.\n", "file_path": "boa/src/syntax/parser/statement/declaration/tests.rs", "rank": 59, "score": 209003.47694130227 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct CaseBlock {\n\n allow_yield: AllowYield,\n\n allow_await: AllowAwait,\n\n allow_return: AllowReturn,\n\n}\n\n\n\nimpl CaseBlock {\n\n /// Creates a new `CaseBlock` parser.\n\n fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self\n\n where\n\n Y: Into<AllowYield>,\n\n A: Into<AllowAwait>,\n\n R: Into<AllowReturn>,\n\n {\n\n Self {\n\n allow_yield: allow_yield.into(),\n\n allow_await: allow_await.into(),\n\n allow_return: allow_return.into(),\n\n }\n\n 
}\n", "file_path": "boa/src/syntax/parser/statement/switch/mod.rs", "rank": 60, "score": 208992.26112287797 }, { "content": "#[test]\n\nfn assign_operator_precedence() {\n\n let src = r#\"\n\n let a = 1;\n\n a = a + 1;\n\n a\n\n \"#;\n\n assert_eq!(&exec(src), \"2\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 61, "score": 208941.5966858877 }, { "content": "#[test]\n\nfn do_while_loop_continue() {\n\n let scenario = r#\"\n\n var i = 0, a = 0, b = 0;\n\n do {\n\n i++;\n\n if (i < 2) {\n\n a++;\n\n continue;\n\n }\n\n b++;\n\n } while (i < 3)\n\n [a, b]\n\n \"#;\n\n assert_eq!(&exec(scenario), \"[ 1, 2 ]\");\n\n}\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 62, "score": 208936.8416823892 }, { "content": "#[test]\n\nfn while_loop_continue() {\n\n let scenario = r#\"\n\n var i = 0, a = 0, b = 0;\n\n while (i < 3) {\n\n i++;\n\n if (i < 2) {\n\n a++;\n\n continue;\n\n }\n\n b++;\n\n }\n\n [a, b]\n\n \"#;\n\n assert_eq!(&exec(scenario), \"[ 1, 2 ]\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 63, "score": 208936.8416823892 }, { "content": "#[test]\n\nfn for_loop_break() {\n\n let scenario = r#\"\n\n let a = 1;\n\n for (; a < 5; a++) {\n\n if (a == 3) {\n\n break;\n\n }\n\n }\n\n a;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 64, "score": 208936.8416823892 }, { "content": "#[test]\n\nfn finally() {\n\n let scenario = r#\"\n\n let a = 10;\n\n try {\n\n a = 20;\n\n } finally {\n\n a = 30;\n\n }\n\n\n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"30\");\n\n}\n\n\n", "file_path": "boa/src/exec/try_node/tests.rs", "rank": 65, "score": 208760.62008816598 }, { "content": "#[test]\n\nfn catch() {\n\n let scenario = r#\"\n\n let a = 10;\n\n try {\n\n throw \"error\";\n\n } catch {\n\n a = 20;\n\n }\n\n\n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"20\");\n\n}\n\n\n", "file_path": "boa/src/exec/try_node/tests.rs", "rank": 66, "score": 
208760.62008816595 }, { "content": "#[test]\n\nfn display_array_string() {\n\n let mut engine = Context::new();\n\n\n\n let value = forward_val(&mut engine, \"[\\\"Hello\\\"]\").unwrap();\n\n assert_eq!(value.display().to_string(), \"[ \\\"Hello\\\" ]\");\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 67, "score": 208746.61701120218 }, { "content": "#[test]\n\nfn includes_value() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n var empty = [ ];\n\n var one = [\"a\"];\n\n var many = [\"a\", \"b\", \"c\"];\n\n var duplicates = [\"a\", \"b\", \"c\", \"a\", \"b\"];\n\n var undefined = [undefined];\n\n \"#;\n\n eprintln!(\"{}\", forward(&mut engine, init));\n\n\n\n // Empty\n\n let empty = forward(&mut engine, \"empty.includes('a')\");\n\n assert_eq!(empty, String::from(\"false\"));\n\n\n\n // One\n\n let one = forward(&mut engine, \"one.includes('a')\");\n\n assert_eq!(one, String::from(\"true\"));\n\n // Missing from one\n\n let missing_from_one = forward(&mut engine, \"one.includes('b')\");\n", "file_path": "boa/src/builtins/array/tests.rs", "rank": 68, "score": 208746.61701120218 }, { "content": "#[test]\n\nfn for_each_push_value() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n var a = [1, 2, 3, 4];\n", "file_path": "boa/src/builtins/array/tests.rs", "rank": 69, "score": 208746.61701120218 }, { "content": "#[test]\n\nfn assign_to_array_decl() {\n\n let mut engine = Context::new();\n\n\n\n assert!(forward(&mut engine, \"[1] = [2]\").starts_with(\"Uncaught \\\"SyntaxError\\\": \"));\n\n assert!(forward(&mut engine, \"[3, 5] = [7, 8]\").starts_with(\"Uncaught \\\"SyntaxError\\\": \"));\n\n assert!(forward(&mut engine, \"[6, 8] = [2]\").starts_with(\"Uncaught \\\"SyntaxError\\\": \"));\n\n assert!(forward(&mut engine, \"[6] = [2, 9]\").starts_with(\"Uncaught \\\"SyntaxError\\\": \"));\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 70, "score": 208721.28503377165 }, { "content": "#[test]\n\nfn 
array_rest_with_arguments() {\n\n let mut engine = Context::new();\n\n\n\n let scenario = r#\"\n\n var b = [4, 5, 6]\n\n var a = [1, 2, 3, ...b];\n\n \"#;\n\n forward(&mut engine, scenario);\n\n let one = forward(&mut engine, \"a\");\n\n assert_eq!(one, String::from(\"[ 1, 2, 3, 4, 5, 6 ]\"));\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 71, "score": 208721.28503377165 }, { "content": "#[test]\n\nfn array_pop_benchmark() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n (function(){\n\n let testArray = [83, 93, 27, 29, 2828, 234, 23, 56, 32, 56, 67, 77, 32,\n\n 45, 93, 17, 28, 83, 62, 99, 36, 28, 93, 27, 29, 2828,\n\n 234, 23, 56, 32, 56, 67, 77, 32, 45, 93, 17, 28, 83, 62,\n\n 99, 36, 28, 93, 27, 29, 2828, 234, 23, 56, 32, 56, 67,\n\n 77, 32, 45, 93, 17, 28, 83, 62, 99, 36, 28, 93, 27, 29,\n\n 2828, 234, 23, 56, 32, 56, 67, 77, 32, 45, 93, 17, 28,\n\n 83, 62, 99, 36, 28, 93, 27, 29, 2828, 234, 23, 56, 32,\n\n 56, 67, 77, 32, 45, 93, 17, 28, 83, 62, 99, 36, 28, 93,\n\n 27, 29, 2828, 234, 23, 56, 32, 56, 67, 77, 32, 45, 93,\n\n 17, 28, 83, 62, 99, 36, 28, 93, 27, 29, 2828, 234, 23,\n\n 56, 32, 56, 67, 77, 32, 45, 93, 17, 28, 83, 62, 99, 36,\n\n 28, 93, 27, 29, 2828, 234, 23, 56, 32, 56, 67, 77, 32,\n\n 45, 93, 17, 28, 83, 62, 99, 36, 28, 93, 27, 29, 2828, 234,\n\n 23, 56, 32, 56, 67, 77, 32, 45, 93, 17, 28, 83, 62, 99,\n\n 36, 28, 93, 27, 29, 2828, 234, 23, 56, 32, 56, 67, 77, 32,\n\n 45, 93, 17, 28, 83, 62, 99, 36, 28];\n", "file_path": "boa/src/exec/tests.rs", "rank": 72, "score": 208721.28503377165 }, { "content": "#[test]\n\nfn array_creation_benchmark() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n (function(){\n\n let testArr = [];\n\n for (let a = 0; a <= 500; a++) {\n\n testArr[a] = ('p' + a);\n\n }\n\n\n\n return testArr;\n\n })();\n\n \"#;\n\n\n\n assert_eq!(forward(&mut engine, init), \"[ \\\"p0\\\", \\\"p1\\\", \\\"p2\\\", \\\"p3\\\", \\\"p4\\\", \\\"p5\\\", \\\"p6\\\", \\\"p7\\\", \\\"p8\\\", \\\"p9\\\", 
\\\"p10\\\", \\\"p11\\\", \\\"p12\\\", \\\"p13\\\", \\\"p14\\\", \\\"p15\\\", \\\"p16\\\", \\\"p17\\\", \\\"p18\\\", \\\"p19\\\", \\\"p20\\\", \\\"p21\\\", \\\"p22\\\", \\\"p23\\\", \\\"p24\\\", \\\"p25\\\", \\\"p26\\\", \\\"p27\\\", \\\"p28\\\", \\\"p29\\\", \\\"p30\\\", \\\"p31\\\", \\\"p32\\\", \\\"p33\\\", \\\"p34\\\", \\\"p35\\\", \\\"p36\\\", \\\"p37\\\", \\\"p38\\\", \\\"p39\\\", \\\"p40\\\", \\\"p41\\\", \\\"p42\\\", \\\"p43\\\", \\\"p44\\\", \\\"p45\\\", \\\"p46\\\", \\\"p47\\\", \\\"p48\\\", \\\"p49\\\", \\\"p50\\\", \\\"p51\\\", \\\"p52\\\", \\\"p53\\\", \\\"p54\\\", \\\"p55\\\", \\\"p56\\\", \\\"p57\\\", \\\"p58\\\", \\\"p59\\\", \\\"p60\\\", \\\"p61\\\", \\\"p62\\\", \\\"p63\\\", \\\"p64\\\", \\\"p65\\\", \\\"p66\\\", \\\"p67\\\", \\\"p68\\\", \\\"p69\\\", \\\"p70\\\", \\\"p71\\\", \\\"p72\\\", \\\"p73\\\", \\\"p74\\\", \\\"p75\\\", \\\"p76\\\", \\\"p77\\\", \\\"p78\\\", \\\"p79\\\", \\\"p80\\\", \\\"p81\\\", \\\"p82\\\", \\\"p83\\\", \\\"p84\\\", \\\"p85\\\", \\\"p86\\\", \\\"p87\\\", \\\"p88\\\", \\\"p89\\\", \\\"p90\\\", \\\"p91\\\", \\\"p92\\\", \\\"p93\\\", \\\"p94\\\", \\\"p95\\\", \\\"p96\\\", \\\"p97\\\", \\\"p98\\\", \\\"p99\\\", \\\"p100\\\", \\\"p101\\\", \\\"p102\\\", \\\"p103\\\", \\\"p104\\\", \\\"p105\\\", \\\"p106\\\", \\\"p107\\\", \\\"p108\\\", \\\"p109\\\", \\\"p110\\\", \\\"p111\\\", \\\"p112\\\", \\\"p113\\\", \\\"p114\\\", \\\"p115\\\", \\\"p116\\\", \\\"p117\\\", \\\"p118\\\", \\\"p119\\\", \\\"p120\\\", \\\"p121\\\", \\\"p122\\\", \\\"p123\\\", \\\"p124\\\", \\\"p125\\\", \\\"p126\\\", \\\"p127\\\", \\\"p128\\\", \\\"p129\\\", \\\"p130\\\", \\\"p131\\\", \\\"p132\\\", \\\"p133\\\", \\\"p134\\\", \\\"p135\\\", \\\"p136\\\", \\\"p137\\\", \\\"p138\\\", \\\"p139\\\", \\\"p140\\\", \\\"p141\\\", \\\"p142\\\", \\\"p143\\\", \\\"p144\\\", \\\"p145\\\", \\\"p146\\\", \\\"p147\\\", \\\"p148\\\", \\\"p149\\\", \\\"p150\\\", \\\"p151\\\", \\\"p152\\\", \\\"p153\\\", \\\"p154\\\", \\\"p155\\\", \\\"p156\\\", \\\"p157\\\", \\\"p158\\\", 
\\\"p159\\\", \\\"p160\\\", \\\"p161\\\", \\\"p162\\\", \\\"p163\\\", \\\"p164\\\", \\\"p165\\\", \\\"p166\\\", \\\"p167\\\", \\\"p168\\\", \\\"p169\\\", \\\"p170\\\", \\\"p171\\\", \\\"p172\\\", \\\"p173\\\", \\\"p174\\\", \\\"p175\\\", \\\"p176\\\", \\\"p177\\\", \\\"p178\\\", \\\"p179\\\", \\\"p180\\\", \\\"p181\\\", \\\"p182\\\", \\\"p183\\\", \\\"p184\\\", \\\"p185\\\", \\\"p186\\\", \\\"p187\\\", \\\"p188\\\", \\\"p189\\\", \\\"p190\\\", \\\"p191\\\", \\\"p192\\\", \\\"p193\\\", \\\"p194\\\", \\\"p195\\\", \\\"p196\\\", \\\"p197\\\", \\\"p198\\\", \\\"p199\\\", \\\"p200\\\", \\\"p201\\\", \\\"p202\\\", \\\"p203\\\", \\\"p204\\\", \\\"p205\\\", \\\"p206\\\", \\\"p207\\\", \\\"p208\\\", \\\"p209\\\", \\\"p210\\\", \\\"p211\\\", \\\"p212\\\", \\\"p213\\\", \\\"p214\\\", \\\"p215\\\", \\\"p216\\\", \\\"p217\\\", \\\"p218\\\", \\\"p219\\\", \\\"p220\\\", \\\"p221\\\", \\\"p222\\\", \\\"p223\\\", \\\"p224\\\", \\\"p225\\\", \\\"p226\\\", \\\"p227\\\", \\\"p228\\\", \\\"p229\\\", \\\"p230\\\", \\\"p231\\\", \\\"p232\\\", \\\"p233\\\", \\\"p234\\\", \\\"p235\\\", \\\"p236\\\", \\\"p237\\\", \\\"p238\\\", \\\"p239\\\", \\\"p240\\\", \\\"p241\\\", \\\"p242\\\", \\\"p243\\\", \\\"p244\\\", \\\"p245\\\", \\\"p246\\\", \\\"p247\\\", \\\"p248\\\", \\\"p249\\\", \\\"p250\\\", \\\"p251\\\", \\\"p252\\\", \\\"p253\\\", \\\"p254\\\", \\\"p255\\\", \\\"p256\\\", \\\"p257\\\", \\\"p258\\\", \\\"p259\\\", \\\"p260\\\", \\\"p261\\\", \\\"p262\\\", \\\"p263\\\", \\\"p264\\\", \\\"p265\\\", \\\"p266\\\", \\\"p267\\\", \\\"p268\\\", \\\"p269\\\", \\\"p270\\\", \\\"p271\\\", \\\"p272\\\", \\\"p273\\\", \\\"p274\\\", \\\"p275\\\", \\\"p276\\\", \\\"p277\\\", \\\"p278\\\", \\\"p279\\\", \\\"p280\\\", \\\"p281\\\", \\\"p282\\\", \\\"p283\\\", \\\"p284\\\", \\\"p285\\\", \\\"p286\\\", \\\"p287\\\", \\\"p288\\\", \\\"p289\\\", \\\"p290\\\", \\\"p291\\\", \\\"p292\\\", \\\"p293\\\", \\\"p294\\\", \\\"p295\\\", \\\"p296\\\", \\\"p297\\\", \\\"p298\\\", \\\"p299\\\", \\\"p300\\\", 
\\\"p301\\\", \\\"p302\\\", \\\"p303\\\", \\\"p304\\\", \\\"p305\\\", \\\"p306\\\", \\\"p307\\\", \\\"p308\\\", \\\"p309\\\", \\\"p310\\\", \\\"p311\\\", \\\"p312\\\", \\\"p313\\\", \\\"p314\\\", \\\"p315\\\", \\\"p316\\\", \\\"p317\\\", \\\"p318\\\", \\\"p319\\\", \\\"p320\\\", \\\"p321\\\", \\\"p322\\\", \\\"p323\\\", \\\"p324\\\", \\\"p325\\\", \\\"p326\\\", \\\"p327\\\", \\\"p328\\\", \\\"p329\\\", \\\"p330\\\", \\\"p331\\\", \\\"p332\\\", \\\"p333\\\", \\\"p334\\\", \\\"p335\\\", \\\"p336\\\", \\\"p337\\\", \\\"p338\\\", \\\"p339\\\", \\\"p340\\\", \\\"p341\\\", \\\"p342\\\", \\\"p343\\\", \\\"p344\\\", \\\"p345\\\", \\\"p346\\\", \\\"p347\\\", \\\"p348\\\", \\\"p349\\\", \\\"p350\\\", \\\"p351\\\", \\\"p352\\\", \\\"p353\\\", \\\"p354\\\", \\\"p355\\\", \\\"p356\\\", \\\"p357\\\", \\\"p358\\\", \\\"p359\\\", \\\"p360\\\", \\\"p361\\\", \\\"p362\\\", \\\"p363\\\", \\\"p364\\\", \\\"p365\\\", \\\"p366\\\", \\\"p367\\\", \\\"p368\\\", \\\"p369\\\", \\\"p370\\\", \\\"p371\\\", \\\"p372\\\", \\\"p373\\\", \\\"p374\\\", \\\"p375\\\", \\\"p376\\\", \\\"p377\\\", \\\"p378\\\", \\\"p379\\\", \\\"p380\\\", \\\"p381\\\", \\\"p382\\\", \\\"p383\\\", \\\"p384\\\", \\\"p385\\\", \\\"p386\\\", \\\"p387\\\", \\\"p388\\\", \\\"p389\\\", \\\"p390\\\", \\\"p391\\\", \\\"p392\\\", \\\"p393\\\", \\\"p394\\\", \\\"p395\\\", \\\"p396\\\", \\\"p397\\\", \\\"p398\\\", \\\"p399\\\", \\\"p400\\\", \\\"p401\\\", \\\"p402\\\", \\\"p403\\\", \\\"p404\\\", \\\"p405\\\", \\\"p406\\\", \\\"p407\\\", \\\"p408\\\", \\\"p409\\\", \\\"p410\\\", \\\"p411\\\", \\\"p412\\\", \\\"p413\\\", \\\"p414\\\", \\\"p415\\\", \\\"p416\\\", \\\"p417\\\", \\\"p418\\\", \\\"p419\\\", \\\"p420\\\", \\\"p421\\\", \\\"p422\\\", \\\"p423\\\", \\\"p424\\\", \\\"p425\\\", \\\"p426\\\", \\\"p427\\\", \\\"p428\\\", \\\"p429\\\", \\\"p430\\\", \\\"p431\\\", \\\"p432\\\", \\\"p433\\\", \\\"p434\\\", \\\"p435\\\", \\\"p436\\\", \\\"p437\\\", \\\"p438\\\", \\\"p439\\\", \\\"p440\\\", \\\"p441\\\", \\\"p442\\\", 
\\\"p443\\\", \\\"p444\\\", \\\"p445\\\", \\\"p446\\\", \\\"p447\\\", \\\"p448\\\", \\\"p449\\\", \\\"p450\\\", \\\"p451\\\", \\\"p452\\\", \\\"p453\\\", \\\"p454\\\", \\\"p455\\\", \\\"p456\\\", \\\"p457\\\", \\\"p458\\\", \\\"p459\\\", \\\"p460\\\", \\\"p461\\\", \\\"p462\\\", \\\"p463\\\", \\\"p464\\\", \\\"p465\\\", \\\"p466\\\", \\\"p467\\\", \\\"p468\\\", \\\"p469\\\", \\\"p470\\\", \\\"p471\\\", \\\"p472\\\", \\\"p473\\\", \\\"p474\\\", \\\"p475\\\", \\\"p476\\\", \\\"p477\\\", \\\"p478\\\", \\\"p479\\\", \\\"p480\\\", \\\"p481\\\", \\\"p482\\\", \\\"p483\\\", \\\"p484\\\", \\\"p485\\\", \\\"p486\\\", \\\"p487\\\", \\\"p488\\\", \\\"p489\\\", \\\"p490\\\", \\\"p491\\\", \\\"p492\\\", \\\"p493\\\", \\\"p494\\\", \\\"p495\\\", \\\"p496\\\", \\\"p497\\\", \\\"p498\\\", \\\"p499\\\", \\\"p500\\\" ]\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 73, "score": 208721.28503377168 }, { "content": "#[test]\n\nfn display_number_object() {\n\n let mut engine = Context::new();\n\n let d_obj = r#\"\n\n let num = new Number(3.14);\n\n num\n\n \"#;\n\n let value = forward_val(&mut engine, d_obj).unwrap();\n\n assert_eq!(value.display().to_string(), \"Number { 3.14 }\")\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 74, "score": 208719.42162861014 }, { "content": "#[test]\n\nfn display_boolean_object() {\n\n let mut engine = Context::new();\n\n let d_obj = r#\"\n\n let bool = new Boolean(0);\n\n bool\n\n \"#;\n\n let value = forward_val(&mut engine, d_obj).unwrap();\n\n assert_eq!(value.display().to_string(), \"Boolean { false }\")\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 75, "score": 208719.42162861014 }, { "content": "#[test]\n\nfn assign_to_object_decl() {\n\n let mut engine = Context::new();\n\n\n\n const ERR_MSG: &str =\n\n \"Uncaught \\\"SyntaxError\\\": \\\"expected token \\';\\', got \\':\\' in expression statement at line 1, col 3\\\"\";\n\n\n\n assert_eq!(forward(&mut engine, \"{a: 3} = {a: 5};\"), 
ERR_MSG);\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 76, "score": 208694.08965117962 }, { "content": "#[test]\n\nfn call_array_constructor_with_one_argument() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n var empty = new Array(0);\n\n\n\n var five = new Array(5);\n\n\n\n var one = new Array(\"Hello, world!\");\n\n \"#;\n\n forward(&mut engine, init);\n\n // let result = forward(&mut engine, \"empty.length\");\n\n // assert_eq!(result, \"0\");\n\n\n\n // let result = forward(&mut engine, \"five.length\");\n\n // assert_eq!(result, \"5\");\n\n\n\n // let result = forward(&mut engine, \"one.length\");\n\n // assert_eq!(result, \"1\");\n\n}\n", "file_path": "boa/src/builtins/array/tests.rs", "rank": 77, "score": 208674.35465775762 }, { "content": "pub fn new_global_environment(global: Value, this_value: Value) -> Environment {\n\n let obj_rec = ObjectEnvironmentRecord {\n\n bindings: global,\n\n outer_env: None,\n\n /// Object Environment Records created for with statements (13.11)\n\n /// can provide their binding object as an implicit this value for use in function calls.\n\n /// The capability is controlled by a withEnvironment Boolean value that is associated\n\n /// with each object Environment Record. 
By default, the value of withEnvironment is false\n\n /// for any object Environment Record.\n\n with_environment: false,\n\n };\n\n\n\n let dcl_rec = DeclarativeEnvironmentRecord {\n\n env_rec: FxHashMap::default(),\n\n outer_env: None,\n\n };\n\n\n\n Gc::new(GcCell::new(Box::new(GlobalEnvironmentRecord {\n\n object_record: obj_rec,\n\n global_this_binding: this_value,\n", "file_path": "boa/src/environment/lexical_environment.rs", "rank": 78, "score": 206397.2708233666 }, { "content": "#[test]\n\nfn check_switch_two_default() {\n\n check_invalid(\n\n r#\"\n\n let a = 10;\n\n switch (a) {\n\n default:\n\n a = 20;\n\n break;\n\n default:\n\n a = 30;\n\n break;\n\n }\n\n \"#,\n\n );\n\n}\n\n\n\n/// Checks parsing malformed switch with no expression.\n", "file_path": "boa/src/syntax/parser/statement/switch/tests.rs", "rank": 79, "score": 205349.5002958667 }, { "content": "#[test]\n\nfn check_switch_seperated_defaults() {\n\n check_invalid(\n\n r#\"\n\n let a = 10;\n\n switch (a) {\n\n default:\n\n a = 20;\n\n break;\n\n case 10:\n\n a = 60;\n\n break;\n\n default:\n\n a = 30;\n\n break;\n\n }\n\n \"#,\n\n );\n\n}\n\n\n\n/// Example of JS code https://jsfiddle.net/zq6jx47h/4/.\n", "file_path": "boa/src/syntax/parser/statement/switch/tests.rs", "rank": 80, "score": 205349.5002958667 }, { "content": "#[test]\n\nfn empty_let_decl_undefined() {\n\n let scenario = r#\"\n\n let a;\n\n a === undefined;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"true\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 81, "score": 204581.10754333768 }, { "content": "#[test]\n\nfn empty_function_returns_undefined() {\n\n let scenario = \"(function () {}) ()\";\n\n assert_eq!(&exec(scenario), \"undefined\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 82, "score": 204529.01211287102 }, { "content": "#[test]\n\nfn calling_function_with_unspecified_arguments() {\n\n let mut engine = Context::new();\n\n let scenario = r#\"\n", "file_path": "boa/src/exec/tests.rs", 
"rank": 83, "score": 204494.8576495031 }, { "content": "#[test]\n\nfn assignmentoperator_lhs_not_defined() {\n\n let scenario = r#\"\n\n try {\n\n a += 5\n\n } catch (err) {\n\n err.toString()\n\n }\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"\\\"ReferenceError: a is not defined\\\"\");\n\n}\n\n\n", "file_path": "boa/src/exec/operator/tests.rs", "rank": 84, "score": 204409.75527200365 }, { "content": "#[test]\n\nfn while_loop_late_break() {\n\n // Ordering with statement before the break.\n\n let scenario = r#\"\n\n let a = 1;\n\n while (a < 5) {\n\n a++;\n\n if (a == 3) {\n\n break;\n\n }\n\n }\n\n a;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 85, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn while_loop_early_break() {\n\n // Ordering with statements after the break.\n\n let scenario = r#\"\n\n let a = 1;\n\n while (a < 5) {\n\n if (a == 3) {\n\n break;\n\n }\n\n a++;\n\n }\n\n a;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 86, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn do_loop_early_break() {\n\n // Ordering with statements after the break.\n\n let scenario = r#\"\n\n let a = 1;\n\n do {\n\n if (a == 3) {\n\n break;\n\n }\n\n a++;\n\n } while (a < 5);\n\n a;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 87, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn break_out_of_inner_loop() {\n\n let scenario = r#\"\n\n var a = 0, b = 0;\n\n for (let i = 0; i < 2; i++) {\n\n a++;\n\n for (let j = 0; j < 10; j++) {\n\n b++;\n\n if (j == 3)\n\n break;\n\n }\n\n a++;\n\n }\n\n [a, b]\n\n \"#;\n\n assert_eq!(&exec(scenario), \"[ 4, 8 ]\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 88, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn 
for_loop_iteration_variable_does_not_leak() {\n\n let inner_scope = r#\"\n\n for (let i = 0;false;) {}\n\n\n\n try {\n\n i\n\n } catch (err) {\n\n err.message\n\n }\n\n \"#;\n\n\n\n assert_eq!(&exec(inner_scope), \"\\\"i is not defined\\\"\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 89, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn do_loop_late_break() {\n\n // Ordering with statement before the break.\n\n let scenario = r#\"\n\n let a = 1;\n\n do {\n\n a++;\n\n if (a == 3) {\n\n break;\n\n }\n\n } while (a < 5);\n\n a;\n\n \"#;\n\n\n\n assert_eq!(&exec(scenario), \"3\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 90, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn continue_inner_loop() {\n\n let scenario = r#\"\n\n var a = 0, b = 0;\n\n for (let i = 0; i < 2; i++) {\n\n a++;\n\n for (let j = 0; j < 10; j++) {\n\n if (j < 3)\n\n continue;\n\n b++;\n\n }\n\n a++;\n\n }\n\n [a, b]\n\n \"#;\n\n assert_eq!(&exec(scenario), \"[ 4, 14 ]\");\n\n}\n\n\n", "file_path": "boa/src/exec/iteration/tests.rs", "rank": 91, "score": 204405.11945261425 }, { "content": "#[test]\n\nfn simple_try() {\n\n let scenario = r#\"\n\n let a = 10;\n\n try {\n\n a = 20;\n\n } catch {\n\n a = 30;\n\n }\n\n\n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"20\");\n\n}\n\n\n", "file_path": "boa/src/exec/try_node/tests.rs", "rank": 92, "score": 204233.3148506636 }, { "content": "#[test]\n\nfn catch_binding() {\n\n let scenario = r#\"\n\n let a = 10;\n\n try {\n\n throw 20;\n\n } catch(err) {\n\n a = err;\n\n }\n\n\n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"20\");\n\n}\n\n\n", "file_path": "boa/src/exec/try_node/tests.rs", "rank": 93, "score": 204233.31485066362 }, { "content": "#[test]\n\nfn catch_finally() {\n\n let scenario = r#\"\n\n let a = 10;\n\n try {\n\n a = 20;\n\n } catch {\n\n a = 40;\n\n } finally {\n\n a = 30;\n\n }\n\n\n\n a;\n\n \"#;\n\n assert_eq!(&exec(scenario), \"30\");\n\n}\n\n\n", "file_path": 
"boa/src/exec/try_node/tests.rs", "rank": 94, "score": 204233.3148506636 }, { "content": "#[test]\n\nfn add_number_object_and_number() {\n\n let mut engine = Context::new();\n\n\n\n let value = forward_val(&mut engine, \"new Number(10) + 6\").unwrap();\n\n let value = value.to_i32(&mut engine).unwrap();\n\n assert_eq!(value, 16);\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 95, "score": 204193.1490300321 }, { "content": "#[test]\n\nfn display_negative_zero_object() {\n\n let mut engine = Context::new();\n\n let d_obj = r#\"\n\n let num = new Number(-0);\n\n num\n\n \"#;\n\n let value = forward_val(&mut engine, d_obj).unwrap();\n\n assert_eq!(value.display().to_string(), \"Number { -0 }\")\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 96, "score": 204193.1490300321 }, { "content": "#[test]\n\nfn sub_string_and_number_object() {\n\n let mut engine = Context::new();\n\n\n\n let value = forward_val(&mut engine, \"'Hello' - new Number(999)\").unwrap();\n\n let value = value.to_number(&mut engine).unwrap();\n\n assert!(value.is_nan());\n\n}\n\n\n", "file_path": "boa/src/value/tests.rs", "rank": 97, "score": 204193.1490300321 }, { "content": "#[test]\n\nfn check_this_binding_in_object_literal() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n var foo = {\n\n a: 3,\n\n bar: function () { return this.a + 5 }\n\n };\n\n\n\n foo.bar()\n\n \"#;\n\n\n\n assert_eq!(forward(&mut engine, init), \"8\");\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 98, "score": 204168.4519983303 }, { "content": "#[test]\n\nfn number_object_access_benchmark() {\n\n let mut engine = Context::new();\n\n let init = r#\"\n\n new Number(\n\n new Number(\n\n new Number(\n\n new Number(100).valueOf() - 10.5\n\n ).valueOf() + 100\n\n ).valueOf() * 1.6\n\n )\n\n \"#;\n\n\n\n assert!(forward_val(&mut engine, init).is_ok());\n\n}\n\n\n", "file_path": "boa/src/exec/tests.rs", "rank": 99, "score": 204168.4519983303 } ]
Rust
term/src/test/csi.rs
bcully/wezterm
ea401e1f58ca5a088ac5d5e1d7963f36269afb76
use super::*; #[test] fn test_vpa() { let mut term = TestTerm::new(3, 4, 0); term.assert_cursor_pos(0, 0, None); term.print("a\r\nb\r\nc"); term.assert_cursor_pos(1, 2, None); term.print("\x1b[d"); term.assert_cursor_pos(1, 0, None); term.print("\r\n\r\n"); term.assert_cursor_pos(0, 2, None); term.print("\x1b[2d"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[-2d"); term.assert_cursor_pos(0, 1, None); } #[test] fn test_rep() { let mut term = TestTerm::new(3, 4, 0); term.print("h"); term.cup(1, 0); term.print("\x1b[2ba"); assert_visible_contents(&term, file!(), line!(), &["hhha", " ", " "]); } #[test] fn test_irm() { let mut term = TestTerm::new(3, 8, 0); term.print("foo"); term.cup(0, 0); term.print("\x1b[4hBAR"); assert_visible_contents( &term, file!(), line!(), &["BARfoo ", " ", " "], ); } #[test] fn test_ich() { let mut term = TestTerm::new(3, 4, 0); term.print("hey!wat?"); term.cup(1, 0); term.print("\x1b[2@"); assert_visible_contents(&term, file!(), line!(), &["h e", "wat?", " "]); term.print("\x1b[12@"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); term.print("\x1b[-12@"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); } #[test] fn test_ech() { let mut term = TestTerm::new(3, 4, 0); term.print("hey!wat?"); term.cup(1, 0); term.print("\x1b[2X"); assert_visible_contents(&term, file!(), line!(), &["h !", "wat?", " "]); term.print("\x1b[12X"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); term.print("\x1b[-12X"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); } #[test] fn test_dch() { let mut term = TestTerm::new(1, 12, 0); term.print("hello world"); term.cup(1, 0); term.print("\x1b[P"); assert_visible_contents(&term, file!(), line!(), &["hllo world "]); term.cup(4, 0); term.print("\x1b[2P"); assert_visible_contents(&term, file!(), line!(), &["hlloorld "]); term.print("\x1b[-2P"); assert_visible_contents(&term, file!(), line!(), &["hlloorld "]); } 
#[test] fn test_cup() { let mut term = TestTerm::new(3, 4, 0); term.cup(1, 1); term.assert_cursor_pos(1, 1, None); term.cup(-1, -1); term.assert_cursor_pos(0, 0, None); term.cup(2, 2); term.assert_cursor_pos(2, 2, None); term.cup(-1, -1); term.assert_cursor_pos(0, 0, None); term.cup(500, 500); term.assert_cursor_pos(3, 2, None); } #[test] fn test_hvp() { let mut term = TestTerm::new(3, 4, 0); term.hvp(1, 1); term.assert_cursor_pos(1, 1, None); term.hvp(-1, -1); term.assert_cursor_pos(0, 0, None); term.hvp(2, 2); term.assert_cursor_pos(2, 2, None); term.hvp(-1, -1); term.assert_cursor_pos(0, 0, None); term.hvp(500, 500); term.assert_cursor_pos(3, 2, None); } #[test] fn test_dl() { let mut term = TestTerm::new(3, 1, 0); term.print("a\r\nb\r\nc"); term.cup(0, 1); term.delete_lines(1); assert_visible_contents(&term, file!(), line!(), &["a", "c", " "]); term.assert_cursor_pos(0, 1, None); term.cup(0, 0); term.delete_lines(2); assert_visible_contents(&term, file!(), line!(), &[" ", " ", " "]); term.print("1\r\n2\r\n3"); term.cup(0, 1); term.delete_lines(-2); assert_visible_contents(&term, file!(), line!(), &["1", "2", "3"]); } #[test] fn test_cha() { let mut term = TestTerm::new(3, 4, 0); term.cup(1, 1); term.assert_cursor_pos(1, 1, None); term.print("\x1b[G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[2G"); term.assert_cursor_pos(1, 1, None); term.print("\x1b[0G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[-1G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[100G"); term.assert_cursor_pos(3, 1, None); } #[test] fn test_ed() { let mut term = TestTerm::new(3, 3, 0); term.print("abc\r\ndef\r\nghi"); term.cup(1, 2); term.print("\x1b[J"); assert_visible_contents(&term, file!(), line!(), &["abc", "def", "g "]); term.print("\x1b[44m"); term.print("\x1b[2J"); let attr = CellAttributes::default() .set_background(color::AnsiColor::Navy) .clone(); let mut line: Line = " ".into(); line.fill_range(0..=2, &Cell::new(' ', attr.clone())); 
assert_lines_equal( file!(), line!(), &term.screen().visible_lines(), &[line.clone(), line.clone(), line], Compare::TEXT | Compare::ATTRS, ); }
use super::*; #[test] fn test_vpa() { let mut term = TestTerm::new(3, 4, 0); term.assert_cursor_pos(0, 0, None); term.print("a\r\nb\r\nc"); term.assert_cursor_pos(1, 2, None); term.print("\x1b[d"); term.assert_cursor_pos(1, 0, None); term.print("\r\n\r\n"); term.assert_cursor_pos(0, 2, None); term.print("\x1b[2d"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[-2d"); term.assert_cursor_pos(0, 1, None); } #[test] fn test_rep() { let mut term = TestTerm::new(3, 4, 0); term.print("h"); term.cup(1, 0); term.print("\x1b[2ba"); assert_visible_contents(&term, file!(), line!(), &["hhha", " ", " "]); } #[test] fn test_irm() { let mut term = TestTerm::new(3, 8, 0); term.print("foo"); term.cup(0, 0); term.print("\x1b[4hBAR"); assert_visible_contents( &term, file!(), line!(), &["BARfoo ", " ", " "], ); } #[test] fn test_ich() { let mut term = TestTerm::new(3, 4, 0); term.print("hey!wat?"); term.cup(1, 0); term.print("\x1b[2@");
#[test] fn test_ech() { let mut term = TestTerm::new(3, 4, 0); term.print("hey!wat?"); term.cup(1, 0); term.print("\x1b[2X"); assert_visible_contents(&term, file!(), line!(), &["h !", "wat?", " "]); term.print("\x1b[12X"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); term.print("\x1b[-12X"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); } #[test] fn test_dch() { let mut term = TestTerm::new(1, 12, 0); term.print("hello world"); term.cup(1, 0); term.print("\x1b[P"); assert_visible_contents(&term, file!(), line!(), &["hllo world "]); term.cup(4, 0); term.print("\x1b[2P"); assert_visible_contents(&term, file!(), line!(), &["hlloorld "]); term.print("\x1b[-2P"); assert_visible_contents(&term, file!(), line!(), &["hlloorld "]); } #[test] fn test_cup() { let mut term = TestTerm::new(3, 4, 0); term.cup(1, 1); term.assert_cursor_pos(1, 1, None); term.cup(-1, -1); term.assert_cursor_pos(0, 0, None); term.cup(2, 2); term.assert_cursor_pos(2, 2, None); term.cup(-1, -1); term.assert_cursor_pos(0, 0, None); term.cup(500, 500); term.assert_cursor_pos(3, 2, None); } #[test] fn test_hvp() { let mut term = TestTerm::new(3, 4, 0); term.hvp(1, 1); term.assert_cursor_pos(1, 1, None); term.hvp(-1, -1); term.assert_cursor_pos(0, 0, None); term.hvp(2, 2); term.assert_cursor_pos(2, 2, None); term.hvp(-1, -1); term.assert_cursor_pos(0, 0, None); term.hvp(500, 500); term.assert_cursor_pos(3, 2, None); } #[test] fn test_dl() { let mut term = TestTerm::new(3, 1, 0); term.print("a\r\nb\r\nc"); term.cup(0, 1); term.delete_lines(1); assert_visible_contents(&term, file!(), line!(), &["a", "c", " "]); term.assert_cursor_pos(0, 1, None); term.cup(0, 0); term.delete_lines(2); assert_visible_contents(&term, file!(), line!(), &[" ", " ", " "]); term.print("1\r\n2\r\n3"); term.cup(0, 1); term.delete_lines(-2); assert_visible_contents(&term, file!(), line!(), &["1", "2", "3"]); } #[test] fn test_cha() { let mut term = TestTerm::new(3, 4, 0); 
term.cup(1, 1); term.assert_cursor_pos(1, 1, None); term.print("\x1b[G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[2G"); term.assert_cursor_pos(1, 1, None); term.print("\x1b[0G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[-1G"); term.assert_cursor_pos(0, 1, None); term.print("\x1b[100G"); term.assert_cursor_pos(3, 1, None); } #[test] fn test_ed() { let mut term = TestTerm::new(3, 3, 0); term.print("abc\r\ndef\r\nghi"); term.cup(1, 2); term.print("\x1b[J"); assert_visible_contents(&term, file!(), line!(), &["abc", "def", "g "]); term.print("\x1b[44m"); term.print("\x1b[2J"); let attr = CellAttributes::default() .set_background(color::AnsiColor::Navy) .clone(); let mut line: Line = " ".into(); line.fill_range(0..=2, &Cell::new(' ', attr.clone())); assert_lines_equal( file!(), line!(), &term.screen().visible_lines(), &[line.clone(), line.clone(), line], Compare::TEXT | Compare::ATTRS, ); }
assert_visible_contents(&term, file!(), line!(), &["h e", "wat?", " "]); term.print("\x1b[12@"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); term.print("\x1b[-12@"); assert_visible_contents(&term, file!(), line!(), &["h ", "wat?", " "]); }
function_block-function_prefix_line
[ { "content": "fn assert_all_contents(term: &Terminal, file: &str, line: u32, expect_lines: &[&str]) {\n\n print_all_lines(&term);\n\n let screen = term.screen();\n\n\n\n let expect: Vec<Line> = expect_lines.iter().map(|s| (*s).into()).collect();\n\n\n\n assert_lines_equal(file, line, &screen.all_lines(), &expect, Compare::TEXT);\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 0, "score": 267423.51240122144 }, { "content": "/// Asserts that the visible lines of the terminal have the\n\n/// same character contents as the expected lines.\n\n/// The other cell attributes are not compared; this is\n\n/// a convenience for writing visually understandable tests.\n\nfn assert_visible_contents(term: &Terminal, file: &str, line: u32, expect_lines: &[&str]) {\n\n print_visible_lines(&term);\n\n let screen = term.screen();\n\n\n\n let expect: Vec<Line> = expect_lines.iter().map(|s| (*s).into()).collect();\n\n\n\n assert_lines_equal(file, line, &screen.visible_lines(), &expect, Compare::TEXT);\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 1, "score": 262952.33119507186 }, { "content": "fn print_all_lines(term: &Terminal) {\n\n let screen = term.screen();\n\n\n\n println!(\"whole screen contents are:\");\n\n for line in screen.lines.iter() {\n\n println!(\"[{}]\", line.as_str());\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 2, "score": 233244.43729643774 }, { "content": "fn print_visible_lines(term: &Terminal) {\n\n let screen = term.screen();\n\n\n\n println!(\"screen contents are:\");\n\n for line in screen.visible_lines().iter() {\n\n println!(\"[{}]\", line.as_str());\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 3, "score": 227902.96493406055 }, { "content": "#[test]\n\nfn test_delete_lines() {\n\n let mut term = TestTerm::new(5, 3, 0);\n\n\n\n term.print(\"111\\r\\n222\\r\\n333\\r\\n444\\r\\n555\");\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\"111\", \"222\", \"333\", \"444\", 
\"555\"],\n\n );\n\n term.assert_dirty_lines(&[0, 1, 2, 3, 4], None);\n\n term.cup(0, 1);\n\n term.clean_dirty_lines();\n\n\n\n term.assert_dirty_lines(&[], None);\n\n term.delete_lines(2);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n", "file_path": "term/src/test/mod.rs", "rank": 8, "score": 213876.82979000002 }, { "content": "/// Asserts that both line slices match according to the\n\n/// selected flags.\n\nfn assert_lines_equal(\n\n file: &str,\n\n line_no: u32,\n\n lines: &[Line],\n\n expect_lines: &[Line],\n\n compare: Compare,\n\n) {\n\n let mut expect_iter = expect_lines.iter();\n\n\n\n for (idx, line) in lines.iter().enumerate() {\n\n let expect = expect_iter.next().unwrap();\n\n\n\n if compare.contains(Compare::DIRTY) {\n\n assert_eq!(\n\n line.is_dirty(),\n\n expect.is_dirty(),\n\n \"line {} dirty didn't match\",\n\n idx,\n\n );\n\n }\n", "file_path": "term/src/test/mod.rs", "rank": 9, "score": 200558.57679373468 }, { "content": "fn read_pipe_with_timeout(mut file: FileDescriptor) -> anyhow::Result<String> {\n\n let mut result = Vec::new();\n\n\n\n file.set_non_blocking(true)?;\n\n let mut pfd = libc::pollfd {\n\n fd: file.as_raw_fd(),\n\n events: libc::POLLIN,\n\n revents: 0,\n\n };\n\n\n\n let mut buf = [0u8; 8192];\n\n\n\n loop {\n\n if unsafe { libc::poll(&mut pfd, 1, 3000) == 1 } {\n\n match file.read(&mut buf) {\n\n Ok(size) if size == 0 => {\n\n break;\n\n }\n\n Ok(size) => {\n\n result.extend_from_slice(&buf[..size]);\n", "file_path": "window/src/os/wayland/window.rs", "rank": 10, "score": 177149.85756957345 }, { "content": "#[test]\n\nfn test_ind() {\n\n let mut term = TestTerm::new(4, 4, 0);\n\n term.print(\"a\\r\\nb\\x1bD\");\n\n term.assert_cursor_pos(1, 2, None);\n\n assert_visible_contents(&term, file!(), line!(), &[\"a \", \"b \", \" \", \" \"]);\n\n term.print(\"\\x1bD\");\n\n term.assert_cursor_pos(1, 3, None);\n\n term.print(\"\\x1bD\");\n\n term.assert_cursor_pos(1, 3, None);\n\n assert_visible_contents(&term, 
file!(), line!(), &[\"b \", \" \", \" \", \" \"]);\n\n}\n\n\n", "file_path": "term/src/test/c1.rs", "rank": 12, "score": 176791.41971122747 }, { "content": "#[test]\n\nfn test_tab() {\n\n let mut term = TestTerm::new(3, 25, 0);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(8, 0, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(16, 0, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(24, 0, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(24, 0, None);\n\n}\n", "file_path": "term/src/test/c0.rs", "rank": 14, "score": 176791.4197112275 }, { "content": "#[test]\n\nfn test_hts() {\n\n let mut term = TestTerm::new(3, 25, 0);\n\n term.print(\"boo\");\n\n term.print(\"\\x1bH\\r\\n\");\n\n term.assert_cursor_pos(0, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(3, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(8, 1, None);\n\n\n\n // Check that tabs are expanded if we resize\n\n term.resize(4, 80, 4 * 16, 80 * 8);\n\n term.cup(0, 1);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(3, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(8, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(16, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(24, 1, None);\n\n term.print(\"\\t\");\n\n term.assert_cursor_pos(32, 1, None);\n\n}\n\n\n", "file_path": "term/src/test/c1.rs", "rank": 16, "score": 176791.41971122747 }, { "content": "#[test]\n\nfn test_bs() {\n\n let mut term = TestTerm::new(3, 4, 0);\n\n term.assert_cursor_pos(0, 0, None);\n\n term.print(\"\\x08\");\n\n term.assert_cursor_pos(0, 0, Some(\"cannot move left of the margin\"));\n\n term.print(\"ab\\x08\");\n\n term.assert_cursor_pos(1, 0, None);\n\n // TODO: when we can set the left margin, we should test that here\n\n}\n\n\n", "file_path": "term/src/test/c0.rs", "rank": 17, "score": 176791.41971122747 }, { "content": "#[test]\n\nfn test_lf() {\n\n let mut term = TestTerm::new(3, 10, 0);\n\n 
term.print(\"hello\\n\");\n\n term.assert_cursor_pos(5, 1, Some(\"LF moves vertically only\"));\n\n}\n\n\n", "file_path": "term/src/test/c0.rs", "rank": 18, "score": 176791.4197112275 }, { "content": "#[test]\n\nfn test_nel() {\n\n let mut term = TestTerm::new(4, 4, 0);\n\n term.print(\"a\\r\\nb\\x1bE\");\n\n term.assert_cursor_pos(0, 2, None);\n\n term.print(\"\\x1bE\");\n\n term.assert_cursor_pos(0, 3, None);\n\n term.print(\"\\x1bE\");\n\n term.assert_cursor_pos(0, 3, None);\n\n assert_visible_contents(&term, file!(), line!(), &[\"b \", \" \", \" \", \" \"]);\n\n}\n\n\n", "file_path": "term/src/test/c1.rs", "rank": 19, "score": 176791.4197112275 }, { "content": "#[test]\n\nfn test_ri() {\n\n let mut term = TestTerm::new(4, 2, 0);\n\n term.print(\"a\\r\\nb\\r\\nc\\r\\nd.\");\n\n assert_visible_contents(&term, file!(), line!(), &[\"a \", \"b \", \"c \", \"d.\"]);\n\n term.assert_cursor_pos(1, 3, None);\n\n term.print(\"\\x1bM\");\n\n term.assert_cursor_pos(1, 2, None);\n\n term.print(\"\\x1bM\");\n\n term.assert_cursor_pos(1, 1, None);\n\n term.print(\"\\x1bM\");\n\n term.assert_cursor_pos(1, 0, None);\n\n term.print(\"\\x1bM\");\n\n term.assert_cursor_pos(1, 0, None);\n\n assert_visible_contents(&term, file!(), line!(), &[\" \", \"a \", \"b \", \"c \"]);\n\n}\n", "file_path": "term/src/test/c1.rs", "rank": 20, "score": 176791.4197112275 }, { "content": "#[test]\n\nfn test_hyperlinks() {\n\n let mut term = TestTerm::new(3, 5, 0);\n\n let link = Arc::new(Hyperlink::new(\"http://example.com\"));\n\n term.hyperlink(&link);\n\n term.print(\"hello\");\n\n term.hyperlink_off();\n\n\n\n let mut linked = CellAttributes::default();\n\n linked.hyperlink = Some(Arc::clone(&link));\n\n\n\n assert_lines_equal(\n\n file!(),\n\n line!(),\n\n &term.screen().visible_lines(),\n\n &[\n\n Line::from_text_with_wrapped_last_col(\"hello\", &linked),\n\n Line::from_text(\" \", &CellAttributes::default()),\n\n Line::from_text(\" \", &CellAttributes::default()),\n\n ],\n\n Compare::TEXT | 
Compare::ATTRS,\n", "file_path": "term/src/test/mod.rs", "rank": 22, "score": 176791.41971122747 }, { "content": "#[test]\n\nfn test_cr() {\n\n let mut term = TestTerm::new(3, 10, 0);\n\n term.print(\"hello\\r\");\n\n term.assert_cursor_pos(0, 0, Some(\"CR moves to left margin on current line\"));\n\n // TODO: when we can set the left margin, we should test that here\n\n}\n\n\n", "file_path": "term/src/test/c0.rs", "rank": 23, "score": 176791.41971122747 }, { "content": "#[test]\n\nfn test_scrollup() {\n\n let mut term = TestTerm::new(2, 1, 4);\n\n term.print(\"1\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 0);\n\n\n\n term.print(\"2\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 1);\n\n\n\n term.print(\"3\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 2);\n\n\n\n term.print(\"4\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"4\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 3);\n\n\n\n term.print(\"5\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"4\", \"5\", \" \"]);\n", "file_path": "term/src/test/mod.rs", "rank": 26, "score": 176791.4197112275 }, { "content": "#[test]\n\nfn test_emoji_with_modifier() {\n\n let waving_hand = \"\\u{1f44b}\";\n\n let waving_hand_dark_tone = \"\\u{1f44b}\\u{1f3ff}\";\n\n\n\n let mut term = TestTerm::new(3, 5, 0);\n\n term.print(waving_hand);\n\n term.print(\"\\r\\n\");\n\n term.print(waving_hand_dark_tone);\n\n\n\n assert_all_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n &format!(\"{} \", waving_hand),\n\n &format!(\"{} \", waving_hand_dark_tone),\n\n \" \",\n\n ],\n\n );\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 28, "score": 
172276.3707026871 }, { "content": "#[test]\n\nfn test_scroll_margins() {\n\n let mut term = TestTerm::new(3, 1, 10);\n\n term.print(\"1\\n2\\n3\\n4\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"4\", \" \"]);\n\n\n\n let margins = CSI::Cursor(termwiz::escape::csi::Cursor::SetTopAndBottomMargins {\n\n top: OneBased::new(1),\n\n bottom: OneBased::new(2),\n\n });\n\n term.print(format!(\"{}\", margins));\n\n\n\n term.print(\"z\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"4\", \"z\"]);\n\n\n\n term.print(\"a\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"4\", \"a\"]);\n\n\n\n term.cup(0, 1);\n\n term.print(\"W\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"1\", \"2\", \"3\", \"W\", \" \", \"a\"]);\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 29, "score": 172276.37070268707 }, { "content": "#[test]\n\nfn test_resize_wrap() {\n\n const LINES: usize = 8;\n\n let mut term = TestTerm::new(LINES, 4, 0);\n\n term.print(\"111\\r\\n2222aa\\r\\n333\\r\\n\");\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222\", \"aa \", \"333 \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 5, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222a\", \"a\", \"333 \", \" \", \" \", \" \", \" \",\n\n ],\n", "file_path": "term/src/test/mod.rs", "rank": 30, "score": 172276.37070268707 }, { "content": "fn write_pipe_with_timeout(mut file: FileDescriptor, data: &[u8]) -> anyhow::Result<()> {\n\n file.set_non_blocking(true)?;\n\n let mut pfd = libc::pollfd {\n\n fd: file.as_raw_fd(),\n\n events: libc::POLLOUT,\n\n revents: 0,\n\n };\n\n\n\n let mut buf = data;\n\n\n\n while !buf.is_empty() {\n\n if unsafe { libc::poll(&mut pfd, 1, 3000) == 1 } {\n\n match file.write(buf) {\n\n Ok(size) if size == 0 => {\n\n bail!(\"zero byte write\");\n\n }\n\n Ok(size) => {\n\n buf 
= &buf[size..];\n\n }\n\n Err(e) => bail!(\"error writing to pipe: {}\", e),\n\n }\n\n } else {\n\n bail!(\"timed out writing to pipe\");\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "window/src/os/wayland/window.rs", "rank": 31, "score": 170271.42258807234 }, { "content": "#[test]\n\nfn basic_output() {\n\n let mut term = TestTerm::new(5, 10, 0);\n\n\n\n term.cup(1, 1);\n\n\n\n term.set_auto_wrap(false);\n\n term.print(\"hello, world!\");\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \" \",\n\n \" hello, w!\",\n\n \" \",\n\n \" \",\n\n \" \",\n\n ],\n\n );\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 32, "score": 163694.8357533798 }, { "content": "#[test]\n\nfn drag_selection() {\n\n let mut term = TestTerm::new(3, 12, 0);\n\n term.print(\"hello world\\r\\n\");\n\n assert_visible_contents(&term, &[\"hello world \", \" \", \" \"]);\n\n\n\n term.drag_select(1, 0, 4, 0);\n\n assert_eq!(term.get_clipboard().unwrap(), \"ello\");\n\n\n\n // Now check that we respect double-width boundaries reasonably sanely;\n\n // here we're dragging from the middle of the skull emoji\n\n term.print(\"\\u{1F480}skull\\r\\n\");\n\n assert_visible_contents(\n\n &term,\n\n &[\"hello world \", \"\\u{1F480}skull \", \" \"],\n\n );\n\n term.drag_select(1, 1, 5, 1);\n\n assert_eq!(term.get_clipboard().unwrap(), \"skul\");\n\n\n\n // Let's include the start of it this time\n\n term.drag_select(0, 1, 5, 1);\n", "file_path": "term/src/test/selection.rs", "rank": 33, "score": 163694.8357533798 }, { "content": "#[test]\n\nfn selection_in_scrollback() {\n\n let mut term = TestTerm::new(2, 2, 4);\n\n term.print(\"1 2 3 4\");\n\n assert_all_contents(&term, &[\"1 \", \"2 \", \"3 \", \"4 \"]);\n\n\n\n // Scroll back one line\n\n term.scroll_viewport(-1);\n\n term.assert_viewport_contents(&[\"2 \", \"3 \"]);\n\n\n\n term.click_n(0, 0, MouseButton::Left, 2);\n\n assert_eq!(term.get_clipboard().unwrap(), \"2\");\n\n\n\n // Clear the click streak\n\n 
term.click_n(0, 1, MouseButton::Right, 1);\n\n\n\n term.click_n(0, 1, MouseButton::Left, 3);\n\n assert_eq!(term.get_clipboard().unwrap(), \"3\");\n\n\n\n term.drag_select(0, 0, 0, 1);\n\n assert_eq!(term.get_clipboard().unwrap(), \"2\\n3\");\n\n}\n", "file_path": "term/src/test/selection.rs", "rank": 34, "score": 163694.8357533798 }, { "content": "#[test]\n\nfn double_click_selection() {\n\n let mut term = TestTerm::new(3, 10, 0);\n\n term.print(\"hello world\");\n\n\n\n term.click_n(1, 0, MouseButton::Left, 2);\n\n\n\n assert_eq!(term.get_clipboard().unwrap(), \"hello\");\n\n}\n\n\n\n/// Test triple click to select a line\n", "file_path": "term/src/test/selection.rs", "rank": 35, "score": 158958.6341923695 }, { "content": "#[test]\n\nfn cursor_movement_damage() {\n\n let mut term = TestTerm::new(2, 3, 0);\n\n\n\n term.print(\"fooo.\");\n\n assert_visible_contents(&term, file!(), line!(), &[\"foo\", \"o. \"]);\n\n term.assert_cursor_pos(2, 1, None);\n\n term.assert_dirty_lines(&[0, 1], None);\n\n\n\n term.cup(0, 1);\n\n term.clean_dirty_lines();\n\n term.print(\"\\x08\");\n\n term.assert_cursor_pos(0, 1, Some(\"BS doesn't change the line\"));\n\n term.assert_dirty_lines(&[1], None);\n\n term.clean_dirty_lines();\n\n\n\n term.cup(0, 0);\n\n term.assert_dirty_lines(&[0, 1], Some(\"cursor movement dirties old and new lines\"));\n\n}\n\n\n\n/// Replicates a bug I initially found via:\n\n/// $ vim\n\n/// :help\n\n/// PageDown\n", "file_path": "term/src/test/mod.rs", "rank": 36, "score": 158958.6341923695 }, { "content": "#[test]\n\nfn triple_click_selection() {\n\n let mut term = TestTerm::new(3, 10, 0);\n\n term.print(\"hello world\");\n\n assert_visible_contents(&term, &[\"hello worl\", \"d \", \" \"]);\n\n term.click_n(1, 0, MouseButton::Left, 3);\n\n\n\n assert_eq!(term.get_clipboard().unwrap(), \"hello worl\");\n\n}\n\n\n\n/// Test double click on wrapped line selects across the line boundary\n", "file_path": "term/src/test/selection.rs", "rank": 37, "score": 
158958.6341923695 }, { "content": "#[test]\n\nfn double_click_wrapped_selection() {\n\n let mut term = TestTerm::new(3, 10, 0);\n\n term.print(\"hello world\");\n\n assert_visible_contents(&term, &[\"hello worl\", \"d \", \" \"]);\n\n term.click_n(7, 0, MouseButton::Left, 2);\n\n\n\n assert_eq!(term.get_clipboard().unwrap(), \"world\");\n\n}\n\n\n\n/// Make sure that we adjust for the viewport offset when scrolling\n", "file_path": "term/src/test/selection.rs", "rank": 38, "score": 154558.4110320304 }, { "content": "fn csi_u_encode(buf: &mut String, c: char, mods: KeyModifiers) -> Result<(), Error> {\n\n if ENABLE_CSI_U {\n\n write!(buf, \"\\x1b[{};{}u\", c as u32, 1 + encode_modifiers(mods))?;\n\n } else {\n\n // FIXME: this ignores the modifiers completely. That's sort of\n\n // OK, but eg: CTRL-SPACE should really send a NUL byte in that\n\n // case, so this isn't great\n\n write!(buf, \"{}\", c)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "term/src/terminalstate.rs", "rank": 39, "score": 135389.85977449562 }, { "content": "pub fn poll_for_read(pfd: &mut [pollfd]) {\n\n if let Err(e) = poll(pfd, None) {\n\n log::error!(\"poll failed for {}\", e);\n\n }\n\n}\n", "file_path": "src/server/pollable.rs", "rank": 40, "score": 127926.13658303104 }, { "content": "fn open_log(path: PathBuf) -> anyhow::Result<File> {\n\n create_user_owned_dirs(\n\n path.parent()\n\n .ok_or_else(|| anyhow!(\"path {} has no parent dir!?\", path.display()))?,\n\n )?;\n\n let mut options = OpenOptions::new();\n\n options.write(true).create(true).append(true);\n\n options\n\n .open(&path)\n\n .map_err(|e| anyhow!(\"failed to open log stream: {}: {}\", path.display(), e))\n\n}\n\n\n\nimpl DaemonOptions {\n\n #[cfg_attr(windows, allow(dead_code))]\n\n pub fn pid_file(&self) -> PathBuf {\n\n self.pid_file\n\n .as_ref()\n\n .cloned()\n\n .unwrap_or_else(|| RUNTIME_DIR.join(\"pid\"))\n\n }\n", "file_path": "src/config/daemon.rs", "rank": 41, "score": 119403.76991182395 }, { "content": "fn 
consume_stream<F: Read, T: Write>(mut from_stream: F, mut to_stream: T) -> anyhow::Result<()> {\n\n let mut buf = [0u8; 8192];\n\n\n\n loop {\n\n let size = from_stream.read(&mut buf)?;\n\n if size == 0 {\n\n break;\n\n }\n\n to_stream.write_all(&buf[0..size])?;\n\n to_stream.flush()?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 117251.60003303044 }, { "content": "fn default_term() -> String {\n\n \"xterm-256color\".into()\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 43, "score": 117053.10838549751 }, { "content": "#[allow(dead_code)]\n\npub fn use_default_configuration() {\n\n CONFIG.use_defaults();\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 44, "score": 114267.79590253005 }, { "content": "fn default_scrollback_lines() -> usize {\n\n 3500\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 45, "score": 114204.49200503716 }, { "content": "struct TestTerm {\n\n term: Terminal,\n\n host: TestHost,\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 46, "score": 112134.43516741035 }, { "content": "fn main() -> anyhow::Result<()> {\n\n println!(\"Type `exit` to quit this example, or start a word with `h` and press Tab.\");\n\n let mut terminal = line_editor_terminal()?;\n\n let mut editor = LineEditor::new(&mut terminal);\n\n\n\n let mut host = Host::default();\n\n loop {\n\n if let Some(line) = editor.read_line(&mut host)? 
{\n\n println!(\"read line: {:?}\", line);\n\n if line == \"exit\" {\n\n break;\n\n }\n\n\n\n host.history().add(&line);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "termwiz/examples/line_editor.rs", "rank": 47, "score": 111350.13576929475 }, { "content": "#[derive(Debug)]\n\nstruct TestTermConfig {\n\n scrollback: usize,\n\n}\n\nimpl TerminalConfiguration for TestTermConfig {\n\n fn scrollback_size(&self) -> usize {\n\n self.scrollback\n\n }\n\n\n\n fn color_palette(&self) -> ColorPalette {\n\n ColorPalette::default()\n\n }\n\n}\n\n\n\nimpl TestTerm {\n\n fn new(height: usize, width: usize, scrollback: usize) -> Self {\n\n let mut term = Terminal::new(\n\n height,\n\n width,\n\n height * 16,\n\n width * 8,\n", "file_path": "term/src/test/mod.rs", "rank": 48, "score": 109411.96429988781 }, { "content": "/// This is a conceptually simple function that computes the bounds\n\n/// of the whitespace delimited word at the specified cursor position\n\n/// in the supplied line string.\n\n/// It returns the range and the corresponding slice out of the line.\n\n/// This function is sufficient for example purposes; in a real application\n\n/// the equivalent function would need to be aware of quoting and other\n\n/// application specific context.\n\nfn word_at_cursor(line: &str, cursor_position: usize) -> Option<(std::ops::Range<usize>, &str)> {\n\n let char_indices: Vec<(usize, char)> = line.char_indices().collect();\n\n if char_indices.is_empty() {\n\n return None;\n\n }\n\n let char_position = char_indices\n\n .iter()\n\n .position(|(idx, _)| *idx == cursor_position)\n\n .unwrap_or(char_indices.len());\n\n\n\n // Look back until we find whitespace\n\n let mut start_position = char_position;\n\n while start_position > 0\n\n && start_position <= char_indices.len()\n\n && !char_indices[start_position - 1].1.is_whitespace()\n\n {\n\n start_position -= 1;\n\n }\n\n\n\n // Look forwards until we find whitespace\n", "file_path": "termwiz/examples/line_editor.rs", "rank": 49, 
"score": 109104.05821308118 }, { "content": "/// Create a pair of connected sockets\n\n///\n\n/// This implementation creates a pair of SOCK_STREAM sockets.\n\npub fn socketpair() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n socketpair_impl()\n\n}\n", "file_path": "filedescriptor/src/lib.rs", "rank": 50, "score": 108473.65640187665 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\n#[doc(hidden)]\n\npub fn socketpair_impl() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n let mut fds = [-1i32; 2];\n\n let res = unsafe { libc::socketpair(libc::PF_LOCAL, libc::SOCK_STREAM, 0, fds.as_mut_ptr()) };\n\n if res == -1 {\n\n bail!(\n\n \"failed to create a socketpair: {:?}\",\n\n std::io::Error::last_os_error()\n\n )\n\n } else {\n\n let mut read = FileDescriptor {\n\n handle: OwnedHandle {\n\n handle: fds[0],\n\n handle_type: (),\n\n },\n\n };\n\n let mut write = FileDescriptor {\n\n handle: OwnedHandle {\n\n handle: fds[1],\n\n handle_type: (),\n\n },\n\n };\n\n read.handle.cloexec()?;\n\n write.handle.cloexec()?;\n\n Ok((read, write))\n\n }\n\n}\n\n\n\npub use libc::{pollfd, POLLERR, POLLHUP, POLLIN, POLLOUT};\n\nuse std::time::Duration;\n\n\n", "file_path": "filedescriptor/src/unix.rs", "rank": 51, "score": 106272.86670100283 }, { "content": "#[doc(hidden)]\n\npub fn socketpair_impl() -> anyhow::Result<(FileDescriptor, FileDescriptor)> {\n\n init_winsock();\n\n\n\n let s = socket(AF_INET, SOCK_STREAM, 0)?;\n\n\n\n let mut in_addr: SOCKADDR_IN = unsafe { std::mem::zeroed() };\n\n in_addr.sin_family = AF_INET as _;\n\n unsafe {\n\n *in_addr.sin_addr.S_un.S_addr_mut() = htonl(INADDR_LOOPBACK);\n\n }\n\n\n\n unsafe {\n\n if bind(\n\n s.as_raw_handle() as _,\n\n std::mem::transmute(&in_addr),\n\n std::mem::size_of_val(&in_addr) as _,\n\n ) != 0\n\n {\n\n bail!(\"bind failed: {}\", IoError::last_os_error());\n\n }\n", "file_path": "filedescriptor/src/windows.rs", "rank": 52, "score": 106272.86670100283 }, { "content": "fn 
default_ratelimit_line_prefetches_per_second() -> u32 {\n\n 10\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 53, "score": 105977.75519086001 }, { "content": "/// characters that when masked for CTRL could be an ascii control character\n\n/// or could be a key that a user legitimately wants to process in their\n\n/// terminal application\n\nfn is_ambiguous_ascii_ctrl(c: char) -> bool {\n\n match c {\n\n 'i' | 'I' | 'm' | 'M' | '[' | '{' | '@' => true,\n\n _ => false,\n\n }\n\n}\n\n\n\nimpl TerminalState {\n\n pub fn new(\n\n physical_rows: usize,\n\n physical_cols: usize,\n\n pixel_width: usize,\n\n pixel_height: usize,\n\n config: Arc<dyn TerminalConfiguration>,\n\n ) -> TerminalState {\n\n let screen = ScreenOrAlt::new(physical_rows, physical_cols, &config);\n\n\n\n TerminalState {\n\n config,\n\n screen,\n", "file_path": "term/src/terminalstate.rs", "rank": 54, "score": 105744.37411514229 }, { "content": "/// Adjust the color to make it appear disabled.\n\n/// This is not defined on RgbColor itself in order\n\n/// to avoid termwiz requiring a dep on the palette crate.\n\nfn grey_out(color: RgbColor) -> RgbColor {\n\n use palette::{Blend, Lch, Saturate, Srgba};\n\n let color = Srgba::new(color.red, color.green, color.blue, 0xff);\n\n let color: Srgba = color.into_format();\n\n let color = color.into_linear();\n\n\n\n let desaturated = Lch::from(color).desaturate(0.2);\n\n\n\n let tint = Srgba::new(0.2, 0.2, 0.2, 0.6).into_linear();\n\n let result = Srgba::from_linear(tint.over(desaturated.into()));\n\n let result = Srgba::<u8>::from_format(result);\n\n\n\n RgbColor::new(result.red, result.green, result.blue)\n\n}\n\n\n\nimpl fmt::Debug for Palette256 {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n // If we wanted to dump all of the entries, we'd use this:\n\n // self.0[..].fmt(fmt)\n\n // However, we typically don't care about those and we're interested\n", "file_path": "term/src/color.rs", "rank": 55, "score": 
103018.73379774296 }, { "content": "fn encode_modifiers(mods: KeyModifiers) -> u8 {\n\n let mut number = 0;\n\n if mods.contains(KeyModifiers::SHIFT) {\n\n number |= 1;\n\n }\n\n if mods.contains(KeyModifiers::ALT) {\n\n number |= 2;\n\n }\n\n if mods.contains(KeyModifiers::CTRL) {\n\n number |= 4;\n\n }\n\n number\n\n}\n\n\n\n// FIXME: provide an option to enable this, because it is super annoying\n\n// in vim when accidentally pressing shift-space and it emits a sequence\n\n// that undoes some number of commands\n\nconst ENABLE_CSI_U: bool = false;\n\n\n", "file_path": "term/src/terminalstate.rs", "rank": 56, "score": 103018.73379774296 }, { "content": "fn emit_padding_for_terminal(len: usize, spacer: &CellAttributes, output: &mut Vec<Change>) {\n\n if len == 0 {\n\n return;\n\n }\n\n output.push(Change::AllAttributes(spacer.clone()));\n\n let mut s = String::new();\n\n for _ in 0..len {\n\n s.push(' ');\n\n }\n\n output.push(s.into());\n\n}\n\n\n", "file_path": "tabout/src/lib.rs", "rank": 57, "score": 102829.11766008072 }, { "content": "pub fn use_ime(enable: bool) {\n\n USE_IME.store(enable, Ordering::Relaxed);\n\n}\n\n\n", "file_path": "window/src/os/macos/window.rs", "rank": 58, "score": 100801.66324230288 }, { "content": "fn linear_f32_to_srgb8_using_table(f: f32) -> u8 {\n\n let minval = f32::from_bits(MINVAL);\n\n let almost_one = f32::from_bits(ALMOST_ONE);\n\n\n\n let f = if f < minval {\n\n minval\n\n } else if f > almost_one {\n\n almost_one\n\n } else {\n\n f\n\n };\n\n\n\n let f_bits = f.to_bits();\n\n let tab = unsafe { *F32_TO_U8_TABLE.get_unchecked(((f_bits - MINVAL) >> 20) as usize) };\n\n let bias = (tab >> 16) << 9;\n\n let scale = tab & 0xffff;\n\n\n\n let t = (f_bits >> 12) & 0xff;\n\n\n\n ((bias + scale * t) >> 16) as u8\n\n}\n\n\n", "file_path": "window/src/color.rs", "rank": 59, "score": 100801.66324230288 }, { "content": "/// Examines a set of FileDescriptors to see if some of them are ready for I/O,\n\n/// or if certain events have 
occurred on them.\n\n///\n\n/// This uses the system native readiness checking mechanism, which on Windows\n\n/// means that it does NOT use IOCP and that this only works with sockets on\n\n/// Windows. If you need IOCP then the `mio` crate is recommended for a much\n\n/// more scalable solution.\n\n///\n\n/// On macOS, the `poll(2)` implementation has problems when used with eg: pty\n\n/// descriptors, so this implementation of poll uses the `select(2)` interface\n\n/// under the covers. That places a limit on the maximum file descriptor value\n\n/// that can be passed to poll. If a file descriptor is out of range then an\n\n/// error will returned. This limitation could potentially be lifted in the\n\n/// future.\n\n///\n\n/// On Windows, `WSAPoll` is used to implement readiness checking, which has\n\n/// the consequence that it can only be used with sockets.\n\n///\n\n/// If `duration` is `None`, then `poll` will block until any of the requested\n\n/// events are ready. Otherwise, `duration` specifies how long to wait for\n\n/// readiness before giving up.\n\n///\n\n/// The return value is the number of entries that were satisfied; `0` means\n\n/// that none were ready after waiting for the specified duration.\n\n///\n\n/// The `pfd` array is mutated and the `revents` field is updated to indicate\n\n/// which of the events were received.\n\npub fn poll(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n poll_impl(pfd, duration)\n\n}\n\n\n", "file_path": "filedescriptor/src/lib.rs", "rank": 60, "score": 100629.55070995117 }, { "content": "/// Read a single leb128 encoded value from the stream\n\nfn read_u64<R: std::io::Read>(mut r: R) -> anyhow::Result<u64> {\n\n leb128::read::unsigned(&mut r)\n\n .map_err(|err| match err {\n\n leb128::read::Error::IoError(ioerr) => anyhow::Error::new(ioerr),\n\n err => anyhow::Error::new(err),\n\n })\n\n .context(\"reading leb128\")\n\n}\n\n\n", "file_path": "src/server/codec.rs", "rank": 61, "score": 
100615.83608699101 }, { "content": "/// Decode a frame.\n\n/// See encode_raw() for the frame format.\n\nfn decode_raw<R: std::io::Read>(mut r: R) -> anyhow::Result<Decoded> {\n\n let len = read_u64(r.by_ref()).context(\"reading PDU length\")?;\n\n let (len, is_compressed) = if (len & COMPRESSED_MASK) != 0 {\n\n (len & !COMPRESSED_MASK, true)\n\n } else {\n\n (len, false)\n\n };\n\n let serial = read_u64(r.by_ref()).context(\"reading PDU serial\")?;\n\n let ident = read_u64(r.by_ref()).context(\"reading PDU ident\")?;\n\n let data_len = len as usize - (encoded_length(ident) + encoded_length(serial));\n\n\n\n if is_compressed {\n\n metrics::value!(\"pdu.decode.compressed.size\", data_len as u64);\n\n } else {\n\n metrics::value!(\"pdu.decode.size\", data_len as u64);\n\n }\n\n\n\n let mut data = vec![0u8; data_len];\n\n r.read_exact(&mut data).with_context(|| {\n\n format!(\n", "file_path": "src/server/codec.rs", "rank": 62, "score": 100615.83608699101 }, { "content": "struct TestHost {}\n\n\n\nimpl TestHost {\n\n fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl std::io::Write for TestHost {\n\n fn write(&mut self, _buf: &[u8]) -> Result<usize, std::io::Error> {\n\n panic!(\"no writer support in TestHost\");\n\n }\n\n fn flush(&mut self) -> Result<(), std::io::Error> {\n\n panic!(\"no writer support in TestHost\");\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 63, "score": 99120.10965244782 }, { "content": "#[doc(hidden)]\n\npub fn poll_impl(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n let poll_result = unsafe {\n\n WSAPoll(\n\n pfd.as_mut_ptr(),\n\n pfd.len() as _,\n\n duration\n\n .map(|wait| wait.as_millis() as libc::c_int)\n\n .unwrap_or(-1),\n\n )\n\n };\n\n if poll_result < 0 {\n\n Err(std::io::Error::last_os_error().into())\n\n } else {\n\n Ok(poll_result as usize)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::io::{Read, Write};\n", "file_path": "filedescriptor/src/windows.rs", "rank": 64, 
"score": 98743.77729103202 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\n#[doc(hidden)]\n\npub fn poll_impl(pfd: &mut [pollfd], duration: Option<Duration>) -> anyhow::Result<usize> {\n\n let poll_result = unsafe {\n\n libc::poll(\n\n pfd.as_mut_ptr(),\n\n pfd.len() as _,\n\n duration\n\n .map(|wait| wait.as_millis() as libc::c_int)\n\n .unwrap_or(-1),\n\n )\n\n };\n\n if poll_result < 0 {\n\n Err(std::io::Error::last_os_error().into())\n\n } else {\n\n Ok(poll_result as usize)\n\n }\n\n}\n\n\n\n// macOS has a broken poll(2) implementation, so we introduce a layer to deal with that here\n\n#[cfg(target_os = \"macos\")]\n\nmod macos {\n", "file_path": "filedescriptor/src/unix.rs", "rank": 65, "score": 98743.77729103202 }, { "content": "fn read_from_tab_pty(tab_id: TabId, mut reader: Box<dyn std::io::Read>) {\n\n const BUFSIZE: usize = 32 * 1024;\n\n let mut buf = [0; BUFSIZE];\n\n\n\n let mut lim = RateLimiter::new(|config| config.ratelimit_output_bytes_per_second);\n\n\n\n loop {\n\n match reader.read(&mut buf) {\n\n Ok(size) if size == 0 => {\n\n error!(\"read_pty EOF: tab_id {}\", tab_id);\n\n break;\n\n }\n\n Err(err) => {\n\n error!(\"read_pty failed: tab {} {:?}\", tab_id, err);\n\n break;\n\n }\n\n Ok(size) => {\n\n let buf = &buf[..size];\n\n let mut pos = 0;\n\n\n", "file_path": "src/mux/mod.rs", "rank": 66, "score": 97230.86505699012 }, { "content": "pub fn allocate(width: usize, height: usize) -> (TermWizTerminal, TermWizTerminalTab) {\n\n let (render_tx, render_rx) = channel();\n\n let (input_tx, input_rx) = channel();\n\n\n\n let tw_term = TermWizTerminal {\n\n render_tx,\n\n input_rx,\n\n screen_size: ScreenSize {\n\n cols: width,\n\n rows: height,\n\n xpixel: 0,\n\n ypixel: 0,\n\n },\n\n };\n\n\n\n let domain_id = 0;\n\n let tab = TermWizTerminalTab::new(domain_id, width, height, input_tx, render_rx);\n\n (tw_term, tab)\n\n}\n\n\n", "file_path": "src/termwiztermtab.rs", "rank": 67, "score": 97128.30741846663 }, { "content": "\n\n 
assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\"111\", \"aaa\", \" \", \" \", \"bbb\"],\n\n );\n\n term.assert_dirty_lines(&[1, 2, 3], None);\n\n\n\n // expand the scroll region to fill the screen\n\n term.set_scroll_region(0, 4);\n\n term.clean_dirty_lines();\n\n term.delete_lines(1);\n\n\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\"111\", \" \", \" \", \"bbb\", \" \"],\n\n );\n\n term.assert_dirty_lines(&[1, 2, 3, 4], None);\n\n}\n\n\n\n/// Test the behavior of wrapped lines when we resize the terminal\n\n/// wider and then narrower.\n", "file_path": "term/src/test/mod.rs", "rank": 68, "score": 93156.81608048585 }, { "content": " &[\"111\", \"444\", \"555\", \" \", \" \"],\n\n );\n\n term.assert_dirty_lines(&[1, 2, 3, 4], None);\n\n term.clean_dirty_lines();\n\n\n\n term.cup(0, 3);\n\n term.print(\"aaa\\r\\nbbb\");\n\n term.cup(0, 1);\n\n term.clean_dirty_lines();\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\"111\", \"444\", \"555\", \"aaa\", \"bbb\"],\n\n );\n\n\n\n // test with a scroll region smaller than the screen\n\n term.set_scroll_region(1, 3);\n\n print_all_lines(&term);\n\n term.delete_lines(2);\n", "file_path": "term/src/test/mod.rs", "rank": 69, "score": 93156.08543655663 }, { "content": " reason\n\n );\n\n }\n\n}\n\n\n\nimpl Deref for TestTerm {\n\n type Target = Terminal;\n\n\n\n fn deref(&self) -> &Terminal {\n\n &self.term\n\n }\n\n}\n\n\n\nimpl DerefMut for TestTerm {\n\n fn deref_mut(&mut self) -> &mut Terminal {\n\n &mut self.term\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 70, "score": 93153.92944312253 }, { "content": "use super::*;\n\nuse pretty_assertions::assert_eq;\n\n\n\n/// Test basic dragging to select some text\n\n#[test]\n", "file_path": "term/src/test/selection.rs", "rank": 71, "score": 93152.41188910279 }, { "content": "//! 
Testing C0 control characters\n\n\n\nuse super::*;\n\n\n\n#[test]\n", "file_path": "term/src/test/c0.rs", "rank": 72, "score": 93152.40793970044 }, { "content": "//! Testing C1 control sequences\n\n\n\nuse super::*;\n\n\n\n#[test]\n", "file_path": "term/src/test/c1.rs", "rank": 73, "score": 93152.40793970044 }, { "content": " Arc::new(TestTermConfig { scrollback }),\n\n );\n\n let clip: Arc<dyn Clipboard> = Arc::new(LocalClip::new());\n\n term.set_clipboard(&clip);\n\n\n\n let mut term = Self {\n\n term,\n\n host: TestHost::new(),\n\n };\n\n\n\n term.set_auto_wrap(true);\n\n\n\n term\n\n }\n\n\n\n fn print<B: AsRef<[u8]>>(&mut self, bytes: B) {\n\n self.term.advance_bytes(bytes, &mut self.host);\n\n }\n\n\n\n #[allow(dead_code)]\n", "file_path": "term/src/test/mod.rs", "rank": 75, "score": 93151.84668969616 }, { "content": "//! Various tests of the terminal model and escape sequence\n\n//! processing routines.\n\n\n\nuse super::*;\n\nmod c0;\n\nuse bitflags::bitflags;\n\nmod c1;\n\nmod csi;\n\n// mod selection; FIXME: port to render layer\n\nuse crate::color::ColorPalette;\n\nuse pretty_assertions::assert_eq;\n\nuse std::cell::RefCell;\n\nuse std::sync::Arc;\n\nuse termwiz::escape::csi::{Edit, EraseInDisplay, EraseInLine};\n\nuse termwiz::escape::{OneBased, OperatingSystemCommand, CSI};\n\nuse termwiz::surface::CursorShape;\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 76, "score": 93150.89985944514 }, { "content": " let otherlink = Arc::new(Hyperlink::new_with_id(\"http://example.com/other\", \"w00t\"));\n\n\n\n // Switching link and turning it off\n\n term.hyperlink(&otherlink);\n\n term.print(\"wo\");\n\n // soft reset also disables hyperlink attribute\n\n term.soft_reset();\n\n term.print(\"00t\");\n\n\n\n let mut partial_line =\n\n Line::from_text_with_wrapped_last_col(\"wo00t\", &CellAttributes::default());\n\n partial_line.set_cell(\n\n 0,\n\n Cell::new(\n\n 'w',\n\n CellAttributes::default()\n\n .set_hyperlink(Some(Arc::clone(&otherlink)))\n\n 
.clone(),\n\n ),\n\n );\n", "file_path": "term/src/test/mod.rs", "rank": 77, "score": 93149.34020892378 }, { "content": " );\n\n term.resize(LINES, 6, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222aa\", \"333 \", \" \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 7, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222aa\", \"333 \", \" \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 8, 0, 0);\n", "file_path": "term/src/test/mod.rs", "rank": 78, "score": 93148.61943205952 }, { "content": " file!(),\n\n line!(),\n\n &[\n\n \" \",\n\n \" \",\n\n \" \",\n\n \" \",\n\n \" \",\n\n ],\n\n );\n\n\n\n term.cup(0, 2);\n\n term.print(\"woot\");\n\n term.cup(2, 2);\n\n term.erase_in_line(EraseInLine::EraseToEndOfLine);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n", "file_path": "term/src/test/mod.rs", "rank": 79, "score": 93148.61943205952 }, { "content": " assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222aa\", \"333 \", \" \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 5, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222a\", \"a\", \"333 \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 4, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\"111 \", \"2222\", \"aa\", \"333 \", \" \", \" \", \" \", \" \"],\n\n );\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 80, "score": 93148.48802199158 }, { "content": " assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222aa\", \"333 \", \" \", \" \", \" \", \" \", \" \",\n\n ],\n\n );\n\n\n\n // Resize smaller again\n\n term.resize(LINES, 7, 0, 0);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \"111 \", \"2222aa\", \"333 \", \" \", \" 
\", \" \", \" \", \" \",\n\n ],\n\n );\n\n term.resize(LINES, 6, 0, 0);\n", "file_path": "term/src/test/mod.rs", "rank": 81, "score": 93148.24973923026 }, { "content": " );\n\n\n\n term.hyperlink(&link);\n\n term.print(\"he\");\n\n // Resetting pen should not reset the link\n\n term.print(\"\\x1b[m\");\n\n term.print(\"y!!\");\n\n\n\n assert_lines_equal(\n\n file!(),\n\n line!(),\n\n &term.screen().visible_lines(),\n\n &[\n\n Line::from_text_with_wrapped_last_col(\"hello\", &linked),\n\n Line::from_text_with_wrapped_last_col(\"hey!!\", &linked),\n\n \" \".into(),\n\n ],\n\n Compare::TEXT | Compare::ATTRS,\n\n );\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 82, "score": 93147.86681099296 }, { "content": " assert_eq!(term.screen().visible_row_to_stable_row(0), 4);\n\n\n\n term.print(\"6\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"2\", \"3\", \"4\", \"5\", \"6\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 5);\n\n\n\n term.print(\"7\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"3\", \"4\", \"5\", \"6\", \"7\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 6);\n\n\n\n term.print(\"8\\n\");\n\n assert_all_contents(&term, file!(), line!(), &[\"4\", \"5\", \"6\", \"7\", \"8\", \" \"]);\n\n assert_eq!(term.screen().visible_row_to_stable_row(0), 7);\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 83, "score": 93147.37662957575 }, { "content": " assert_eq!(term.get_clipboard().unwrap(), \"\\u{1F480}skul\");\n\n\n\n // Multi-line selection\n\n term.drag_select(1, 0, 6, 1);\n\n assert_eq!(term.get_clipboard().unwrap(), \"ello world\\n\\u{1F480}skull\");\n\n\n\n // This next one drags off the bottom; this is technically out of bounds\n\n // but we want to make sure we handle this without panicking. 
See the\n\n // comment in TerminalState::mouse_event for more info.\n\n term.drag_select(0, 0, 15, 3);\n\n assert_eq!(\n\n term.get_clipboard().unwrap(),\n\n \"hello world\\n\\u{1F480}skull\\n\"\n\n );\n\n\n\n term.drag_select(6, 0, 3, 1);\n\n assert_eq!(term.get_clipboard().unwrap(), \"world\\n\\u{1F480}sk\");\n\n}\n\n\n\n/// Test double click to select a word\n", "file_path": "term/src/test/selection.rs", "rank": 84, "score": 93147.28869964382 }, { "content": " \" \",\n\n \" \",\n\n \"wo \",\n\n \" \",\n\n \" \",\n\n ],\n\n );\n\n\n\n term.erase_in_line(EraseInLine::EraseToStartOfLine);\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \" \",\n\n \" \",\n\n \" \",\n\n \" \",\n\n \" \",\n\n ],\n\n );\n\n}\n\n\n\n/// Ensure that we dirty lines as the cursor is moved around, otherwise\n\n/// the renderer won't draw the cursor in the right place\n", "file_path": "term/src/test/mod.rs", "rank": 85, "score": 93147.21822048264 }, { "content": "\n\n fn cup(&mut self, col: isize, row: isize) {\n\n self.print(CSI);\n\n self.print(format!(\"{};{}H\", row + 1, col + 1));\n\n }\n\n\n\n fn hvp(&mut self, col: isize, row: isize) {\n\n self.print(CSI);\n\n self.print(format!(\"{};{}f\", row + 1, col + 1));\n\n }\n\n\n\n fn erase_in_display(&mut self, erase: EraseInDisplay) {\n\n let csi = CSI::Edit(Edit::EraseInDisplay(erase));\n\n self.print(format!(\"{}\", csi));\n\n }\n\n\n\n fn erase_in_line(&mut self, erase: EraseInLine) {\n\n let csi = CSI::Edit(Edit::EraseInLine(erase));\n\n self.print(format!(\"{}\", csi));\n\n }\n", "file_path": "term/src/test/mod.rs", "rank": 86, "score": 93145.78879602136 }, { "content": " term.set_auto_wrap(true);\n\n term.erase_in_display(EraseInDisplay::EraseToStartOfDisplay);\n\n term.cup(1, 1);\n\n term.print(\"hello, world!\");\n\n assert_visible_contents(\n\n &term,\n\n file!(),\n\n line!(),\n\n &[\n\n \" \",\n\n \" hello, wo\",\n\n \"rld! 
\",\n\n \" \",\n\n \" \",\n\n ],\n\n );\n\n\n\n term.erase_in_display(EraseInDisplay::EraseToStartOfDisplay);\n\n assert_visible_contents(\n\n &term,\n", "file_path": "term/src/test/mod.rs", "rank": 87, "score": 93145.59716930808 }, { "content": " partial_line.set_cell(\n\n 1,\n\n Cell::new(\n\n 'o',\n\n CellAttributes::default()\n\n .set_hyperlink(Some(Arc::clone(&otherlink)))\n\n .clone(),\n\n ),\n\n );\n\n\n\n assert_lines_equal(\n\n file!(),\n\n line!(),\n\n &term.screen().visible_lines(),\n\n &[\n\n Line::from_text_with_wrapped_last_col(\"hello\", &linked),\n\n Line::from_text_with_wrapped_last_col(\"hey!!\", &linked),\n\n partial_line,\n\n ],\n\n Compare::TEXT | Compare::ATTRS,\n\n );\n\n}\n", "file_path": "term/src/test/mod.rs", "rank": 88, "score": 93145.37098083475 }, { "content": " .borrow()\n\n .as_ref()\n\n .map(|c| c.clone())\n\n .ok_or_else(|| anyhow::anyhow!(\"no clipboard\"))\n\n }\n\n}\n\n\n\nimpl TerminalHost for TestHost {\n\n fn writer(&mut self) -> &mut dyn std::io::Write {\n\n self\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 89, "score": 93145.04603031323 }, { "content": " shape: CursorShape::Default,\n\n };\n\n assert_eq!(\n\n cursor, expect,\n\n \"actual cursor (left) didn't match expected cursor (right) reason={:?}\",\n\n reason\n\n );\n\n }\n\n\n\n fn assert_dirty_lines(&self, expected: &[usize], reason: Option<&str>) {\n\n let dirty_indices: Vec<usize> = self\n\n .screen()\n\n .lines\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, line)| if line.is_dirty() { Some(i) } else { None })\n\n .collect();\n\n assert_eq!(\n\n &dirty_indices, &expected,\n\n \"actual dirty lines (left) didn't match expected dirty lines (right) reason={:?}\",\n", "file_path": "term/src/test/mod.rs", "rank": 90, "score": 93144.5845906837 }, { "content": " fn set_mode(&mut self, mode: &str, enable: bool) {\n\n self.print(CSI);\n\n self.print(mode);\n\n self.print(if enable { b\"h\" } else { b\"l\" });\n\n }\n\n\n\n fn set_auto_wrap(&mut self, 
enable: bool) {\n\n self.set_mode(\"?7\", enable);\n\n }\n\n\n\n #[allow(dead_code)]\n\n fn set_scroll_region(&mut self, top: usize, bottom: usize) {\n\n self.print(CSI);\n\n self.print(format!(\"{};{}r\", top + 1, bottom + 1));\n\n }\n\n\n\n fn delete_lines(&mut self, n: isize) {\n\n self.print(CSI);\n\n self.print(format!(\"{}M\", n));\n\n }\n", "file_path": "term/src/test/mod.rs", "rank": 91, "score": 93144.144504337 }, { "content": " \"{}:{}: line {} text didn't match '{}' vs '{}'\",\n\n file,\n\n line_no,\n\n idx,\n\n line_str.escape_default(),\n\n expect_str.escape_default()\n\n );\n\n }\n\n }\n\n\n\n assert_eq!(\n\n lines.len(),\n\n expect_lines.len(),\n\n \"{}:{}: expectation has wrong number of lines\",\n\n file,\n\n line_no\n\n );\n\n}\n\n\n\nbitflags! {\n\n struct Compare : u8{\n\n const TEXT = 1;\n\n const ATTRS = 2;\n\n const DIRTY = 4;\n\n }\n\n}\n\n\n", "file_path": "term/src/test/mod.rs", "rank": 92, "score": 93141.7470849403 }, { "content": "\n\n if compare.contains(Compare::ATTRS) {\n\n let line_attrs: Vec<_> = line.cells().iter().map(|c| c.attrs().clone()).collect();\n\n let expect_attrs: Vec<_> = expect.cells().iter().map(|c| c.attrs().clone()).collect();\n\n assert_eq!(\n\n expect_attrs,\n\n line_attrs,\n\n \"{}:{}: line {} `{}` attrs didn't match (left=expected, right=actual)\",\n\n file,\n\n line_no,\n\n idx,\n\n line.as_str()\n\n );\n\n }\n\n if compare.contains(Compare::TEXT) {\n\n let line_str = line.as_str();\n\n let expect_str = expect.as_str();\n\n assert_eq!(\n\n line_str,\n\n expect_str,\n", "file_path": "term/src/test/mod.rs", "rank": 93, "score": 93141.60901158437 }, { "content": "\n\n fn hyperlink(&mut self, link: &Arc<Hyperlink>) {\n\n let osc = OperatingSystemCommand::SetHyperlink(Some(link.as_ref().clone()));\n\n self.print(format!(\"{}\", osc));\n\n }\n\n\n\n fn hyperlink_off(&mut self) {\n\n self.print(\"\\x1b]8;;\\x1b\\\\\");\n\n }\n\n\n\n fn soft_reset(&mut self) {\n\n self.print(CSI);\n\n self.print(\"!p\");\n\n }\n\n\n\n 
fn assert_cursor_pos(&self, x: usize, y: i64, reason: Option<&str>) {\n\n let cursor = self.cursor_pos();\n\n let expect = CursorPosition {\n\n x,\n\n y,\n", "file_path": "term/src/test/mod.rs", "rank": 94, "score": 93140.28062796021 }, { "content": "/// Create a `Terminal` with the recommended settings for use with\n\n/// a `LineEditor`.\n\npub fn line_editor_terminal() -> anyhow::Result<impl Terminal> {\n\n let hints = ProbeHints::new_from_env().mouse_reporting(Some(false));\n\n let caps = Capabilities::new_with_hints(hints)?;\n\n new_terminal(caps)\n\n}\n", "file_path": "termwiz/src/lineedit/mod.rs", "rank": 95, "score": 92384.67775038579 }, { "content": "#[derive(Debug)]\n\nstruct LocalClip {\n\n clip: RefCell<Option<String>>,\n\n}\n\n\n\nimpl LocalClip {\n\n fn new() -> Self {\n\n Self {\n\n clip: RefCell::new(None),\n\n }\n\n }\n\n}\n\n\n\nimpl Clipboard for LocalClip {\n\n fn set_contents(&self, clip: Option<String>) -> anyhow::Result<()> {\n\n *self.clip.borrow_mut() = clip;\n\n Ok(())\n\n }\n\n\n\n fn get_contents(&self) -> anyhow::Result<String> {\n\n self.clip\n", "file_path": "term/src/test/mod.rs", "rank": 96, "score": 86023.52569460013 }, { "content": "fn main() {\n\n let mut flags = ConstantsFlags::all();\n\n flags.remove(ConstantsFlags::SEMVER_FROM_CARGO_PKG);\n\n // Generate the 'cargo:' key output\n\n generate_cargo_keys(ConstantsFlags::all()).expect(\"Unable to generate the cargo keys!\");\n\n\n\n // If a file named `.tag` is present, we'll take its contents for the\n\n // version number that we report in wezterm -h.\n\n let mut ci_tag = String::new();\n\n if let Ok(tag) = std::fs::read(\".tag\") {\n\n if let Ok(s) = String::from_utf8(tag) {\n\n ci_tag = s.trim().to_string();\n\n println!(\"cargo:rerun-if-changed=.tag\");\n\n }\n\n }\n\n println!(\"cargo:rustc-env=WEZTERM_CI_TAG={}\", ci_tag);\n\n println!(\"cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.9\");\n\n\n\n #[cfg(windows)]\n\n {\n", "file_path": "build.rs", "rank": 97, "score": 
85373.9169417209 }, { "content": "fn scrollback_size(config: &Arc<dyn TerminalConfiguration>, allow_scrollback: bool) -> usize {\n\n if allow_scrollback {\n\n config.scrollback_size()\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\nimpl Screen {\n\n /// Create a new Screen with the specified dimensions.\n\n /// The Cells in the viewable portion of the screen are set to the\n\n /// default cell attributes.\n\n pub fn new(\n\n physical_rows: usize,\n\n physical_cols: usize,\n\n config: &Arc<dyn TerminalConfiguration>,\n\n allow_scrollback: bool,\n\n ) -> Screen {\n\n let physical_rows = physical_rows.max(1);\n\n let physical_cols = physical_cols.max(1);\n", "file_path": "term/src/screen.rs", "rank": 98, "score": 85239.77752300375 }, { "content": "<!-- cargo-sync-readme start -->\n\n\n\nThe purpose of this crate is to make it a bit more ergonomic for portable\n\napplications that need to work with the platform level `RawFd` and\n\n`RawHandle` types.\n\n\n\nRather than conditionally using `RawFd` and `RawHandle`, the `FileDescriptor`\n\ntype can be used to manage ownership, duplicate, read and write.\n\n\n\n## FileDescriptor\n\n\n\nThis is a bit of a contrived example, but demonstrates how to avoid\n\nthe conditional code that would otherwise be required to deal with\n\ncalling `as_raw_fd` and `as_raw_handle`:\n\n\n\n```\n\nuse filedescriptor::{FileDescriptor, FromRawFileDescriptor};\n\nuse std::io::Write;\n\n\n\nfn get_stdout() -> anyhow::Result<FileDescriptor> {\n\n let stdout = std::io::stdout();\n\n let handle = stdout.lock();\n\n FileDescriptor::dup(&handle)\n\n}\n\n\n\nfn print_something() -> anyhow::Result<()> {\n\n get_stdout()?.write(b\"hello\")?;\n\n Ok(())\n\n}\n\n```\n\n\n\n## Pipe\n\nThe `Pipe` type makes it more convenient to create a pipe and manage\n\nthe lifetime of both the read and write ends of that pipe.\n\n\n\n```\n\nuse filedescriptor::Pipe;\n\nuse std::io::{Read, Write};\n\nuse anyhow::Error;\n\n\n\nlet mut pipe = 
Pipe::new()?;\n\npipe.write.write(b\"hello\")?;\n\ndrop(pipe.write);\n\n\n\nlet mut s = String::new();\n\npipe.read.read_to_string(&mut s)?;\n\nassert_eq!(s, \"hello\");\n\n```\n\n\n\n## Socketpair\n\nThe `socketpair` function returns a pair of connected `SOCK_STREAM`\n\nsockets and functions both on posix and windows systems.\n\n\n\n```\n\nuse std::io::{Read, Write};\n\nuse anyhow::Error;\n\n\n\nlet (mut a, mut b) = filedescriptor::socketpair()?;\n\na.write(b\"hello\")?;\n\ndrop(a);\n\n\n\nlet mut s = String::new();\n\nb.read_to_string(&mut s)?;\n\nassert_eq!(s, \"hello\");\n", "file_path": "filedescriptor/README.md", "rank": 99, "score": 23.197002051962063 } ]
Rust
src/materialized/src/bin/materialized/sys.rs
bobbyiliev/materialize
44e3bcae151179075232ad436ae72f5883361fd1
use std::alloc::{self, Layout}; use std::io::{self, Write}; use std::process; use std::ptr; use std::sync::atomic::{AtomicUsize, Ordering}; use anyhow::{bail, Context}; use nix::errno; use nix::sys::signal; use tracing::trace; #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "ios")))] pub fn adjust_rlimits() { trace!("rlimit crate does not support this OS; not adjusting nofile limit"); } #[cfg(any(target_os = "macos", target_os = "linux", target_os = "ios"))] pub fn adjust_rlimits() { use rlimit::Resource; use tracing::warn; let (soft, hard) = match Resource::NOFILE.get() { Ok(limits) => limits, Err(e) => { trace!("unable to read initial nofile rlimit: {}", e); return; } }; trace!("initial nofile rlimit: ({}, {})", soft, hard); #[cfg(target_os = "macos")] let hard = { use mz_ore::result::ResultExt; use std::cmp; use sysctl::Sysctl; let res = sysctl::Ctl::new("kern.maxfilesperproc") .and_then(|ctl| ctl.value()) .map_err_to_string() .and_then(|v| match v { sysctl::CtlValue::Int(v) => u64::try_from(v) .map_err(|_| format!("kern.maxfilesperproc unexpectedly negative: {}", v)), o => Err(format!("unexpected sysctl value type: {:?}", o)), }); match res { Ok(v) => { trace!("sysctl kern.maxfilesperproc hard limit: {}", v); cmp::min(v, hard) } Err(e) => { trace!("error while reading sysctl: {}", e); hard } } }; trace!("attempting to adjust nofile rlimit to ({0}, {0})", hard); if let Err(e) = Resource::NOFILE.set(hard, hard) { trace!("error adjusting nofile rlimit: {}", e); return; } let (soft, hard) = match Resource::NOFILE.get() { Ok(limits) => limits, Err(e) => { trace!("unable to read adjusted nofile rlimit: {}", e); return; } }; trace!("adjusted nofile rlimit: ({}, {})", soft, hard); const RECOMMENDED_SOFT: u64 = 1024; if soft < RECOMMENDED_SOFT { warn!( "soft nofile rlimit ({}) is dangerously low; at least {} is recommended", soft, RECOMMENDED_SOFT ) } } pub fn enable_sigbus_sigsegv_backtraces() -> Result<(), anyhow::Error> { const STACK_SIZE: usize = 
2 << 20; const STACK_ALIGN: usize = 16; let buf_layout = Layout::from_size_align(STACK_SIZE, STACK_ALIGN).expect("layout known to be valid"); let buf = unsafe { alloc::alloc(buf_layout) }; let stack = libc::stack_t { ss_sp: buf as *mut libc::c_void, ss_flags: 0, ss_size: STACK_SIZE, }; let ret = unsafe { libc::sigaltstack(&stack, ptr::null_mut()) }; if ret == -1 { let errno = errno::from_i32(errno::errno()); bail!("failed to configure alternate signal stack: {}", errno); } let action = signal::SigAction::new( signal::SigHandler::Handler(handle_sigbus_sigsegv), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signal::SIGBUS, &action) } .context("failed to install SIGBUS handler")?; unsafe { signal::sigaction(signal::SIGSEGV, &action) } .context("failed to install SIGSEGV handler")?; Ok(()) } pub fn enable_sigusr2_coverage_dump() -> Result<(), anyhow::Error> { let action = signal::SigAction::new( signal::SigHandler::Handler(handle_sigusr2_signal), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signal::SIGUSR2, &action) } .context("failed to install SIGUSR2 handler")?; Ok(()) } extern "C" fn handle_sigbus_sigsegv(_: i32) { static SEEN: AtomicUsize = AtomicUsize::new(0); match SEEN.fetch_add(1, Ordering::SeqCst) { 0 => { panic!("received SIGSEGV or SIGBUS (maybe a stack overflow?)"); } _ => { let _ = io::stderr().write_all(b"SIGBUS or SIGSEGV while handling SIGSEGV or SIGBUS\n"); let _ = io::stderr().write_all(b"(maybe a stack overflow while allocating?)\n"); process::abort(); } } } pub fn enable_termination_signal_cleanup() -> Result<(), anyhow::Error> { let action = signal::SigAction::new( signal::SigHandler::Handler(handle_termination_signal), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); for signum in &[ signal::SIGHUP, signal::SIGINT, signal::SIGALRM, signal::SIGTERM, signal::SIGUSR1, ] { 
unsafe { signal::sigaction(*signum, &action) } .with_context(|| format!("failed to install handler for {}", signum))?; } Ok(()) } extern "C" { fn __llvm_profile_write_file() -> libc::c_int; } extern "C" fn handle_sigusr2_signal(_: i32) { let _ = unsafe { __llvm_profile_write_file() }; } extern "C" fn handle_termination_signal(signum: i32) { let _ = unsafe { __llvm_profile_write_file() }; let action = signal::SigAction::new( signal::SigHandler::SigDfl, signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signum.try_into().unwrap(), &action) } .unwrap_or_else(|_| panic!("failed to uninstall handler for {}", signum)); let ret = unsafe { libc::raise(signum) }; if ret == -1 { let errno = errno::from_i32(errno::errno()); panic!("failed to re-raise signal {}: {}", signum, errno); } }
use std::alloc::{self, Layout}; use std::io::{self, Write}; use std::process; use std::ptr; use std::sync::atomic::{AtomicUsize, Ordering}; use anyhow::{bail, Context}; use nix::errno; use nix::sys::signal; use tracing::trace; #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "ios")))] pub fn adjust_rlimits() { trace!("rlimit crate does not support this OS; not adjusting nofile limit"); } #[cfg(any(target_os = "macos", target_os = "linux", target_os = "ios"))] pub fn adjust_rlimits() { use rlimit::Resource; use tracing::warn; let (soft, hard) = match Resource::NOFILE.get() { Ok(limits) => limits, Err(e) => { trace!("unable to read initial nofile rlimit: {}", e); return; } }; trace!("initial nofile rlimit: ({}, {})", soft, hard); #[cfg(target_os = "macos")] let hard = { use mz_ore::result::ResultExt; use std::cmp; use sysctl::Sysctl; let res = sysctl::Ctl::new("kern.maxfilesperproc") .and_then(|ctl| ctl.value()) .map_err_to_string() .and_then(|v| match v { sysctl::CtlValue::Int(v) => u64::try_from(v) .map_err(|_| format!("kern.maxfilesperproc unexpectedly negative: {}", v)), o => Err(format!("unexpected sysctl value type: {:?}", o)), }); match res { Ok(v) => { trace!("sysctl kern.maxfilesperproc hard limit: {}", v); cmp::min(v, hard) } Err(e) => { trace!("error while reading sysctl: {}", e); hard } } }; trace!("attempting to adjust nofile rlimit to ({0}, {0})", hard); if let Err(e) = Resource::NOFILE.set(hard, hard) { trace!("error adjusting nofile rlimit: {}", e); return; } let (soft, hard) = match Resource::NOFILE.get() { Ok(limits) => limits, Err(e) => { trace!("unable to read adjusted nofile rlimit: {}", e); return; } }; trace!("adjusted nofile rlimit: ({}, {})", soft, hard); const RECOMMENDED_SOFT: u64 = 1024; if soft < RECOMMENDED_SOFT { warn!( "soft nofile rlimit ({}) is dangerously low; at least {} is recommended", soft, RECOMMENDED_SOFT ) } } pub fn enable_sigbus_sigsegv_backtraces() -> Result<(), anyhow::Error> { const STACK_SIZE: usize = 
2 << 20; const STACK_ALIGN: usize = 16; let buf_layout = Layout::from_size_align(STACK_SIZE, STACK_ALIGN).expect("layout known to be valid"); let buf = unsafe { alloc::alloc(buf_layout) }; let stack = libc::stack_
pub fn enable_sigusr2_coverage_dump() -> Result<(), anyhow::Error> { let action = signal::SigAction::new( signal::SigHandler::Handler(handle_sigusr2_signal), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signal::SIGUSR2, &action) } .context("failed to install SIGUSR2 handler")?; Ok(()) } extern "C" fn handle_sigbus_sigsegv(_: i32) { static SEEN: AtomicUsize = AtomicUsize::new(0); match SEEN.fetch_add(1, Ordering::SeqCst) { 0 => { panic!("received SIGSEGV or SIGBUS (maybe a stack overflow?)"); } _ => { let _ = io::stderr().write_all(b"SIGBUS or SIGSEGV while handling SIGSEGV or SIGBUS\n"); let _ = io::stderr().write_all(b"(maybe a stack overflow while allocating?)\n"); process::abort(); } } } pub fn enable_termination_signal_cleanup() -> Result<(), anyhow::Error> { let action = signal::SigAction::new( signal::SigHandler::Handler(handle_termination_signal), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); for signum in &[ signal::SIGHUP, signal::SIGINT, signal::SIGALRM, signal::SIGTERM, signal::SIGUSR1, ] { unsafe { signal::sigaction(*signum, &action) } .with_context(|| format!("failed to install handler for {}", signum))?; } Ok(()) } extern "C" { fn __llvm_profile_write_file() -> libc::c_int; } extern "C" fn handle_sigusr2_signal(_: i32) { let _ = unsafe { __llvm_profile_write_file() }; } extern "C" fn handle_termination_signal(signum: i32) { let _ = unsafe { __llvm_profile_write_file() }; let action = signal::SigAction::new( signal::SigHandler::SigDfl, signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signum.try_into().unwrap(), &action) } .unwrap_or_else(|_| panic!("failed to uninstall handler for {}", signum)); let ret = unsafe { libc::raise(signum) }; if ret == -1 { let errno = errno::from_i32(errno::errno()); panic!("failed to re-raise signal {}: {}", signum, errno); } }
t { ss_sp: buf as *mut libc::c_void, ss_flags: 0, ss_size: STACK_SIZE, }; let ret = unsafe { libc::sigaltstack(&stack, ptr::null_mut()) }; if ret == -1 { let errno = errno::from_i32(errno::errno()); bail!("failed to configure alternate signal stack: {}", errno); } let action = signal::SigAction::new( signal::SigHandler::Handler(handle_sigbus_sigsegv), signal::SaFlags::SA_NODEFER | signal::SaFlags::SA_ONSTACK, signal::SigSet::empty(), ); unsafe { signal::sigaction(signal::SIGBUS, &action) } .context("failed to install SIGBUS handler")?; unsafe { signal::sigaction(signal::SIGSEGV, &action) } .context("failed to install SIGSEGV handler")?; Ok(()) }
function_block-function_prefixed
[ { "content": "fn encode_element(buf: &mut BytesMut, elem: Option<&Value>, ty: &Type) -> Result<(), io::Error> {\n\n match elem {\n\n None => buf.put_i32(-1),\n\n Some(elem) => {\n\n let base = buf.len();\n\n buf.put_i32(0);\n\n elem.encode_binary(ty, buf)?;\n\n let len = pg_len(\"encoded element\", buf.len() - base - 4)?;\n\n buf[base..base + 4].copy_from_slice(&len.to_be_bytes());\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/pgrepr/src/value.rs", "rank": 0, "score": 414098.36710431543 }, { "content": "/// Parse a jemalloc profile file, producing a vector of stack traces along with their weights.\n\npub fn parse_jeheap<R: BufRead>(r: R) -> anyhow::Result<StackProfile> {\n\n let mut cur_stack = None;\n\n let mut profile = <StackProfile as Default>::default();\n\n let mut lines = r.lines();\n\n let first_line = match lines.next() {\n\n Some(s) => s,\n\n None => bail!(\"Heap dump file was empty\"),\n\n }?;\n\n // The first line of the file should be e.g. \"heap_v2/524288\", where the trailing\n\n // number is the inverse probability of a byte being sampled.\n\n let sampling_rate = str::parse::<usize>(first_line.trim_start_matches(\"heap_v2/\"))? as f64;\n\n for line in lines {\n\n let line = line?;\n\n let line = line.trim();\n\n let words = line.split_ascii_whitespace().collect::<Vec<_>>();\n\n if words.len() > 0 && words[0] == \"@\" {\n\n if cur_stack.is_some() {\n\n bail!(\"Stack without corresponding weight!\")\n\n }\n\n let mut addrs = words[1..]\n", "file_path": "src/prof/src/jemalloc.rs", "rank": 1, "score": 372974.714509532 }, { "content": "// HACK: This should be a method on StreamReadHandle that actually queries the\n\n// runtime.\n\n//\n\n// TODO: We're slightly better, now that we have `get_seal()`. 
Maybe that's already enough?\n\nfn sealed_ts<K: Data, V: Data>(read: &StreamReadHandle<K, V>) -> Result<u64, Box<dyn Error>> {\n\n let seal_ts = read.snapshot()?.get_seal();\n\n\n\n if let Some(sealed) = seal_ts.first() {\n\n Ok(*sealed)\n\n } else {\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "src/persist/examples/kafka_upsert.rs", "rank": 2, "score": 372544.3580027561 }, { "content": "pub fn parse_map<'a, V, E>(\n\n s: &'a str,\n\n is_value_type_map: bool,\n\n gen_elem: impl FnMut(Cow<'a, str>) -> Result<V, E>,\n\n) -> Result<BTreeMap<String, V>, ParseError>\n\nwhere\n\n E: fmt::Display,\n\n{\n\n parse_map_inner(s, is_value_type_map, gen_elem)\n\n .map_err(|details| ParseError::invalid_input_syntax(\"map\", s).with_details(details))\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 3, "score": 366892.6948497694 }, { "content": "// From postgres-types/src/private.rs.\n\nfn read_value<'a, T>(type_: &PgType, buf: &mut &'a [u8]) -> Result<T, Box<dyn Error + Sync + Send>>\n\nwhere\n\n T: FromSql<'a>,\n\n{\n\n let len = read_be_i32(buf)?;\n\n let value = if len < 0 {\n\n None\n\n } else {\n\n if len as usize > buf.len() {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n let (head, tail) = buf.split_at(len as usize);\n\n *buf = tail;\n\n Some(head)\n\n };\n\n T::from_sql_nullable(type_, value)\n\n}\n\n\n", "file_path": "src/sqllogictest/src/runner.rs", "rank": 4, "score": 339400.8669043597 }, { "content": "fn decode_copy_data(mut buf: Cursor, frame_len: usize) -> Result<FrontendMessage, io::Error> {\n\n let mut data = Vec::with_capacity(frame_len);\n\n for _ in 0..frame_len {\n\n data.push(buf.read_byte()?);\n\n }\n\n Ok(FrontendMessage::CopyData(data))\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 5, "score": 337133.7503261285 }, { "content": "fn pg_len(what: &str, len: usize) -> Result<i32, io::Error> {\n\n len.try_into().map_err(|_| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"{} does not fit into an i32\", what),\n\n 
)\n\n })\n\n}\n\n\n", "file_path": "src/pgrepr/src/value.rs", "rank": 6, "score": 336659.7391818324 }, { "content": "pub fn extract_protobuf_header(buf: &[u8]) -> Result<(i32, &[u8])> {\n\n let (schema_id, buf) = extract_schema_id(buf, \"protobuf\")?;\n\n\n\n match buf.get(0) {\n\n Some(0) => Ok((schema_id, &buf[1..])),\n\n Some(message_id) => bail!(\n\n \"unsupported Confluent-style protobuf message descriptor id: \\\n\n expected 0, but found: {}. \\\n\n See https://github.com/MaterializeInc/materialize/issues/9250\",\n\n message_id\n\n ),\n\n None => bail!(\n\n \"Confluent-style protobuf datum is too few bytes: expected a message id after magic \\\n\n and schema id, got a buffer of length {}\",\n\n buf.len()\n\n ),\n\n }\n\n}\n", "file_path": "src/interchange/src/confluent.rs", "rank": 7, "score": 330304.71741876623 }, { "content": "pub fn extract_avro_header(buf: &[u8]) -> Result<(i32, &[u8])> {\n\n extract_schema_id(buf, \"avro\")\n\n}\n\n\n", "file_path": "src/interchange/src/confluent.rs", "rank": 8, "score": 330304.71741876623 }, { "content": "/// Reports whether the given stream begins with a pgwire handshake.\n\n///\n\n/// To avoid false negatives, there must be at least eight bytes in `buf`.\n\npub fn match_handshake(buf: &[u8]) -> bool {\n\n // The pgwire StartupMessage looks like this:\n\n //\n\n // i32 - Length of entire message.\n\n // i32 - Protocol version number.\n\n // [String] - Arbitrary key-value parameters of any length.\n\n //\n\n // Since arbitrary parameters can be included in the StartupMessage, the\n\n // first Int32 is worthless, since the message could have any length.\n\n // Instead, we sniff the protocol version number.\n\n if buf.len() < 8 {\n\n return false;\n\n }\n\n let version = NetworkEndian::read_i32(&buf[4..8]);\n\n VERSIONS.contains(&version)\n\n}\n\n\n\n/// Parameters for the [`run`] function.\n\npub struct RunParams<'a, A> {\n\n /// The TLS mode of the pgwire server.\n", "file_path": "src/pgwire/src/protocol.rs", "rank": 9, 
"score": 323402.1849503311 }, { "content": "fn decode_query(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n Ok(FrontendMessage::Query {\n\n sql: buf.read_cstr()?.to_string(),\n\n })\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 10, "score": 319910.1005898536 }, { "content": "fn decode_close(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n match buf.read_byte()? {\n\n b'S' => Ok(FrontendMessage::CloseStatement {\n\n name: buf.read_cstr()?.to_owned(),\n\n }),\n\n b'P' => Ok(FrontendMessage::ClosePortal {\n\n name: buf.read_cstr()?.to_owned(),\n\n }),\n\n b => Err(input_err(format!(\n\n \"invalid type byte in close message: {}\",\n\n b\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 11, "score": 319910.1005898536 }, { "content": "fn decode_password(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n Ok(FrontendMessage::Password {\n\n password: buf.read_cstr()?.to_owned(),\n\n })\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 12, "score": 319910.1005898536 }, { "content": "fn decode_execute(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n let portal_name = buf.read_cstr()?.to_string();\n\n let max_rows = buf.read_i32()?;\n\n Ok(FrontendMessage::Execute {\n\n portal_name,\n\n max_rows,\n\n })\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 13, "score": 319910.1005898536 }, { "content": "fn decode_parse(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n let name = buf.read_cstr()?;\n\n let sql = buf.read_cstr()?;\n\n\n\n let mut param_types = vec![];\n\n for _ in 0..buf.read_i16()? 
{\n\n param_types.push(buf.read_u32()?);\n\n }\n\n\n\n Ok(FrontendMessage::Parse {\n\n name: name.into(),\n\n sql: sql.into(),\n\n param_types,\n\n })\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 14, "score": 319910.1005898536 }, { "content": "fn decode_describe(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n let first_char = buf.read_byte()?;\n\n let name = buf.read_cstr()?.to_string();\n\n match first_char {\n\n b'S' => Ok(FrontendMessage::DescribeStatement { name }),\n\n b'P' => Ok(FrontendMessage::DescribePortal { name }),\n\n other => Err(input_err(format!(\"Invalid describe type: {:#x?}\", other))),\n\n }\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 15, "score": 319910.1005898536 }, { "content": "fn decode_bind(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n let portal_name = buf.read_cstr()?.to_string();\n\n let statement_name = buf.read_cstr()?.to_string();\n\n\n\n let mut param_formats = Vec::new();\n\n for _ in 0..buf.read_i16()? {\n\n param_formats.push(buf.read_format()?);\n\n }\n\n\n\n let mut raw_params = Vec::new();\n\n for _ in 0..buf.read_i16()? 
{\n\n let len = buf.read_i32()?;\n\n if len == -1 {\n\n raw_params.push(None); // NULL\n\n } else {\n\n // TODO(benesch): this should use bytes::Bytes to avoid the copy.\n\n let mut value = Vec::new();\n\n for _ in 0..len {\n\n value.push(buf.read_byte()?);\n\n }\n", "file_path": "src/pgwire/src/codec.rs", "rank": 16, "score": 319910.1005898536 }, { "content": "/// Blocking logic to read from a file, intended for its own thread.\n\npub fn read_file_task<Ctor, I, Err>(\n\n path: PathBuf,\n\n tx: std::sync::mpsc::SyncSender<Result<MessagePayload, anyhow::Error>>,\n\n activator: Option<SyncActivator>,\n\n read_style: FileReadStyle,\n\n compression: Compression,\n\n iter_ctor: Ctor,\n\n) where\n\n I: IntoIterator<Item = Result<MessagePayload, Err>> + Send + 'static,\n\n Ctor: FnOnce(Box<dyn AvroRead + Send>) -> Result<I, Err>,\n\n Err: Into<anyhow::Error>,\n\n{\n\n trace!(\"reading file {}\", path.display());\n\n let file = match std::fs::File::open(&path).with_context(|| {\n\n format!(\n\n \"file source: unable to open file at path {}\",\n\n path.to_string_lossy(),\n\n )\n\n }) {\n\n Ok(file) => file,\n", "file_path": "src/storage/src/source/file.rs", "rank": 17, "score": 318658.8776285278 }, { "content": "fn decode_copy_fail(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n Ok(FrontendMessage::CopyFail(buf.read_cstr()?.to_string()))\n\n}\n\n\n\n/// Decodes data within pgwire messages.\n\n///\n\n/// The API provided is very similar to [`bytes::Buf`], but operations return\n\n/// errors rather than panicking. 
This is important for safety, as we don't want\n\n/// to crash if the user sends us malformed pgwire messages.\n\n///\n\n/// There are also some special-purpose methods, like [`Cursor::read_cstr`],\n\n/// that are specific to pgwire messages.\n", "file_path": "src/pgwire/src/codec.rs", "rank": 18, "score": 315885.5259265753 }, { "content": "pub fn read_long<R: Read>(reader: &mut R) -> Result<i64, AvroError> {\n\n zag_i64(reader)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 19, "score": 314861.3406713669 }, { "content": "pub fn safe_len(len: usize) -> Result<usize, AvroError> {\n\n let max_bytes = max_allocation_bytes(512 * 1024 * 1024);\n\n\n\n if len <= max_bytes {\n\n Ok(len)\n\n } else {\n\n Err(AvroError::Allocation {\n\n attempted: len,\n\n allowed: max_bytes,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_zigzag() {\n\n let mut a = Vec::new();\n", "file_path": "src/avro/src/util.rs", "rank": 20, "score": 312025.6450356312 }, { "content": "fn decode_variable<R: Read>(reader: &mut R) -> Result<u64, AvroError> {\n\n let mut i = 0u64;\n\n let mut buf = [0u8; 1];\n\n\n\n let mut j = 0;\n\n loop {\n\n if j > 9 {\n\n // if j * 7 > 64\n\n return Err(AvroError::Decode(DecodeError::IntDecodeOverflow));\n\n }\n\n reader.read_exact(&mut buf[..])?;\n\n i |= (u64::from(buf[0] & 0x7F)) << (j * 7);\n\n if (buf[0] >> 7) == 0 {\n\n break;\n\n } else {\n\n j += 1;\n\n }\n\n }\n\n\n\n Ok(i)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 21, "score": 309442.56051757664 }, { "content": "#[inline]\n\nfn decode_long_nonneg<R: Read>(reader: &mut R) -> Result<u64, AvroError> {\n\n let u = match zag_i64(reader)? 
{\n\n i if i >= 0 => i as u64,\n\n i => return Err(AvroError::Decode(DecodeError::ExpectedNonnegInteger(i))),\n\n };\n\n Ok(u)\n\n}\n\n\n", "file_path": "src/avro/src/decode.rs", "rank": 22, "score": 305581.9401427486 }, { "content": "pub fn give_value<D: AvroDecode>(d: D, v: &Value) -> Result<D::Out, AvroError> {\n\n use ValueOrReader::Value as V;\n\n match v {\n\n Value::Null => d.scalar(Scalar::Null),\n\n Value::Boolean(val) => d.scalar(Scalar::Boolean(*val)),\n\n Value::Int(val) => d.scalar(Scalar::Int(*val)),\n\n Value::Long(val) => d.scalar(Scalar::Long(*val)),\n\n Value::Float(val) => d.scalar(Scalar::Float(*val)),\n\n Value::Double(val) => d.scalar(Scalar::Double(*val)),\n\n Value::Date(val) => d.scalar(Scalar::Date(*val)),\n\n Value::Timestamp(val) => d.scalar(Scalar::Timestamp(*val)),\n\n // The &[u8] parameter here (and elsewhere in this function) is arbitrary, but we have to put in something in order for the function\n\n // to type-check\n\n Value::Decimal(val) => d.decimal::<&[u8]>(val.precision, val.scale, V(&val.unscaled)),\n\n Value::Bytes(val) => d.bytes::<&[u8]>(V(val)),\n\n Value::String(val) => d.string::<&[u8]>(V(val)),\n\n Value::Fixed(_len, val) => d.fixed::<&[u8]>(V(val)),\n\n Value::Enum(idx, symbol) => d.enum_variant(symbol, *idx as usize),\n\n Value::Union {\n\n index,\n", "file_path": "src/avro/src/decode.rs", "rank": 23, "score": 304756.19793467264 }, { "content": "pub fn plan_read_then_write(\n\n kind: MutationKind,\n\n scx: &StatementContext,\n\n params: &Params,\n\n query::ReadThenWritePlan {\n\n id,\n\n mut selection,\n\n finishing,\n\n assignments,\n\n }: query::ReadThenWritePlan,\n\n) -> Result<Plan, anyhow::Error> {\n\n selection.bind_parameters(&params)?;\n\n let selection = selection.optimize_and_lower(&scx.into())?;\n\n let mut assignments_outer = HashMap::new();\n\n for (idx, mut set) in assignments {\n\n set.bind_parameters(&params)?;\n\n let set = set.lower_uncorrelated()?;\n\n assignments_outer.insert(idx, set);\n\n 
}\n\n\n\n Ok(Plan::ReadThenWrite(ReadThenWritePlan {\n\n id,\n\n selection,\n\n finishing,\n\n assignments: assignments_outer,\n\n kind,\n\n }))\n\n}\n\n\n", "file_path": "src/sql/src/plan/statement/dml.rs", "rank": 24, "score": 302666.7648534512 }, { "content": "#[test]\n\nfn test_no_default_value() -> Result<(), String> {\n\n let reader_schema = Schema::from_str(\n\n r#\"{\n\n \"type\": \"record\",\n\n \"name\": \"Test\",\n\n \"fields\": [\n\n {\"name\": \"H\", \"type\": \"int\"}\n\n ]\n\n }\"#,\n\n )\n\n .unwrap();\n\n let resolved_schema = resolve_schemas(&LONG_RECORD_SCHEMA, &reader_schema);\n\n match resolved_schema {\n\n Ok(_) => Err(String::from(\"Expected SchemaResolutionError, got Ok\")),\n\n Err(ref e) => {\n\n if let AvroError::ResolveSchema(_) = e {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Expected SchemaResultionError, got {}\", e))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/avro/tests/io.rs", "rank": 25, "score": 301530.57087251055 }, { "content": "#[test]\n\nfn test_type_exception() -> Result<(), String> {\n\n let writer_schema = Schema::from_str(\n\n r#\"\n\n {\n\n \"type\": \"record\",\n\n \"name\": \"Test\",\n\n \"fields\": [\n\n {\"name\": \"F\", \"type\": \"int\"},\n\n {\"name\": \"E\", \"type\": \"int\"}\n\n ]\n\n }\n\n \"#,\n\n )\n\n .unwrap();\n\n let datum_to_write = Value::Record(vec![\n\n (\"E\".to_string(), Value::Int(5)),\n\n (\"F\".to_string(), Value::String(String::from(\"Bad\"))),\n\n ]);\n\n let encoded = to_avro_datum(&writer_schema, datum_to_write);\n\n match encoded {\n\n Ok(_) => Err(String::from(\"Expected ValidationError, got Ok\")),\n\n Err(ref e) => match e.downcast_ref::<ValidationError>() {\n\n Some(_) => Ok(()),\n\n None => Err(format!(\"Expected ValidationError, got {}\", e)),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/avro/tests/io.rs", "rank": 26, "score": 301276.21926094155 }, { "content": "/// Validate that the string is an allowed variable name (lowercase letters, numbers and dashes)\n\npub fn 
validate_ident(name: &str) -> Result<(), anyhow::Error> {\n\n lazy_static! {\n\n static ref VALID_KEY_REGEX: Regex = Regex::new(\"^[a-z0-9\\\\-]*$\").unwrap();\n\n }\n\n if !VALID_KEY_REGEX.is_match(name) {\n\n bail!(\n\n \"invalid builtin argument name '{}': \\\n\n only lowercase letters, numbers, and hyphens allowed\",\n\n name\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/testdrive/src/parser.rs", "rank": 27, "score": 300697.267059566 }, { "content": "pub fn zag_i32<R: Read>(reader: &mut R) -> Result<i32, AvroError> {\n\n let i = zag_i64(reader)?;\n\n if i < i64::from(i32::min_value()) || i > i64::from(i32::max_value()) {\n\n Err(AvroError::Decode(DecodeError::I32OutOfRange(i)))\n\n } else {\n\n Ok(i as i32)\n\n }\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 28, "score": 298814.4013601596 }, { "content": "pub fn zag_i64<R: Read>(reader: &mut R) -> Result<i64, AvroError> {\n\n let z = decode_variable(reader)?;\n\n Ok(if z & 0x1 == 0 {\n\n (z >> 1) as i64\n\n } else {\n\n !(z >> 1) as i64\n\n })\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 29, "score": 298814.4013601596 }, { "content": "fn parse_frame_len(src: &[u8]) -> Result<usize, io::Error> {\n\n let n = usize::cast_from(NetworkEndian::read_u32(src));\n\n if n > netio::MAX_FRAME_SIZE {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n netio::FrameTooBig,\n\n ));\n\n } else if n < 4 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid frame length\",\n\n ));\n\n }\n\n Ok(n - 4)\n\n}\n\n\n\nimpl Decoder for Codec {\n\n type Item = FrontendMessage;\n\n type Error = io::Error;\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 30, "score": 297351.0688079734 }, { "content": "fn analyze_type(ty: &syn::Type) -> Result<Type> {\n\n match ty {\n\n syn::Type::Path(syn::TypePath { qself: None, path }) => match path.segments.len() {\n\n 2 => {\n\n let name = path.segments.iter().map(|s| s.ident.to_string()).join(\"::\");\n\n 
Ok(Type::Abstract(name))\n\n }\n\n 1 => {\n\n let segment = path.segments.last().unwrap();\n\n let segment_name = segment.ident.to_string();\n\n\n\n let container = |construct_ty: fn(Box<Type>) -> Type| match &segment.arguments {\n\n syn::PathArguments::AngleBracketed(args) if args.args.len() == 1 => {\n\n match args.args.last().unwrap() {\n\n syn::GenericArgument::Type(ty) => {\n\n let inner = Box::new(analyze_type(ty)?);\n\n Ok(construct_ty(inner))\n\n }\n\n _ => bail!(\"Container type argument is not a basic (i.e., non-lifetime, non-constraint) type argument: {}\", ty.into_token_stream()),\n\n }\n", "file_path": "src/walkabout/src/ir.rs", "rank": 31, "score": 296840.9994950369 }, { "content": "/// Parses a SQL string containing a single data type.\n\npub fn parse_data_type(sql: &str) -> Result<UnresolvedDataType, ParserError> {\n\n let tokens = lexer::lex(sql)?;\n\n let mut parser = Parser::new(sql, tokens);\n\n let data_type = parser.parse_data_type()?;\n\n if parser.next_token().is_some() {\n\n parser_err!(\n\n parser,\n\n parser.peek_prev_pos(),\n\n \"extra token after data type\"\n\n )\n\n } else {\n\n Ok(data_type)\n\n }\n\n}\n\n\n\nmacro_rules! 
maybe {\n\n ($e:expr) => {{\n\n if let Some(v) = $e {\n\n return Ok(v);\n\n }\n\n }};\n\n}\n\n\n", "file_path": "src/sql-parser/src/parser.rs", "rank": 32, "score": 296358.3271745347 }, { "content": "pub fn neg_interval(a: Datum) -> Result<Datum, EvalError> {\n\n neg_interval_inner(a).map(Datum::from)\n\n}\n\n\n", "file_path": "src/expr/src/scalar/func.rs", "rank": 33, "score": 296340.1864413796 }, { "content": "/// Decode a `Value` encoded in Avro format given its `Schema` and anything implementing `io::Read`\n\n/// to read from.\n\n///\n\n/// In case a reader `Schema` is provided, schema resolution will also be performed.\n\n///\n\n/// **NOTE** This function has a quite small niche of usage and does NOT take care of reading the\n\n/// header and consecutive data blocks; use [`Reader`](struct.Reader.html) if you don't know what\n\n/// you are doing, instead.\n\npub fn from_avro_datum<R: AvroRead>(schema: &Schema, reader: &mut R) -> Result<Value, AvroError> {\n\n let value = decode(schema.top_node(), reader)?;\n\n Ok(value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::{Record, ToAvro};\n\n use crate::Reader;\n\n\n\n use std::io::Cursor;\n\n\n\n static SCHEMA: &str = r#\"\n\n {\n\n \"type\": \"record\",\n\n \"name\": \"test\",\n\n \"fields\": [\n\n {\"name\": \"a\", \"type\": \"long\", \"default\": 42},\n\n {\"name\": \"b\", \"type\": \"string\"}\n", "file_path": "src/avro/src/reader.rs", "rank": 34, "score": 295430.20924510364 }, { "content": "/// Escapes a list, record, or map element in place.\n\n///\n\n/// The element must start at `start` and extend to the end of the buffer. The\n\n/// buffer will be resized if escaping is necessary to account for the\n\n/// additional escape characters.\n\n///\n\n/// The `needs_escaping` function is used to determine whether an element needs\n\n/// to be escaped. 
It is provided with the bytes of each element and should\n\n/// return whether the element needs to be escaped.\n\nfn escape_elem<F, E>(buf: &mut F, start: usize)\n\nwhere\n\n F: FormatBuffer,\n\n E: ElementEscaper,\n\n{\n\n let elem = &buf.as_ref()[start..];\n\n if !E::needs_escaping(elem) {\n\n return;\n\n }\n\n\n\n // We'll need two extra bytes for the quotes at the start and end of the\n\n // element, plus an extra byte for each quote and backslash.\n\n let extras = 2 + elem.iter().filter(|b| matches!(b, b'\"' | b'\\\\')).count();\n\n let orig_end = buf.len();\n\n let new_end = buf.len() + extras;\n\n\n\n // Pad the buffer to the new length. These characters will all be\n\n // overwritten.\n\n //\n\n // NOTE(benesch): we never read these characters, so we could instead use\n", "file_path": "src/repr/src/strconv.rs", "rank": 35, "score": 292442.9627873262 }, { "content": "fn decode_terminate(mut _buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n // Nothing more to decode.\n\n Ok(FrontendMessage::Terminate)\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 36, "score": 287758.9564038578 }, { "content": "fn decode_sync(mut _buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n // Nothing more to decode.\n\n Ok(FrontendMessage::Sync)\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 37, "score": 287758.9564038578 }, { "content": "fn decode_flush(mut _buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n // Nothing more to decode.\n\n Ok(FrontendMessage::Flush)\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 38, "score": 287758.9564038578 }, { "content": "pub fn parse_numeric(s: &str) -> Result<OrderedDecimal<Numeric>, ParseError> {\n\n let mut cx = numeric::cx_datum();\n\n let mut n = match cx.parse(s.trim()) {\n\n Ok(n) => n,\n\n Err(..) 
=> {\n\n return Err(ParseError::invalid_input_syntax(\"numeric\", s));\n\n }\n\n };\n\n\n\n let cx_status = cx.status();\n\n\n\n // Check for values that can only be generated by invalid syntax.\n\n if (n.is_infinite() && !cx_status.overflow())\n\n || (n.is_nan() && n.is_negative())\n\n || n.is_signaling_nan()\n\n {\n\n return Err(ParseError::invalid_input_syntax(\"numeric\", s));\n\n }\n\n\n\n // Process value; only errors if value is out of range of numeric's max precision.\n", "file_path": "src/repr/src/strconv.rs", "rank": 39, "score": 286790.9766378891 }, { "content": "/// Plans a slice of expressions.\n\n///\n\n/// This function is a simple convenience function for mapping [`plan_expr`]\n\n/// over a slice of expressions. The planned expressions are returned in the\n\n/// same order as the input. If any of the expressions fail to plan, returns an\n\n/// error instead.\n\nfn plan_exprs<E>(ecx: &ExprContext, exprs: &[E]) -> Result<Vec<CoercibleScalarExpr>, PlanError>\n\nwhere\n\n E: std::borrow::Borrow<Expr<Aug>>,\n\n{\n\n let mut out = vec![];\n\n for expr in exprs {\n\n out.push(plan_expr(ecx, expr.borrow())?);\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/sql/src/plan/query.rs", "rank": 40, "score": 286460.63313526195 }, { "content": "fn decode_copy_done(mut _buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n // Nothing more to decode.\n\n Ok(FrontendMessage::CopyDone)\n\n}\n\n\n", "file_path": "src/pgwire/src/codec.rs", "rank": 41, "score": 283734.3817405795 }, { "content": "fn parse_map_inner<'a, V, E>(\n\n s: &'a str,\n\n is_value_type_map: bool,\n\n mut gen_elem: impl FnMut(Cow<'a, str>) -> Result<V, E>,\n\n) -> Result<BTreeMap<String, V>, String>\n\nwhere\n\n E: fmt::Display,\n\n{\n\n let mut map = BTreeMap::new();\n\n let buf = &mut LexBuf::new(s);\n\n\n\n // Consume opening paren.\n\n if !buf.consume('{') {\n\n bail!(\n\n \"expected '{{', found {}\",\n\n match buf.next() {\n\n Some(c) => format!(\"{}\", c),\n\n None => \"empty 
string\".to_string(),\n\n }\n\n )\n", "file_path": "src/repr/src/strconv.rs", "rank": 42, "score": 283143.0009626483 }, { "content": "/// Returns `n`'s precision, i.e. the total number of digits represented by `n`\n\n/// in standard notation not including a zero in the \"one's place\" in (-1,1).\n\npub fn get_precision<const N: usize>(n: &Decimal<N>) -> u32 {\n\n let e = n.exponent();\n\n if e >= 0 {\n\n // Positive exponent\n\n n.digits() + u32::try_from(e).unwrap()\n\n } else {\n\n // Negative exponent\n\n let d = n.digits();\n\n let e = u32::try_from(e.abs()).unwrap();\n\n // Precision is...\n\n // - d if decimal point splits numbers\n\n // - e if e dominates number of digits\n\n std::cmp::max(d, e)\n\n }\n\n}\n\n\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 43, "score": 282507.8693858553 }, { "content": "// This function is derived from code in the avro_rs project. Update the license\n\n// header on this file accordingly if you move it to a new home.\n\npub fn from_json(json: &JsonValue, schema: SchemaNode) -> Result<Value, anyhow::Error> {\n\n match (json, schema.inner) {\n\n (JsonValue::Null, SchemaPiece::Null) => Ok(Value::Null),\n\n (JsonValue::Bool(b), SchemaPiece::Boolean) => Ok(Value::Boolean(*b)),\n\n (JsonValue::Number(ref n), SchemaPiece::Int) => {\n\n Ok(Value::Int(n.as_i64().unwrap().try_into()?))\n\n }\n\n (JsonValue::Number(ref n), SchemaPiece::Long) => Ok(Value::Long(n.as_i64().unwrap())),\n\n (JsonValue::Number(ref n), SchemaPiece::Float) => {\n\n Ok(Value::Float(n.as_f64().unwrap() as f32))\n\n }\n\n (JsonValue::Number(ref n), SchemaPiece::Double) => Ok(Value::Double(n.as_f64().unwrap())),\n\n (JsonValue::Number(ref n), SchemaPiece::Date) => Ok(Value::Date(\n\n chrono::NaiveDate::from_ymd(1970, 1, 1) + chrono::Duration::days(n.as_i64().unwrap()),\n\n )),\n\n (JsonValue::Number(ref n), SchemaPiece::TimestampMilli) => {\n\n let ts = n.as_i64().unwrap();\n\n Ok(Value::Timestamp(chrono::NaiveDateTime::from_timestamp(\n\n ts / 1_000,\n\n 
((ts % 1_000) * 1_000_000) as u32,\n", "file_path": "src/testdrive/src/format/avro.rs", "rank": 44, "score": 282170.60964756244 }, { "content": "/// Encodes an BlobUnsealedBatch into the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn encode_unsealed_arrow<W: Write>(w: &mut W, batch: &BlobUnsealedBatch) -> Result<(), Error> {\n\n let mut metadata = BTreeMap::new();\n\n metadata.insert(\n\n INLINE_METADATA_KEY.into(),\n\n encode_unsealed_inline_meta(batch, ProtoBatchFormat::ArrowKvtd),\n\n );\n\n let schema = Schema::from(SCHEMA_ARROW_KVTD.fields.clone()).with_metadata(metadata);\n\n let options = WriteOptions { compression: None };\n\n let mut writer = FileWriter::try_new(w, &schema, None, options)?;\n\n for records in batch.updates.iter() {\n\n writer.write(&encode_arrow_batch_kvtd(records), None)?;\n\n }\n\n writer.finish()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 45, "score": 279601.2005028077 }, { "content": "/// Decodes a BlobUnsealedBatch from the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn decode_unsealed_arrow<R: Read + Seek>(r: &mut R) -> Result<BlobUnsealedBatch, Error> {\n\n let file_meta = read_file_metadata(r)?;\n\n let (format, meta) =\n\n decode_unsealed_inline_meta(file_meta.schema.metadata.get(INLINE_METADATA_KEY))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => decode_arrow_file_kvtd(r, file_meta)?,\n\n ProtoBatchFormat::ParquetKvtd => {\n\n return Err(\"ParquetKvtd format not supported in arrow\".into())\n\n }\n\n };\n\n\n\n let ret = BlobUnsealedBatch {\n\n desc: 
SeqNo(meta.seqno_lower)..SeqNo(meta.seqno_upper),\n\n updates,\n\n };\n\n ret.validate()?;\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 46, "score": 279572.1217076591 }, { "content": "/// Decodes a BlobUnsealedBatch from the Parquet format.\n\npub fn decode_unsealed_parquet<R: Read + Seek>(r: &mut R) -> Result<BlobUnsealedBatch, Error> {\n\n let metadata = read_metadata(r).map_err(|err| err.to_string())?;\n\n let metadata = metadata\n\n .key_value_metadata()\n\n .as_ref()\n\n .and_then(|x| x.iter().find(|x| x.key == INLINE_METADATA_KEY));\n\n let (format, meta) = decode_unsealed_inline_meta(metadata.and_then(|x| x.value.as_ref()))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => {\n\n return Err(\"ArrowKvtd format not supported in parquet\".into())\n\n }\n\n ProtoBatchFormat::ParquetKvtd => decode_parquet_file_kvtd(r)?,\n\n };\n\n\n\n let ret = BlobUnsealedBatch {\n\n desc: SeqNo(meta.seqno_lower)..SeqNo(meta.seqno_upper),\n\n updates,\n\n };\n\n ret.validate()?;\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 47, "score": 279565.86272884463 }, { "content": "/// Parses a buffer of two's complement digits in big-endian order and converts\n\n/// them to [`Decimal<N>`].\n\npub fn twos_complement_be_to_numeric_inner<D: Dec<N>, const N: usize>(\n\n input: &mut [u8],\n\n) -> Result<Decimal<N>, anyhow::Error> {\n\n let is_neg = if (input[0] & 0x80) != 0 {\n\n // byte-level negate all negative values, guaranteeing all bytes are\n\n // readable as unsigned.\n\n negate_twos_complement_le(input.iter_mut().rev());\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n let head = input.len() % 16;\n\n let i = twos_complement_be_to_u128(&input[0..head]);\n\n let mut cx = D::context();\n\n let mut d = cx.from_u128(i);\n\n\n\n for c in input[head..].chunks(16) {\n\n assert_eq!(c.len(), 16);\n\n // 
essentially d << 128\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 48, "score": 279443.4619689743 }, { "content": "#[inline]\n\nfn decode_len<R: Read>(reader: &mut R) -> Result<usize, AvroError> {\n\n zag_i64(reader).and_then(|i| safe_len(i as usize))\n\n}\n\n\n", "file_path": "src/avro/src/decode.rs", "rank": 49, "score": 278888.0889103903 }, { "content": "pub fn format_pg_legacy_char<B>(buf: &mut B, c: u8) -> Result<(), EvalError>\n\nwhere\n\n B: FormatBuffer,\n\n{\n\n // PostgreSQL is willing to hold invalid UTF-8 in a `Datum::String`, but\n\n // we are not.\n\n match str::from_utf8(&[c]) {\n\n Ok(s) => {\n\n buf.write_str(s);\n\n Ok(())\n\n }\n\n Err(_) => Err(EvalError::InvalidByteSequence {\n\n byte_sequence: format!(\"{:#02x}\", c),\n\n encoding_name: \"UTF8\".into(),\n\n }),\n\n }\n\n}\n\n\n\nsqlfunc!(\n\n #[sqlname = \"pglegacychartostr\"]\n", "file_path": "src/expr/src/scalar/func/impls/pg_legacy_char.rs", "rank": 50, "score": 278591.83369244554 }, { "content": "/// Lexes a SQL query.\n\n///\n\n/// Returns a list of tokens alongside their corresponding byte offset in the\n\n/// input string. 
Returns an error if the SQL query is lexically invalid.\n\n///\n\n/// See the module documentation for more information about the lexical\n\n/// structure of SQL.\n\npub fn lex(query: &str) -> Result<Vec<(Token, usize)>, ParserError> {\n\n let buf = &mut LexBuf::new(query);\n\n let mut tokens = vec![];\n\n while let Some(ch) = buf.next() {\n\n let pos = buf.pos() - ch.len_utf8();\n\n let token = match ch {\n\n _ if ch.is_ascii_whitespace() => continue,\n\n '-' if buf.consume('-') => {\n\n lex_line_comment(buf);\n\n continue;\n\n }\n\n '/' if buf.consume('*') => {\n\n lex_multiline_comment(buf)?;\n\n continue;\n\n }\n\n '\\'' => Token::String(lex_string(buf)?),\n\n 'x' | 'X' if buf.consume('\\'') => Token::HexString(lex_string(buf)?),\n\n 'e' | 'E' if buf.consume('\\'') => lex_extended_string(buf)?,\n\n 'A'..='Z' | 'a'..='z' | '_' | '\\u{80}'..=char::MAX => lex_ident(buf),\n\n '\"' => lex_quoted_ident(buf)?,\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 51, "score": 277970.7177351316 }, { "content": "/// Returns a new context appropriate for operating on numeric datums.\n\npub fn cx_datum() -> Context<Numeric> {\n\n CX_DATUM.clone()\n\n}\n\n\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 52, "score": 277755.896766486 }, { "content": "/// Constructs a sink that consumes its input and sends it nowhere.\n\npub fn dev_null<T, E>() -> DevNull<T, E> {\n\n DevNull(PhantomData, PhantomData)\n\n}\n\n\n\n/// A sink that consumes its input and sends it nowhere.\n\n///\n\n/// Primarily useful as a base sink when folding multiple sinks into one using\n\n/// [`futures::sink::SinkExt::fanout`].\n\n#[derive(Debug)]\n\npub struct DevNull<T, E>(PhantomData<T>, PhantomData<E>);\n\n\n\nimpl<T, E> Sink<T> for DevNull<T, E> {\n\n type Error = E;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn start_send(self: Pin<&mut Self>, _: T) -> Result<(), Self::Error> {\n\n Ok(())\n", 
"file_path": "src/ore/src/future.rs", "rank": 53, "score": 276976.8531052865 }, { "content": "/// Check a const-sized array of `constraints` against a sequence of `quantifiers`,\n\n/// and call `on_failure` for constraints that are not satisfied by the sequence.\n\n///\n\n/// A `constraint[i]` is violated iff one of the following conditions are met:\n\n/// - The number of `quantifiers` with satisfied type is within the given range.\n\n/// - All `quantifiers` with satisfied type also satisfy the quantifier box constraints.\n\nfn check_constraints<'a, const N: usize>(\n\n constraints: &[QuantifierConstraint; N],\n\n quantifiers: impl Iterator<Item = BoundRef<'a, Quantifier>>,\n\n model: &'a Model,\n\n mut on_failure: impl FnMut(&QuantifierConstraint) -> (),\n\n) {\n\n // count quantifiers with satisfied type and associated box types\n\n let mut q_type_counts = [0; N];\n\n let mut b_type_counts = [0; N];\n\n for q in quantifiers {\n\n for i in 0..N {\n\n if constraints[i].satisfies_q_type(&q) {\n\n q_type_counts[i] += 1;\n\n b_type_counts[i] += constraints[i].satisfies_b_type(&q, model) as usize;\n\n }\n\n }\n\n }\n\n // call on_failure for violated constraints\n\n for i in 0..N {\n\n let q_type_count_ok = constraints[i].contains(&q_type_counts[i]);\n", "file_path": "src/sql/src/query_model/validator/quantifier.rs", "rank": 54, "score": 276805.6734944304 }, { "content": "/// Encodes an BlobTraceBatchPart into the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn encode_trace_arrow<W: Write>(w: &mut W, batch: &BlobTraceBatchPart) -> Result<(), Error> {\n\n let mut metadata = BTreeMap::new();\n\n metadata.insert(\n\n INLINE_METADATA_KEY.into(),\n\n encode_trace_inline_meta(batch, ProtoBatchFormat::ArrowKvtd),\n\n );\n\n let schema = Schema::from(SCHEMA_ARROW_KVTD.fields.clone()).with_metadata(metadata);\n\n let options = 
WriteOptions { compression: None };\n\n let mut writer = FileWriter::try_new(w, &schema, None, options)?;\n\n for records in batch.updates.iter() {\n\n writer.write(&encode_arrow_batch_kvtd(&records), None)?;\n\n }\n\n writer.finish()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 55, "score": 276431.06377151294 }, { "content": "/// Encodes an BlobTraceBatchPart into the Parquet format.\n\npub fn encode_trace_parquet<W: Write>(w: &mut W, batch: &BlobTraceBatchPart) -> Result<(), Error> {\n\n encode_parquet_kvtd(\n\n w,\n\n encode_trace_inline_meta(batch, ProtoBatchFormat::ParquetKvtd),\n\n &batch.updates,\n\n )\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 56, "score": 276424.8380122322 }, { "content": "/// Decodes a BlobTraceBatchPart from the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn decode_trace_arrow<R: Read + Seek>(r: &mut R) -> Result<BlobTraceBatchPart, Error> {\n\n let file_meta = read_file_metadata(r)?;\n\n let (format, meta) =\n\n decode_trace_inline_meta(file_meta.schema.metadata.get(INLINE_METADATA_KEY))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => decode_arrow_file_kvtd(r, file_meta)?,\n\n ProtoBatchFormat::ParquetKvtd => {\n\n return Err(\"ParquetKvtd format not supported in arrow\".into())\n\n }\n\n };\n\n\n\n let ret = BlobTraceBatchPart {\n\n desc: meta.desc.map_or_else(\n\n || {\n\n Description::new(\n\n Antichain::from_elem(u64::minimum()),\n\n Antichain::from_elem(u64::minimum()),\n\n Antichain::from_elem(u64::minimum()),\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 57, "score": 276402.2864706712 }, { "content": "/// Decodes a BlobTraceBatchPart from the Parquet format.\n\npub fn 
decode_trace_parquet<R: Read + Seek>(r: &mut R) -> Result<BlobTraceBatchPart, Error> {\n\n let metadata = read_metadata(r).map_err(|err| err.to_string())?;\n\n let metadata = metadata\n\n .key_value_metadata()\n\n .as_ref()\n\n .and_then(|x| x.iter().find(|x| x.key == INLINE_METADATA_KEY));\n\n let (format, meta) = decode_trace_inline_meta(metadata.and_then(|x| x.value.as_ref()))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => {\n\n return Err(\"ArrowKVTD format not supported in parquet\".into())\n\n }\n\n ProtoBatchFormat::ParquetKvtd => decode_parquet_file_kvtd(r)?,\n\n };\n\n\n\n let ret = BlobTraceBatchPart {\n\n desc: meta.desc.map_or_else(\n\n || {\n\n Description::new(\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 58, "score": 276396.0607113904 }, { "content": "fn write_error_heading(stream: &mut StandardStream, color_spec: &ColorSpec) -> io::Result<()> {\n\n stream.set_color(color_spec.clone().set_fg(Some(Color::Red)))?;\n\n write!(stream, \"error: \")?;\n\n stream.set_color(color_spec)\n\n}\n\n\n\nimpl From<anyhow::Error> for Error {\n\n fn from(source: anyhow::Error) -> Error {\n\n Error {\n\n source,\n\n location: None,\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct ErrorLocation {\n\n filename: PathBuf,\n\n snippet: String,\n\n line: usize,\n\n col: usize,\n", "file_path": "src/testdrive/src/error.rs", "rank": 59, "score": 276172.2595662848 }, { "content": "/// Returns a new context appropriate for operating on numeric aggregates.\n\npub fn cx_agg() -> Context<NumericAgg> {\n\n CX_AGG.clone()\n\n}\n\n\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 60, "score": 274173.2544184305 }, { "content": "pub fn parse_list<'a, T, E>(\n\n s: &'a str,\n\n is_element_type_list: bool,\n\n make_null: impl FnMut() -> T,\n\n gen_elem: impl FnMut(Cow<'a, str>) -> Result<T, E>,\n\n) -> Result<Vec<T>, ParseError>\n\nwhere\n\n E: 
fmt::Display,\n\n{\n\n parse_list_inner(s, is_element_type_list, make_null, gen_elem)\n\n .map_err(|details| ParseError::invalid_input_syntax(\"list\", s).with_details(details))\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 61, "score": 274171.6110194943 }, { "content": "pub fn parse_array<'a, T, E>(\n\n s: &'a str,\n\n make_null: impl FnMut() -> T,\n\n gen_elem: impl FnMut(Cow<'a, str>) -> Result<T, E>,\n\n) -> Result<Vec<T>, ParseError>\n\nwhere\n\n E: fmt::Display,\n\n{\n\n parse_array_inner(s, make_null, gen_elem)\n\n .map_err(|details| ParseError::invalid_input_syntax(\"array\", s).with_details(details))\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 62, "score": 274171.6110194943 }, { "content": "pub fn format_numeric<F>(buf: &mut F, n: &OrderedDecimal<Numeric>) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", n.0.to_standard_notation_string());\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 63, "score": 274067.91431316116 }, { "content": "/// Extracts the schema_id placed in front of the serialized message by the confluent stack\n\n/// Optionally expect an empty\n\n///\n\n/// This function returns the schema_id and a subslice of the rest of the buffer\n\nfn extract_schema_id<'buf>(buf: &'buf [u8], protocol: &str) -> Result<(i32, &'buf [u8])> {\n\n // The first byte is a magic byte (0) that indicates the Confluent\n\n // serialization format version, and the next four bytes are a big\n\n // endian 32-bit schema ID.\n\n //\n\n // https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format\n\n //\n\n // For formats like protobuf, confluent adds additional information related to\n\n // which message in the proto file\n\n\n\n let expected_len = 5;\n\n\n\n if buf.len() < expected_len {\n\n bail!(\n\n \"Confluent-style {} datum is too few bytes: expected at least {} bytes, got {}\",\n\n protocol,\n\n expected_len,\n\n buf.len()\n\n );\n\n }\n", 
"file_path": "src/interchange/src/confluent.rs", "rank": 64, "score": 273911.34707834804 }, { "content": "pub fn ensure() -> Result<(), anyhow::Error> {\n\n println!(\"ensuring all npm packages are up-to-date...\");\n\n\n\n let client = reqwest::blocking::Client::new();\n\n for pkg in NPM_PACKAGES {\n\n if pkg.compute_digest().ok().as_deref() == Some(&pkg.digest) {\n\n println!(\"{} is up-to-date\", pkg.name);\n\n continue;\n\n } else {\n\n println!(\"{} needs updating...\", pkg.name);\n\n }\n\n\n\n let url = format!(\n\n \"https://registry.npmjs.org/{}/-/{}-{}.tgz\",\n\n pkg.name,\n\n pkg.name.split('/').last().unwrap(),\n\n pkg.version,\n\n );\n\n let res = client\n\n .get(&url)\n", "file_path": "src/npm/src/lib.rs", "rank": 65, "score": 273572.5695458639 }, { "content": "/// Creates a temporary copy of Materialize's mzdata databases\n\n///\n\n/// This is useful because running validations against the catalog can conflict with the running\n\n/// Materialize instance. Therefore it's better to run validations on a copy of the catalog.\n\npub fn mzdata_copy(catalog_path: &PathBuf) -> Result<TempDir, anyhow::Error> {\n\n let temp_dir = tempfile::tempdir()?;\n\n fs::copy(\n\n catalog_path.join(STASH_DB_NAME),\n\n temp_dir.path().join(STASH_DB_NAME),\n\n )?;\n\n fs::copy(\n\n catalog_path.join(STORAGE_DB_NAME),\n\n temp_dir.path().join(STORAGE_DB_NAME),\n\n )?;\n\n Ok(temp_dir)\n\n}\n\n\n", "file_path": "src/testdrive/src/util/mz_data.rs", "rank": 66, "score": 271654.15370653145 }, { "content": "/// Creates a temporary copy of Materialize's mzdata's catalog databases\n\n///\n\n/// This is useful because running validations against the catalog can conflict with the running\n\n/// Materialize instance. 
Therefore it's better to run validations on a copy of the catalog.\n\npub fn catalog_copy(catalog_path: &PathBuf) -> Result<TempDir, anyhow::Error> {\n\n let temp_dir = tempfile::tempdir()?;\n\n fs::copy(\n\n catalog_path.join(STASH_DB_NAME),\n\n temp_dir.path().join(STASH_DB_NAME),\n\n )?;\n\n Ok(temp_dir)\n\n}\n", "file_path": "src/testdrive/src/util/mz_data.rs", "rank": 67, "score": 271654.09641721443 }, { "content": "/// Converts a Materialize row into a vector of PostgreSQL values.\n\n///\n\n/// Calling this function is equivalent to mapping [`Value::from_datum`] over\n\n/// every datum in `row`.\n\npub fn values_from_row(row: Row, typ: &RelationType) -> Vec<Option<Value>> {\n\n row.iter()\n\n .zip(typ.column_types.iter())\n\n .map(|(col, typ)| Value::from_datum(col, &typ.scalar_type))\n\n .collect()\n\n}\n", "file_path": "src/pgrepr/src/value.rs", "rank": 68, "score": 271563.4928089844 }, { "content": "/// Encodes an BlobUnsealedBatch into the Parquet format.\n\npub fn encode_unsealed_parquet<W: Write>(\n\n w: &mut W,\n\n batch: &BlobUnsealedBatch,\n\n) -> Result<(), Error> {\n\n encode_parquet_kvtd(\n\n w,\n\n encode_unsealed_inline_meta(batch, ProtoBatchFormat::ParquetKvtd),\n\n &batch.updates,\n\n )\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 69, "score": 271131.95713067387 }, { "content": "/// Creates a future which will read exactly enough bytes to fill `buf`, unless\n\n/// EOF is reached first. 
If a short read should be considered an error, use\n\n/// [`tokio::io::AsyncReadExt::read_exact`] instead.\n\n///\n\n/// The returned future will resolve to the number of bytes read.\n\n///\n\n/// In the case of an error the contents of the buffer are unspecified.\n\npub fn read_exact_or_eof<'a, A>(reader: &'a mut A, buf: &'a mut [u8]) -> ReadExactOrEof<'a, A>\n\nwhere\n\n A: AsyncRead,\n\n{\n\n ReadExactOrEof {\n\n reader,\n\n buf,\n\n pos: 0,\n\n }\n\n}\n\n\n\nimpl<A> Future for ReadExactOrEof<'_, A>\n\nwhere\n\n A: AsyncRead + Unpin,\n\n{\n\n type Output = io::Result<usize>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n while self.pos < self.buf.len() {\n\n let me = &mut *self;\n", "file_path": "src/ore/src/netio/read_exact.rs", "rank": 70, "score": 270905.58084841294 }, { "content": "/// Writes each `elem` into `buf`, separating the elems with `sep`.\n\npub fn format_elems<F, T, E>(\n\n buf: &mut F,\n\n elems: impl IntoIterator<Item = T>,\n\n mut format_elem: impl FnMut(ListElementWriter<F>, T) -> Result<Nestable, E>,\n\n sep: char,\n\n) -> Result<(), E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n let mut elems = elems.into_iter().peekable();\n\n while let Some(elem) = elems.next() {\n\n let start = buf.len();\n\n if let Nestable::MayNeedEscaping = format_elem(ListElementWriter(buf), elem)? 
{\n\n escape_elem::<_, ListElementEscaper>(buf, start);\n\n }\n\n if elems.peek().is_some() {\n\n buf.write_char(sep)\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 71, "score": 270422.2419014008 }, { "content": "pub fn format_record<F, T, E>(\n\n buf: &mut F,\n\n elems: impl IntoIterator<Item = T>,\n\n mut format_elem: impl FnMut(RecordElementWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<Nestable, E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_char('(');\n\n let mut elems = elems.into_iter().peekable();\n\n while let Some(elem) = elems.next() {\n\n let start = buf.len();\n\n if let Nestable::MayNeedEscaping = format_elem(RecordElementWriter(buf), elem)? {\n\n escape_elem::<_, RecordElementEscaper>(buf, start);\n\n }\n\n if elems.peek().is_some() {\n\n buf.write_char(',')\n\n }\n\n }\n\n buf.write_char(')');\n", "file_path": "src/repr/src/strconv.rs", "rank": 72, "score": 270408.3594141735 }, { "content": "pub fn format_list<F, T, E>(\n\n buf: &mut F,\n\n elems: impl IntoIterator<Item = T>,\n\n format_elem: impl FnMut(ListElementWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<Nestable, E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_char('{');\n\n format_elems(buf, elems, format_elem, ',')?;\n\n buf.write_char('}');\n\n Ok(Nestable::Yes)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 73, "score": 270408.3594141735 }, { "content": "pub fn format_map<F, T, E>(\n\n buf: &mut F,\n\n elems: impl IntoIterator<Item = (impl AsRef<str>, T)>,\n\n mut format_elem: impl FnMut(MapValueWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<Nestable, E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_char('{');\n\n let mut elems = elems.into_iter().peekable();\n\n while let Some((key, value)) = elems.next() {\n\n // Map key values are always Strings, which always evaluate to\n\n // Nestable::MayNeedEscaping.\n\n let key_start = buf.len();\n\n buf.write_str(key.as_ref());\n\n escape_elem::<_, 
MapElementEscaper>(buf, key_start);\n\n\n\n buf.write_str(\"=>\");\n\n\n\n let value_start = buf.len();\n", "file_path": "src/repr/src/strconv.rs", "rank": 74, "score": 270408.3594141735 }, { "content": "pub fn format_array<F, T, E>(\n\n buf: &mut F,\n\n dims: &[ArrayDimension],\n\n elems: impl IntoIterator<Item = T>,\n\n mut format_elem: impl FnMut(ListElementWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<Nestable, E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n format_array_inner(buf, dims, &mut elems.into_iter(), &mut format_elem)?;\n\n Ok(Nestable::Yes)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 75, "score": 270408.3594141735 }, { "content": "pub fn parse_legacy_vector<'a, T, E>(\n\n s: &'a str,\n\n gen_elem: impl FnMut(Cow<'a, str>) -> Result<T, E>,\n\n) -> Result<Vec<T>, ParseError>\n\nwhere\n\n E: fmt::Display,\n\n{\n\n parse_legacy_vector_inner(s, gen_elem)\n\n .map_err(|details| ParseError::invalid_input_syntax(\"int2vector\", s).with_details(details))\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 76, "score": 270408.3594141735 }, { "content": "pub fn format_array_inner<F, T, E>(\n\n buf: &mut F,\n\n dims: &[ArrayDimension],\n\n elems: &mut impl Iterator<Item = T>,\n\n format_elem: &mut impl FnMut(ListElementWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<(), E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n if dims.is_empty() {\n\n buf.write_str(\"{}\");\n\n return Ok(());\n\n }\n\n\n\n buf.write_char('{');\n\n for j in 0..dims[0].length {\n\n if j > 0 {\n\n buf.write_char(',');\n\n }\n\n if dims.len() == 1 {\n", "file_path": "src/repr/src/strconv.rs", "rank": 77, "score": 266815.89958670345 }, { "content": "pub fn format_legacy_vector<F, T, E>(\n\n buf: &mut F,\n\n elems: impl IntoIterator<Item = T>,\n\n format_elem: impl FnMut(ListElementWriter<F>, T) -> Result<Nestable, E>,\n\n) -> Result<Nestable, E>\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n format_elems(buf, elems, format_elem, ' ')?;\n\n 
Ok(Nestable::MayNeedEscaping)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 78, "score": 266815.89958670345 }, { "content": "pub fn parse_legacy_vector_inner<'a, T, E>(\n\n s: &'a str,\n\n mut gen_elem: impl FnMut(Cow<'a, str>) -> Result<T, E>,\n\n) -> Result<Vec<T>, String>\n\nwhere\n\n E: fmt::Display,\n\n{\n\n let mut elems = vec![];\n\n let buf = &mut LexBuf::new(s);\n\n\n\n let mut gen = |elem| gen_elem(elem).map_err_to_string();\n\n\n\n loop {\n\n buf.take_while(|ch| ch.is_ascii_whitespace());\n\n match buf.peek() {\n\n Some(_) => {\n\n let elem = buf.take_while(|ch| !ch.is_ascii_whitespace());\n\n elems.push(gen(elem.into())?);\n\n }\n\n None => break,\n\n }\n\n }\n\n\n\n Ok(elems)\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 79, "score": 266815.89958670345 }, { "content": "// From postgres-types/src/private.rs.\n\nfn read_be_i32(buf: &mut &[u8]) -> Result<i32, Box<dyn Error + Sync + Send>> {\n\n if buf.len() < 4 {\n\n return Err(\"invalid buffer size\".into());\n\n }\n\n let mut bytes = [0; 4];\n\n bytes.copy_from_slice(&buf[..4]);\n\n *buf = &buf[4..];\n\n Ok(i32::from_be_bytes(bytes))\n\n}\n\n\n", "file_path": "src/sqllogictest/src/runner.rs", "rank": 80, "score": 266671.7644493747 }, { "content": "fn lex_multiline_comment(buf: &mut LexBuf) -> Result<(), ParserError> {\n\n let pos = buf.pos() - 2;\n\n let mut nesting = 0;\n\n while let Some(ch) = buf.next() {\n\n match ch {\n\n '*' if buf.consume('/') => {\n\n if nesting == 0 {\n\n return Ok(());\n\n } else {\n\n nesting -= 1;\n\n }\n\n }\n\n '/' if buf.consume('*') => nesting += 1,\n\n _ => (),\n\n }\n\n }\n\n bail!(pos, \"unterminated multiline comment\")\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 81, "score": 263569.57831026113 }, { "content": "/// Transforms a vector containing indexes of needed columns into one containing\n\n/// the \"skips\" an iterator over a Row would need to perform to see those values.\n\n///\n\n/// This function requires that 
all of the elements in `indexes` are strictly\n\n/// increasing.\n\n/// E.g. [3, 6, 10, 15] turns into [3, 3, 4, 5]\n\npub fn convert_indexes_to_skips(mut indexes: Vec<usize>) -> Vec<usize> {\n\n for i in 1..indexes.len() {\n\n soft_assert_or_log!(\n\n indexes[i - 1] < indexes[i],\n\n \"convert_indexes_to_skip needs indexes to be strictly increasing. Received: {:?}\",\n\n indexes,\n\n );\n\n }\n\n\n\n for i in (1..indexes.len()).rev() {\n\n indexes[i] -= indexes[i - 1];\n\n indexes[i] -= 1;\n\n }\n\n\n\n indexes\n\n}\n\n\n", "file_path": "src/dataflow-types/src/plan/reduce.rs", "rank": 82, "score": 263026.9869039868 }, { "content": "pub fn plan_raise(scx: &StatementContext, r: RaiseStatement) -> Result<Plan, anyhow::Error> {\n\n scx.require_experimental_mode(\"RAISE statement\")?;\n\n Ok(Plan::Raise(RaisePlan {\n\n severity: r.severity,\n\n }))\n\n}\n", "file_path": "src/sql/src/plan/statement/raise.rs", "rank": 83, "score": 262164.65591907944 }, { "content": "fn typecheck_debezium(value_desc: &RelationDesc) -> Result<(usize, usize), anyhow::Error> {\n\n let (before_idx, before_ty) = value_desc\n\n .get_by_name(&\"before\".into())\n\n .ok_or_else(|| anyhow!(\"'before' column missing from debezium input\"))?;\n\n let (after_idx, after_ty) = value_desc\n\n .get_by_name(&\"after\".into())\n\n .ok_or_else(|| anyhow!(\"'after' column missing from debezium input\"))?;\n\n if !matches!(before_ty.scalar_type, ScalarType::Record { .. 
}) {\n\n bail!(\"'before' column must be of type record\");\n\n }\n\n if before_ty != after_ty {\n\n bail!(\"'before' type differs from 'after' column\");\n\n }\n\n Ok((before_idx, after_idx))\n\n}\n\n\n", "file_path": "src/sql/src/plan/statement/ddl.rs", "rank": 84, "score": 260085.88778193673 }, { "content": "fn write_value_ref(schema: &Schema, value: &Value, buffer: &mut Vec<u8>) -> Result<(), Error> {\n\n if !value.validate(schema.top_node()) {\n\n return Err(ValidationError::new(\"value does not match schema\").into());\n\n }\n\n encode_ref(value, schema.top_node(), buffer);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/avro/src/writer.rs", "rank": 85, "score": 259659.32849585736 }, { "content": "pub fn transform_expr(scx: &StatementContext, expr: &mut Expr<Aug>) -> Result<(), PlanError> {\n\n run_transforms(scx, |t, expr| t.visit_expr_mut(expr), expr)\n\n}\n\n\n\npub(crate) fn run_transforms<F, A>(\n\n scx: &StatementContext,\n\n mut f: F,\n\n ast: &mut A,\n\n) -> Result<(), PlanError>\n\nwhere\n\n F: for<'ast> FnMut(&mut dyn VisitMut<'ast, Aug>, &'ast mut A),\n\n{\n\n let mut func_rewriter = FuncRewriter::new(scx);\n\n f(&mut func_rewriter, ast);\n\n func_rewriter.status?;\n\n\n\n let mut desugarer = Desugarer::new();\n\n f(&mut desugarer, ast);\n\n desugarer.status\n\n}\n\n\n", "file_path": "src/sql/src/plan/transform_ast.rs", "rank": 86, "score": 258750.68152212107 }, { "content": "/// Extension methods for [`std::result::Result`].\n\npub trait ResultExt<T, E> {\n\n /// Applies [`Into::into`] to a contained [`Err`] value, leaving an [`Ok`]\n\n /// value untouched.\n\n fn err_into<E2>(self) -> Result<T, E2>\n\n where\n\n E: Into<E2>;\n\n\n\n /// Formats an [`Err`] value as a detailed error message, preserving any context information.\n\n ///\n\n /// This is equivalent to `format!(\"{:#}\", err)`, except that it's easier to type.\n\n fn err_to_string(&self) -> Option<String>\n\n where\n\n E: std::fmt::Display;\n\n\n\n /// Maps a `Result<T, E>` to `Result<T, 
String>` by converting the [`Err`] result into a string\n\n /// using the \"alternate\" formatting.\n\n fn map_err_to_string(self) -> Result<T, String>\n\n where\n\n E: std::fmt::Display;\n\n}\n", "file_path": "src/ore/src/result.rs", "rank": 87, "score": 258569.36739093816 }, { "content": "/// Encode a compatible value (implementing the `ToAvro` trait) into Avro format, also\n\n/// performing schema validation.\n\n///\n\n/// **NOTE** This function has a quite small niche of usage and does NOT generate headers and sync\n\n/// markers; use [`Writer`](struct.Writer.html) to be fully Avro-compatible if you don't know what\n\n/// you are doing, instead.\n\npub fn to_avro_datum<T: ToAvro>(schema: &Schema, value: T) -> Result<Vec<u8>, Error> {\n\n let mut buffer = Vec::new();\n\n write_avro_datum(schema, value, &mut buffer)?;\n\n Ok(buffer)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Cursor;\n\n use std::str::FromStr;\n\n\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::*;\n\n use crate::types::Record;\n\n use crate::util::zig_i64;\n\n use crate::Reader;\n\n\n\n static SCHEMA: &str = r#\"\n\n {\n", "file_path": "src/avro/src/writer.rs", "rank": 88, "score": 258560.32316749258 }, { "content": "/// Plans an expression in the AS OF position of a `SELECT` or `TAIL` statement.\n\npub fn plan_as_of(scx: &StatementContext, expr: Option<Expr<Aug>>) -> Result<QueryWhen, PlanError> {\n\n let mut expr = match expr {\n\n None => return Ok(QueryWhen::Immediately),\n\n Some(expr) => expr,\n\n };\n\n\n\n let scope = Scope::empty();\n\n let desc = RelationDesc::empty();\n\n let qcx = QueryContext::root(scx, QueryLifetime::OneShot(scx.pcx()?));\n\n\n\n transform_ast::transform_expr(scx, &mut expr)?;\n\n\n\n let ecx = &ExprContext {\n\n qcx: &qcx,\n\n name: \"AS OF\",\n\n scope: &scope,\n\n relation_type: &desc.typ(),\n\n allow_aggregates: false,\n\n allow_subqueries: false,\n\n allow_windows: false,\n\n };\n\n let expr = plan_expr(ecx, &expr)?\n\n 
.type_as_any(ecx)?\n\n .lower_uncorrelated()?;\n\n Ok(QueryWhen::AtTimestamp(expr))\n\n}\n\n\n", "file_path": "src/sql/src/plan/query.rs", "rank": 89, "score": 258238.66708929458 }, { "content": "/// Parse a query result type string into a vec of expected types\n\nfn parse_types(input: &str) -> Result<Vec<Type>, anyhow::Error> {\n\n input\n\n .chars()\n\n .map(|char| {\n\n Ok(match char {\n\n 'T' => Type::Text,\n\n 'I' => Type::Integer,\n\n 'R' => Type::Real,\n\n 'B' => Type::Bool,\n\n 'O' => Type::Oid,\n\n _ => bail!(\"Unexpected type char {} in: {}\", char, input),\n\n })\n\n })\n\n .collect()\n\n}\n\n\n\nlazy_static! {\n\n static ref WHITESPACE_REGEX: Regex = Regex::new(r\"\\s+\").unwrap();\n\n}\n\n\n", "file_path": "src/sqllogictest/src/parser.rs", "rank": 90, "score": 257768.23139841945 }, { "content": "/// Computes the hash of an object implementing [`Hash`].\n\npub fn hash<T: Hash>(t: &T) -> u64 {\n\n let mut hasher = DefaultHasher::new();\n\n t.hash(&mut hasher);\n\n hasher.finish()\n\n}\n", "file_path": "src/ore/src/hash.rs", "rank": 91, "score": 257605.21697320484 }, { "content": "fn lex_parameter(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n let pos = buf.pos() - 1;\n\n let n = buf\n\n .take_while(|ch| matches!(ch, '0'..='9'))\n\n .parse()\n\n .map_err(|_| ParserError::new(pos, \"invalid parameter number\"))?;\n\n Ok(Token::Parameter(n))\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 92, "score": 257529.96324445802 }, { "content": "fn lex_string(buf: &mut LexBuf) -> Result<String, ParserError> {\n\n let mut s = String::new();\n\n loop {\n\n let pos = buf.pos() - 1;\n\n loop {\n\n match buf.next() {\n\n Some('\\'') if buf.consume('\\'') => s.push('\\''),\n\n Some('\\'') => break,\n\n Some(c) => s.push(c),\n\n None => bail!(pos, \"unterminated quoted string\"),\n\n }\n\n }\n\n if !lex_to_adjacent_string(buf) {\n\n return Ok(s);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 93, "score": 
257529.96324445802 }, { "content": "fn lex_op(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n buf.prev();\n\n let mut s = String::new();\n\n\n\n // In PostgreSQL, operators might be composed of any of the characters in\n\n // the set below...\n\n while let Some(ch) = buf.next() {\n\n match ch {\n\n // ...except the sequences `--` and `/*` start comments, even within\n\n // what would otherwise be an operator...\n\n '-' if buf.consume('-') => lex_line_comment(buf),\n\n '/' if buf.consume('*') => lex_multiline_comment(buf)?,\n\n #[rustfmt::skip]\n\n '+'|'-'|'*'|'/'|'<'|'>'|'='|'~'|'!'|'@'|'#'|'%'|'^'|'&'|'|'|'`'|'?' => s.push(ch),\n\n _ => {\n\n buf.prev();\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 94, "score": 257529.96324445802 }, { "content": "fn lex_number(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n buf.prev();\n\n let mut s = buf.take_while(|ch| matches!(ch, '0'..='9')).to_owned();\n\n\n\n // Optional decimal component.\n\n if buf.consume('.') {\n\n s.push('.');\n\n s.push_str(buf.take_while(|ch| matches!(ch, '0'..='9')));\n\n }\n\n\n\n // Optional exponent.\n\n if buf.consume('e') || buf.consume('E') {\n\n s.push('E');\n\n let require_exp = if buf.consume('-') {\n\n s.push('-');\n\n true\n\n } else {\n\n buf.consume('+')\n\n };\n\n let exp = buf.take_while(|ch| matches!(ch, '0'..='9'));\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 95, "score": 257529.96324445802 }, { "content": "/// Given some stack traces, generate a map of addresses to their\n\n/// corresponding symbols.\n\n///\n\n/// Each address could correspond to more than one symbol, because\n\n/// of inlining. (E.g. 
if 0x1234 comes from \"g\", which is inlined in \"f\", the corresponding vec of symbols will be [\"f\", \"g\"].)\n\npub fn symbolicate(profile: &StackProfile) -> HashMap<usize, Vec<String>> {\n\n let mut all_addrs = vec![];\n\n for (stack, _annotation) in profile.stacks.iter() {\n\n all_addrs.extend(stack.addrs.iter().cloned());\n\n }\n\n // Sort so addresses from the same images are together,\n\n // to avoid thrashing `backtrace::resolve`'s cache of\n\n // parsed images.\n\n all_addrs.sort_unstable();\n\n all_addrs.dedup();\n\n all_addrs\n\n .into_iter()\n\n .map(|addr| {\n\n let mut syms = vec![];\n\n backtrace::resolve(addr as *mut c_void, |sym| {\n\n let name = sym\n\n .name()\n\n .map(|sn| sn.to_string())\n\n .unwrap_or_else(|| \"???\".to_string());\n\n syms.push(name);\n\n });\n\n syms.reverse();\n\n (addr, syms)\n\n })\n\n .collect()\n\n}\n", "file_path": "src/prof/src/lib.rs", "rank": 96, "score": 255120.70581956595 }, { "content": "fn lex_dollar_string(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n let pos = buf.pos() - 1;\n\n let tag = format!(\"${}$\", buf.take_while(|ch| ch != '$'));\n\n let _ = buf.next();\n\n if let Some(s) = buf.take_to_delimiter(&tag) {\n\n Ok(Token::String(s.into()))\n\n } else {\n\n Err(ParserError::new(pos, \"unterminated dollar-quoted string\"))\n\n }\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 97, "score": 254735.9099650499 }, { "content": "fn lex_quoted_ident(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n let mut s = String::new();\n\n let pos = buf.pos() - 1;\n\n loop {\n\n match buf.next() {\n\n Some('\"') if buf.consume('\"') => s.push('\"'),\n\n Some('\"') => break,\n\n Some(c) => s.push(c),\n\n None => bail!(pos, \"unterminated quoted identifier\"),\n\n }\n\n }\n\n Ok(Token::Ident(s))\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 98, "score": 254735.9099650499 }, { "content": "fn lex_extended_string(buf: &mut LexBuf) -> Result<Token, ParserError> {\n\n fn 
lex_unicode_escape(buf: &mut LexBuf, n: usize) -> Result<char, ParserError> {\n\n let pos = buf.pos() - 2;\n\n buf.next_n(n)\n\n .and_then(|s| u32::from_str_radix(s, 16).ok())\n\n .and_then(|codepoint| char::try_from(codepoint).ok())\n\n .ok_or_else(|| ParserError::new(pos, \"invalid unicode escape\"))\n\n }\n\n\n\n // We do not support octal (\\o) or hexadecimal (\\x) escapes, since it is\n\n // possible to construct invalid UTF-8 with these escapes. We could check\n\n // for and reject invalid UTF-8, of course, but it is too annoying to be\n\n // worth doing right now. We still lex the escapes to produce nice error\n\n // messages.\n\n\n\n fn lex_octal_escape(buf: &mut LexBuf) -> ParserError {\n\n let pos = buf.pos() - 2;\n\n buf.take_while(|ch| matches!(ch, '0'..='7'));\n\n ParserError::new(pos, \"octal escapes are not supported\")\n\n }\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 99, "score": 254735.9099650499 } ]